Delete out-of-tree antlion code.
This change removes all files from antlion except README.md, which now
points to the in-tree copy of antlion.
Fixed: 440387588
Change-Id: I86f5e08902787439cc48ec965b807e5d77dbb8d8
Reviewed-on: https://fuchsia-review.googlesource.com/c/antlion/+/1353925
Reviewed-by: Adam Schrems <schrems@google.com>
Reviewed-by: Ritu Gupta <guptaritu@google.com>
Reviewed-by: Pavan Kumar Juturu <jpavankumar@google.com>
Commit-Queue: Charles Celerier <chcl@google.com>
Fuchsia-Auto-Submit: Charles Celerier <chcl@google.com>
diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index 027bdb1..0000000
--- a/.editorconfig
+++ /dev/null
@@ -1,14 +0,0 @@
-# EditorConfig is awesome: https://EditorConfig.org
-
-root = true
-
-[*]
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-
-[*.py]
-charset = utf-8
-indent_style = space
-indent_size = 4
-max_line_length = 88
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
deleted file mode 100644
index befa060..0000000
--- a/.git-blame-ignore-revs
+++ /dev/null
@@ -1,5 +0,0 @@
-# Run code through yapf
-19a821d5f1ff9079f9a40d27553182a433a27834
-
-# Run code through black
-0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index b521867..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# OS-specific
-#
-
-.DS_Store
-
-#
-# Language specific
-#
-
-# Python
-*.py[cod]
-*.egg-info/
-/build/
-/.venv
-/.mypy_cache
-
-#
-# Editors
-#
-
-/.idea/
-/.vscode/
-*~
-
-#
-# antlion
-#
-
-# Configuration
-/*.json
-/*.yaml
-/config/
-
-# Generated during run-time
-/logs
-
-# Local development scripts
-/*.sh
-!/format.sh
-
-#
-# third_party
-#
-
-/third_party/*
-!/third_party/github.com/
-!/third_party/github.com/jd/tenacity
-/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn
deleted file mode 100644
index d3a434c..0000000
--- a/BUILD.gn
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Declare Fuchsia build targets for using antlion from the Fuchsia tree.
-# Requires additional configuration of jiri fetch attributes from your Fuchsia
-# checkout:
-# `jiri init -fetch-optional=antlion`
-
-import("//build/python/python_library.gni")
-
-assert(is_host, "antlion only supported on the host toolchain")
-
-# Tests for full build validation
-group("e2e_tests") {
- testonly = true
- public_deps = [ "tests:e2e_tests" ]
-}
-
-# Subset of tests to validate builds in under 15 minutes.
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [ "tests:e2e_tests_quick" ]
-}
-
-# Tests for at-desk custom validation
-group("e2e_tests_manual") {
- testonly = true
- public_deps = [ "tests:e2e_tests_manual" ]
-}
-
-# Tests to validate the netstack in under 15 minutes.
-group("e2e_tests_netstack_quick") {
- testonly = true
- public_deps = [
- "tests/dhcp:dhcpv4_duplicate_address_test",
- "tests/dhcp:dhcpv4_interop_basic_test",
- "tests/dhcp:dhcpv4_interop_combinatorial_options_test",
- "tests/wlan/functional:beacon_loss_test",
- "tests/wlan/performance:channel_sweep_test_quick",
-
- # TODO(http://b/372467106): Uncomment once ToggleWlanInterfaceStressTest is
- # updated to use current Fuchsia APIs for removing interfaces.
- # "tests/netstack:toggle_wlan_interface_stress_test",
- ]
-}
-
-# Unit tests only
-group("tests") {
- testonly = true
- public_deps = [ "runner:tests" ]
-}
-
-python_library("antlion") {
- source_root = "//third_party/antlion/packages/antlion"
- testonly = true
- sources = [
- "__init__.py",
- "base_test.py",
- "capabilities/__init__.py",
- "capabilities/ssh.py",
- "context.py",
- "controllers/__init__.py",
- "controllers/access_point.py",
- "controllers/adb.py",
- "controllers/adb_lib/__init__.py",
- "controllers/adb_lib/error.py",
- "controllers/android_device.py",
- "controllers/android_lib/__init__.py",
- "controllers/android_lib/errors.py",
- "controllers/android_lib/events.py",
- "controllers/android_lib/logcat.py",
- "controllers/android_lib/services.py",
- "controllers/ap_lib/__init__.py",
- "controllers/ap_lib/ap_get_interface.py",
- "controllers/ap_lib/ap_iwconfig.py",
- "controllers/ap_lib/bridge_interface.py",
- "controllers/ap_lib/dhcp_config.py",
- "controllers/ap_lib/dhcp_server.py",
- "controllers/ap_lib/extended_capabilities.py",
- "controllers/ap_lib/hostapd.py",
- "controllers/ap_lib/hostapd_ap_preset.py",
- "controllers/ap_lib/hostapd_bss_settings.py",
- "controllers/ap_lib/hostapd_config.py",
- "controllers/ap_lib/hostapd_constants.py",
- "controllers/ap_lib/hostapd_security.py",
- "controllers/ap_lib/hostapd_utils.py",
- "controllers/ap_lib/radio_measurement.py",
- "controllers/ap_lib/radvd.py",
- "controllers/ap_lib/radvd_config.py",
- "controllers/ap_lib/radvd_constants.py",
- "controllers/ap_lib/regulatory_channels.py",
- "controllers/ap_lib/third_party_ap_profiles/__init__.py",
- "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
- "controllers/ap_lib/third_party_ap_profiles/asus.py",
- "controllers/ap_lib/third_party_ap_profiles/belkin.py",
- "controllers/ap_lib/third_party_ap_profiles/linksys.py",
- "controllers/ap_lib/third_party_ap_profiles/netgear.py",
- "controllers/ap_lib/third_party_ap_profiles/securifi.py",
- "controllers/ap_lib/third_party_ap_profiles/tplink.py",
- "controllers/ap_lib/wireless_network_management.py",
- "controllers/attenuator.py",
- "controllers/attenuator_lib/__init__.py",
- "controllers/attenuator_lib/_tnhelper.py",
- "controllers/attenuator_lib/aeroflex/__init__.py",
- "controllers/attenuator_lib/aeroflex/telnet.py",
- "controllers/attenuator_lib/minicircuits/__init__.py",
- "controllers/attenuator_lib/minicircuits/http.py",
- "controllers/attenuator_lib/minicircuits/telnet.py",
- "controllers/fastboot.py",
- "controllers/fuchsia_device.py",
- "controllers/fuchsia_lib/__init__.py",
- "controllers/fuchsia_lib/base_lib.py",
- "controllers/fuchsia_lib/lib_controllers/__init__.py",
- "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
- "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
- "controllers/fuchsia_lib/package_server.py",
- "controllers/fuchsia_lib/sl4f.py",
- "controllers/fuchsia_lib/ssh.py",
- "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
- "controllers/iperf_client.py",
- "controllers/iperf_server.py",
- "controllers/openwrt_ap.py",
- "controllers/openwrt_lib/__init__.py",
- "controllers/openwrt_lib/network_const.py",
- "controllers/openwrt_lib/network_settings.py",
- "controllers/openwrt_lib/openwrt_constants.py",
- "controllers/openwrt_lib/wireless_config.py",
- "controllers/openwrt_lib/wireless_settings_applier.py",
- "controllers/packet_capture.py",
- "controllers/pdu.py",
- "controllers/pdu_lib/__init__.py",
- "controllers/pdu_lib/digital_loggers/__init__.py",
- "controllers/pdu_lib/digital_loggers/webpowerswitch.py",
- "controllers/pdu_lib/synaccess/__init__.py",
- "controllers/pdu_lib/synaccess/np02b.py",
- "controllers/sl4a_lib/__init__.py",
- "controllers/sl4a_lib/error_reporter.py",
- "controllers/sl4a_lib/event_dispatcher.py",
- "controllers/sl4a_lib/rpc_client.py",
- "controllers/sl4a_lib/rpc_connection.py",
- "controllers/sl4a_lib/sl4a_manager.py",
- "controllers/sl4a_lib/sl4a_ports.py",
- "controllers/sl4a_lib/sl4a_session.py",
- "controllers/sniffer.py",
- "controllers/sniffer_lib/__init__.py",
- "controllers/sniffer_lib/local/__init__.py",
- "controllers/sniffer_lib/local/local_base.py",
- "controllers/sniffer_lib/local/tcpdump.py",
- "controllers/sniffer_lib/local/tshark.py",
- "controllers/utils_lib/__init__.py",
- "controllers/utils_lib/commands/__init__.py",
- "controllers/utils_lib/commands/command.py",
- "controllers/utils_lib/commands/date.py",
- "controllers/utils_lib/commands/ip.py",
- "controllers/utils_lib/commands/journalctl.py",
- "controllers/utils_lib/commands/nmcli.py",
- "controllers/utils_lib/commands/pgrep.py",
- "controllers/utils_lib/commands/route.py",
- "controllers/utils_lib/commands/shell.py",
- "controllers/utils_lib/commands/tcpdump.py",
- "controllers/utils_lib/ssh/__init__.py",
- "controllers/utils_lib/ssh/connection.py",
- "controllers/utils_lib/ssh/formatter.py",
- "controllers/utils_lib/ssh/settings.py",
- "decorators.py",
- "error.py",
- "event/__init__.py",
- "event/decorators.py",
- "event/event.py",
- "event/event_bus.py",
- "event/event_subscription.py",
- "event/subscription_handle.py",
- "keys.py",
- "libs/__init__.py",
- "libs/logging/__init__.py",
- "libs/logging/log_stream.py",
- "libs/ota/__init__.py",
- "libs/ota/ota_runners/__init__.py",
- "libs/ota/ota_runners/ota_runner.py",
- "libs/ota/ota_runners/ota_runner_factory.py",
- "libs/ota/ota_tools/__init__.py",
- "libs/ota/ota_tools/adb_sideload_ota_tool.py",
- "libs/ota/ota_tools/ota_tool.py",
- "libs/ota/ota_tools/ota_tool_factory.py",
- "libs/ota/ota_tools/update_device_ota_tool.py",
- "libs/ota/ota_updater.py",
- "libs/proc/__init__.py",
- "libs/proc/job.py",
- "libs/proc/process.py",
- "logger.py",
- "net.py",
- "runner.py",
- "test_utils/__init__.py",
- "test_utils/abstract_devices/__init__.py",
- "test_utils/abstract_devices/wlan_device.py",
- "test_utils/abstract_devices/wmm_transceiver.py",
- "test_utils/dhcp/__init__.py",
- "test_utils/dhcp/base_test.py",
- "test_utils/fuchsia/__init__.py",
- "test_utils/fuchsia/wmm_test_cases.py",
- "test_utils/net/__init__.py",
- "test_utils/net/connectivity_const.py",
- "test_utils/net/net_test_utils.py",
- "test_utils/wifi/__init__.py",
- "test_utils/wifi/base_test.py",
- "test_utils/wifi/wifi_constants.py",
- "test_utils/wifi/wifi_test_utils.py",
- "types.py",
- "utils.py",
- "validation.py",
- ]
- library_deps = [
- "//third_party/github.com/jd/tenacity",
- "//sdk/fidl/fuchsia.wlan.common:fuchsia.wlan.common_python",
- "//src/testing/end_to_end/honeydew",
- "//third_party/mobly",
- "//third_party/pyyaml:yaml",
- ]
-}
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index 0c36022..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,164 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog], and this project adheres to
-[Semantic Versioning].
-
-[Keep a Changelog]: https://keepachangelog.com/en/1.0.0/
-[Semantic Versioning]: https://semver.org/spec/v2.0.0.html
-
-## [Unreleased]
-
-[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.3.0..refs/heads/main
-
-### Removed
-
-- [BREAKING CHANGE] Support for Python 3.8, 3.9, and 3.10. The minimum supported
-version of Python is now 3.11. If running antlion as part of the Fuchsia tree,
-nothing is required; Python 3.11 is vendored with Fuchsia and will be found by
-GN. If running antlion out of tree, ensure your Python version is at least 3.11.
-- `WlanRvrTest` user params `debug_pre_traffic_cmd` and `debug_post_traffic_cmd`
-
-## [0.3.0] - 2023-05-17
-
-[0.3.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/tags/v0.3.0
-
-### Deprecated
-
-- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To
-ease this transition, upon running `act.py`, a compatible YAML config will be
-generated for you and placed next to your JSON config.
-- **The `act.py` binary; instead, invoke tests directly.** Upon running
-`act.py`, a deprecation warning will provide instructions for how to invoke
-antlion tests without act.py and with the newly generated YAML config.
-
-### Added
-
-- Presubmit testing in [CV] (aka CQ). All tests specified with the `qemu_env`
-environment will run before every antlion CL is submitted.
-- Postsubmit testing in [CI]. See [Milo] for an exhaustive list of builders.
-- [EditorConfig] file for consistent coding styles.
-Installing an EditorConfig plugin for your editor is highly recommended.
-
-[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
-[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
-[Milo]: https://luci-milo.appspot.com/ui/search?q=antlion
-[EditorConfig]: https://editorconfig.org
-
-### Changed
-
-- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes
-the test file directly using the Mobly test runner, rather than using `act.py`.
- - All tests have been refactored to allow direct running with the Mobly test
- runner.
- - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The
- resulting config is passed directly to Mobly's config parser. See notes for
- this release's deprecations above.
-- Generate YAML config instead of JSON config from antlion-runner.
-- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field
-is only used during `FlashTest`; it is not used when the device is already
-provisioned (e.g. when tests are dispatched in Fuchsia infrastructure).
-
-### Removed
-
-- Unused controllers and tests (full list)
-
-### Fixed
-
-- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
-[bug](http://b/267330535))
-- Failure to parse 'test_name' in DHCP configuration file in `Dhcpv4InteropTest`
-(invalid option) introduced by previous refactor ([@patricklu],
-[bug](http://b/232574848))
-- Logging for `Dhcpv4InteropTest` changed to utilize a temp file instead of
-/var/log/messages to fix test error with duplicate PID log messages
-([@patricklu], [bug](http://b/232574848))
-
-## [0.2.0] - 2023-01-03
-
-[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
-
-### Added
-
-- Added snapshots before reboot and during test teardown in `WlanRebootTest`
-([@patricklu], [bug](http://b/273923552))
-- Download radvd logs from AP for debugging IPv6 address allocation
-- Optional `wlan_features` config field to `FuchsiaDevice` for declaring which
-WLAN features the device supports, such as BSS Transition Management
-
-### Changed
-
-- All path config options in `FuchsiaDevice` expand the home directory (`~`) and
-environment variables
- - Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
- sensible defaults using `$FUCHSIA_DIR`
-- Running tests works out of the box without specifying `--testpaths`
- - Moved `tests` and `unit_tests` to the `antlion` package, enabling
-    straightforward packaging of tests.
- - Merged `antlion` and `antlion_contrib` packages
-- Converted several required dependencies to optional dependencies:
- - `bokeh` is only needed for producing HTML graphing. If this feature is
- desired, install antlion with the bokeh option: `pip install ".[bokeh]"`
- - `usbinfo` and `psutil` are not needed when a static IP address is assigned
-    to the Fuchsia DUT. If assigning a static IP address is not an option,
- install antlion with the mdns option: `pip install ".[mdns]"`
-
-### Removed
-
-- [BREAKING CHANGE] Dependencies for Python versions older than 3.8. Please
-upgrade your system to a newer version of Python to run antlion tests.
-- `ssh_config` from `FuchsiaDevice` config. SSH configs are generated to provide
-a reproducible connection method and ease initial setup.
-
-### Fixed
-
-- Failure to acquire IPv6 address in `WlanRebootTest` ([bug](http://b/256009189))
-- Typo in `ChannelSweepTest` preventing use of iPerf ([@patricklu])
-- "Country code never updated" error affecting all Fuchsia ToT builds
-([@karlward], [bug](https://fxbug.dev/42067674))
-- Parsing new stderr format from `ffx component destroy` ([@karlward],
-[bug](https://fxbug.dev/42067722))
-- "Socket operation on non-socket" error during initialization of ffx on MacOS
-([@karlward], [bug](https://fxbug.dev/42067812))
-- Python 3.8 support for IPv6 scope IDs ([bug](http://b/261746355))
-
-## [0.1.0] - 2022-11-28
-
-Forked from ACTS with the following changes:
-
-[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
-
-### Added
-
-- A modern approach to installation using `pyproject.toml` via `pip install .`
-
-### Changed
-
-- Directory structure to the [src-layout]
-- Package and import names from ACTS to antlion
-- Copyright notice from AOSP to Fuchsia Authors
-
-[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
-
-### Deprecated
-
-- Use of the `setup.py` script. This is only used to keep infrastructure
-compatibility with ACTS. Once antlion is moved to Fuchsia infrastructure, this
-script will be removed.
-
-### Removed
-
-- Dependency on `protobuf` and `grpc` Python packages. Removes the metric
-logger, which was unused by Fuchsia WLAN tests.
-- Unused Android-specific build files, tests, and utilities for features Fuchsia
-doesn't support, such as telephony and automotive
-
-### Fixed
-
-- KeyError for 'mac_addr' in WlanDeprecatedConfigurationTest ([@sakuma],
-[bug](http://b/237709921))
-
-[@sakuma]: https://fuchsia-review.git.corp.google.com/q/owner:sakuma%2540google.com
-[@patricklu]: https://fuchsia-review.git.corp.google.com/q/owner:patricklu%2540google.com
-[@karlward]: https://fuchsia-review.git.corp.google.com/q/owner:karlward%2540google.com
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index d645695..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index a6caf7f..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,4 +0,0 @@
-include setup.py README.md
-recursive-include packages/antlion *
-global-exclude .DS_Store
-global-exclude *.pyc
diff --git a/OWNERS b/OWNERS
deleted file mode 100644
index 2f62318..0000000
--- a/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-patricklu@google.com
-priyankach@google.com
-paulsethi@google.com
-jpavankumar@google.com
-guptaritu@google.com
diff --git a/README.md b/README.md
index 6c211f7..69a82f6 100644
--- a/README.md
+++ b/README.md
@@ -1,273 +1 @@
-# antlion
-
-Collection of host-driven, hardware-agnostic Fuchsia connectivity tests. Mainly
-targeting WLAN and Netstack testing.
-
-[Docs] | [Report Bug] | [Request Feature]
-
-[TOC]
-
-[Docs]: http://go/antlion
-[Report Bug]: http://go/conn-test-bug
-[Request Feature]: http://b/issues/new?component=1182297&template=1680893
-
-## Getting started with QEMU
-
-The quickest way to run antlion is by using the Fuchsia QEMU emulator. This
-enables antlion tests that do not require hardware-specific capabilities like
-WLAN. This is especially useful for verifying that antlion builds and runs
-without syntax errors. If you require WLAN capabilities, see
-[below](#running-with-a-local-physical-device).
-
-1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
-
-2. Configure and build Fuchsia to run antlion tests virtually on QEMU
-
- ```sh
- fx set core.qemu-x64 \
- --with //src/testing/sl4f \
- --with //src/sys/bin/start_sl4f \
- --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
- --with-host //third_party/antlion:e2e_tests_quick
- fx build
- ```
-
-3. In a separate terminal, run the emulator with networking enabled
-
- ```sh
- ffx emu stop && ffx emu start -H --net tap && ffx log
- ```
-
-4. In a separate terminal, run a package server
-
- ```sh
- fx serve
- ```
-
-5. Run an antlion test
-
- ```sh
- fx test --e2e --output //third_party/antlion/tests/examples:sl4f_sanity_test
- ```
-
-## Running with a local physical device
-
-A physical device is required for most antlion tests, which rely on physical I/O
-such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
-as easy, reliable, and reproducible as possible. The device will be discovered
-using FFX or mDNS, so make sure your host machine has a network connection to
-the device.
-
-1. Configure and build Fuchsia for your target with the following extra
- arguments:
-
- ```sh
- fx set core.my-super-cool-product \
- --with-host //third_party/antlion:e2e_tests
- fx build
- ```
-
-2. Ensure your device is flashed with an appropriate build
-
-3. In a separate terminal, run a package server
-
- ```sh
- fx serve
- ```
-
-4. Run an antlion test
-
- ```sh
- fx test --e2e --output //third_party/antlion/tests/functional:ping_stress_test
- ```
-
-If you would like to include an AP in your test config:
-
-1. Run a test with an AP
-
- ```sh
- fx test --e2e --output //third_party/antlion/tests/functional:wlan_scan_test_without_wpa2 \
- -- --ap-ip 192.168.1.50 --ap-ssh-port 22
- ```
-
-If you would like to skip device discovery, or use further auxiliary devices,
-you can generate your own Mobly config.
-
-1. Write the config
-
- ```sh
- cat <<EOF > my-antlion-config.yaml
- TestBeds:
-
- - Name: antlion-runner
- Controllers:
- FuchsiaDevice:
- - mdns_name: fuchsia-00e0-4c01-04df
- ip: ::1
- ssh_port: 8022
- MoblyParams:
- LogPath: logs
- EOF
- ```
-
-1. Run an antlion test
-
- ```sh
- fx test --e2e --output //third_party/antlion/tests/functional:ping_stress_test -- --config-override $PWD/my-antlion-config.yaml
- ```
-
-## Running without a Fuchsia checkout
-
-Requires Python 3.11+
-
-1. Clone the repo
-
- ```sh
- git clone https://fuchsia.googlesource.com/antlion
- ```
-
-2. Install dependencies using [venv](https://docs.python.org/3/library/venv.html#how-venvs-work)
-
- ```sh
- cd antlion
- python3 -m venv .venv # Create a virtual environment in the `.venv` directory
- source .venv/bin/activate # Activate the virtual environment
- pip install --editable ".[mdns]"
- # Run `deactivate` later to exit the virtual environment
- ```
-
-3. Write the sample config and update the Fuchsia controller to match your
- development environment
-
- ```sh
- cat <<EOF > simple-config.yaml
- TestBeds:
- - Name: antlion-runner
- Controllers:
- FuchsiaDevice:
- - ip: fuchsia-00e0-4c01-04df
- MoblyParams:
- LogPath: logs
- EOF
- ```
-
- Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or
- `fuchsia-emulator` if using an emulator. The nodename can be found by looking
- for a log similar to the one below.
-
- ```text
- [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator'
- ```
-
-4. Run the sanity test
-
- ```sh
- python tests/examples/Sl4fSanityTest.py -c simple-config.yaml
- ```
-
-## Contributing
-
-Contributions are what make open source projects a great place to learn,
-inspire, and create. Any contributions you make are **greatly appreciated**.
-If you have a suggestion that would make this better, please create a CL.
-
-Before contributing, additional setup is necessary:
-
-- Install developer Python packages for formatting and linting
-
- ```sh
- pip install --editable ".[dev]"
- ```
-
-- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
- whitespace
-
-- Complete the steps in '[Contribute source changes]' to gain authorization to
- upload CLs to Fuchsia's Gerrit.
-
-To create a CL:
-
-1. Create a branch (`git checkout -b feature/amazing-feature`)
-2. Make changes
-3. Document the changes in `CHANGELOG.md`
-4. Auto-format changes (`./format.sh`)
-
- > Note: antlion follows the [Black code style] (rather than the
- > [Google Python Style Guide])
-
-5. Verify no typing errors (`mypy .`)
-6. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
-7. Upload CL (`git push origin HEAD:refs/for/main`)
-
-> A public bug tracker is not (yet) available.
-
-[Black code style]: https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html
-[Google Python Style Guide]: https://google.github.io/styleguide/pyguide.html
-[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
-
-### Recommended git aliases
-
-There are a handful of git commands that will be commonly used throughout the
-process of contributing. Here are a few aliases to add to your git config
-(`~/.gitconfig`) for a smoother workflow:
-
-- `git amend` to modify your CL in response to code review comments
-- `git uc` to upload your CL, run pre-submit tests, enable auto-submit, and
- add a reviewer
-
-```gitconfig
-[alias]
- amend = commit --amend --no-edit
- uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana
-```
-
-You may also want to add a section to ignore the project's large formatting changes:
-
-```gitconfig
-[blame]
- ignoreRevsFile = .git-blame-ignore-revs
-```
-
-## License
-
-Distributed under the Apache 2.0 License. See `LICENSE` for more information.
-
-## Acknowledgments
-
-This is a fork of [ACTS][ACTS], the connectivity testing framework used by
-Android. The folks over there did a great job cultivating amazing tools, much
-of which is still used here or has been extended with additional features.
-
-[ACTS]: https://fuchsia.googlesource.com/third_party/android.googlesource.com/platform/tools/test/connectivity/
-
-### Migrating CLs from ACTS
-
-`antlion` and ACTS share the same git history, so migrating existing changes is
-straightforward:
-
-1. Check out the latest `main`
-
- ```sh
- git checkout main
- git pull --rebase origin main
- ```
-
-2. Cherry-pick the ACTS change
-
- ```sh
- git fetch acts refs/changes/16/12345/6 && git checkout -b change-12345 FETCH_HEAD
- git fetch https://android.googlesource.com/platform/tools/test/connectivity refs/changes/30/2320530/1 && git cherry-pick FETCH_HEAD
- ```
-
-3. Resolve merge conflicts, if any
-
-   ```sh
-   git add [...]
-   git cherry-pick --continue
- ```
-
-4. Upload CL
-
- ```sh
- git push origin HEAD:refs/for/main # or "git uc" if using the alias
- ```
+The code in this repository was migrated to //src/testing/end_to_end/antlion.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
deleted file mode 100644
index 1d09f13..0000000
--- a/antlion_host_test.gni
+++ /dev/null
@@ -1,201 +0,0 @@
-# Copyright 2024 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/host.gni")
-import("//build/python/python_binary.gni")
-import("//build/rust/rustc_binary.gni")
-import("//build/testing/host_test.gni")
-import("//build/testing/host_test_data.gni")
-
-# Declares a host-side antlion test.
-#
-# Examples
-#
-# ```
-# antlion_host_test("sl4f_sanity_test") {
-# main_source = "Sl4fSanityTest.py"
-# }
-#
-# antlion_host_test("wlan_rvr_test_2g") {
-# main_source = "WlanRvrTest.py"
-# test_params = "rvr_settings.yaml"
-# test_cases = [ "test_rvr_11n_2g_*" ]
-# }
-# ```
-#
-# Parameters
-#
-# main_source
-# The .py file defining the antlion test.
-# Type: path
-#
-# sources (optional)
-# Other files that are used in the test.
-# Type: list(path)
-# Default: empty list
-#
-# test_params (optional)
-# Path to a YAML file with additional test parameters. This will be provided
-# to the test in the antlion config under the "test_params" key.
-# Type: string
-#
-# test_cases (optional)
-# List of test cases to run. Defaults to running all test cases.
-# Type: list(string)
-#
-# test_data_deps (optional)
-# List of test data GN targets that are needed at runtime.
-# Type: list(string)
-# Default: empty list
-#
-# deps
-# environments
-# visibility
-template("antlion_host_test") {
- assert(defined(invoker.main_source), "main_source is required")
-
- #
- # Define antlion test python_binary().
- #
- _python_binary_name = "${target_name}.pyz"
- _python_binary_target = "${target_name}_python_binary"
- python_binary(_python_binary_target) {
- forward_variables_from(invoker,
- [
- "enable_mypy",
- "main_source",
- "sources",
- "data_sources",
- "data_package_name",
- ])
- output_name = _python_binary_name
- main_callable = "test_runner.main" # Mobly-specific entry point.
- deps = [ "//third_party/antlion" ]
- if (defined(invoker.test_data_deps)) {
- deps += invoker.test_data_deps
- }
- if (defined(invoker.libraries)) {
- deps += invoker.libraries
- }
- testonly = true
- visibility = [ ":*" ]
- }
-
- _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir")
-
- #
- # Define antlion test host_test_data().
- #
- _host_test_data_target = "${target_name}_test_data"
- host_test_data(_host_test_data_target) {
- testonly = true
- visibility = [ ":*" ]
- sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") +
- "/${_python_binary_name}" ]
- outputs = [ "${_test_dir}/${_python_binary_name}" ]
- deps = [ ":${_python_binary_target}" ]
- if (defined(invoker.deps)) {
- deps += invoker.deps
- }
- }
-
- #
- # Define SSH binary host_test_data().
- #
- _host_test_data_ssh = "${target_name}_test_data_ssh"
- host_test_data(_host_test_data_ssh) {
- testonly = true
- visibility = [ ":*" ]
- sources = [
- "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh",
- ]
- outputs = [ "${_test_dir}/ssh" ]
- }
-
- #
- # Define Mobly test params YAML host_test_data().
- #
- if (defined(invoker.test_params)) {
- _host_test_data_test_params = "${target_name}_test_data_test_params"
- host_test_data(_host_test_data_test_params) {
- testonly = true
- visibility = [ ":*" ]
- sources = [ invoker.test_params ]
- outputs = [ "${_test_dir}/${invoker.test_params}" ]
- }
- }
-
- #
- # Define FFX binary host_test_data().
- #
- _host_test_data_ffx = "${target_name}_test_data_ffx"
- host_test_data(_host_test_data_ffx) {
- testonly = true
- visibility = [ ":*" ]
- sources = [ "${host_tools_dir}/ffx" ]
- outputs = [ "${_test_dir}/ffx" ]
- deps = [
- "//src/developer/ffx:host",
- "//src/developer/ffx:suite_test_data",
- ]
- }
-
- #
- # Define the antlion host_test() using antlion-runner.
- #
- host_test(target_name) {
- forward_variables_from(invoker,
- [
- "environments",
- "visibility",
- "isolated",
- "product_bundle",
- "timeout_secs",
- ])
-
- binary_path = "${root_out_dir}/antlion-runner"
-
- args = [
- "--python-bin",
- rebase_path(python_exe_src, root_build_dir),
- "--antlion-pyz",
- rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir),
- "--out-dir",
- rebase_path("${_test_dir}", root_build_dir),
- "--ffx-binary",
- rebase_path("${_test_dir}/ffx", root_build_dir),
- "--ffx-subtools-search-path",
- rebase_path(host_tools_dir, root_build_dir),
- "--ssh-binary",
- rebase_path("${_test_dir}/ssh", root_build_dir),
- ]
-
- if (defined(invoker.test_cases)) {
- args += invoker.test_cases
- }
-
- data_deps = [ "//src/developer/ffx:suite_test_data" ]
-
- deps = [
- ":${_host_test_data_ffx}",
- ":${_host_test_data_ssh}",
- ":${_host_test_data_target}",
- "//build/python:interpreter",
- "//src/testing/end_to_end/honeydew",
- "//third_party/antlion/runner",
- ]
-
- if (defined(invoker.test_params)) {
- args += [
- "--test-params",
- rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir),
- ]
- deps += [ ":${_host_test_data_test_params}" ]
- }
-
- if (defined(invoker.test_data_deps)) {
- deps += invoker.test_data_deps
- }
- }
-}
diff --git a/environments.gni b/environments.gni
deleted file mode 100644
index 1aee7c2..0000000
--- a/environments.gni
+++ /dev/null
@@ -1,193 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/testing/environments.gni")
-
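-# These environment scopes are consumed by antlion_host_test() targets via the
-# `environments` parameter. An illustrative (not prescriptive) usage:
-#
-#   antlion_host_test("wlan_scan_test") {
-#     main_source = "WlanScanTest.py"
-#     environments = display_ap_envs
-#   }
-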
-astro_ap_env = {
- dimensions = {
- access_points = "1"
- device_type = "Astro"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-astro_ap_iperf_env = {
- dimensions = {
- access_points = "1"
- device_type = "Astro"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-astro_ap_iperf_attenuator_env = {
- dimensions = {
- access_points = "1"
- attenuators = "1"
- device_type = "Astro"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-sherlock_ap_env = {
- dimensions = {
- access_points = "1"
- device_type = "Sherlock"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-sherlock_ap_iperf_env = {
- dimensions = {
- access_points = "1"
- device_type = "Sherlock"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-sherlock_ap_iperf_attenuator_env = {
- dimensions = {
- access_points = "1"
- attenuators = "1"
- device_type = "Sherlock"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-nelson_ap_env = {
- dimensions = {
- access_points = "1"
- device_type = "Nelson"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-nelson_ap_iperf_env = {
- dimensions = {
- access_points = "1"
- device_type = "Nelson"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-nelson_ap_iperf_attenuator_env = {
- dimensions = {
- access_points = "1"
- attenuators = "1"
- device_type = "Nelson"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-nuc11_ap_env = {
- dimensions = {
- access_points = "1"
- device_type = "Intel NUC Kit NUC11TNHv5"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-nuc11_ap_iperf_env = {
- dimensions = {
- access_points = "1"
- device_type = "Intel NUC Kit NUC11TNHv5"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-nuc11_ap_iperf_attenuator_env = {
- dimensions = {
- access_points = "1"
- attenuators = "1"
- device_type = "Intel NUC Kit NUC11TNHv5"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-vim3_ap_env = {
- dimensions = {
- access_points = "1"
- device_type = "Vim3"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-vim3_ap_iperf_env = {
- dimensions = {
- access_points = "1"
- device_type = "Vim3"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-vim3_ap_iperf_attenuator_env = {
- dimensions = {
- access_points = "1"
- attenuators = "1"
- device_type = "Vim3"
- iperf_servers = "1"
- pool = "fuchsia.tests.connectivity"
- }
- tags = [ "antlion" ]
-}
-
-# Display environments supported by antlion.
-display_envs = [
- astro_env,
- sherlock_env,
- nelson_env,
- nuc11_env,
- vim3_env,
- wlan_astro_env,
- wlan_sherlock_env,
- wlan_nelson_env,
- wlan_nuc11_env,
- wlan_vim3_env,
-]
-
-display_ap_envs = [
- astro_ap_env,
- sherlock_ap_env,
- nelson_ap_env,
- nuc11_ap_env,
- vim3_ap_env,
-]
-
-display_ap_iperf_envs = [
- astro_ap_iperf_env,
- sherlock_ap_iperf_env,
- nelson_ap_iperf_env,
- nuc11_ap_iperf_env,
- vim3_ap_iperf_env,
-]
-
-display_ap_iperf_attenuator_envs = [
- astro_ap_iperf_attenuator_env,
- sherlock_ap_iperf_attenuator_env,
- nelson_ap_iperf_attenuator_env,
- nuc11_ap_iperf_attenuator_env,
- vim3_ap_iperf_attenuator_env,
-]
diff --git a/format.sh b/format.sh
deleted file mode 100755
index 8ede1f6..0000000
--- a/format.sh
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/bin/bash
-
-# Get the directory of this script
-SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
-
-install_virtual_environment_doc() {
- echo "Please install the virtual environment before running format.sh by running"
- echo "the following commands:"
- echo ""
- echo " cd $SCRIPT_DIR"
- echo " python3 -m venv .venv"
- echo " (source .venv/bin/activate && pip install -e \".[dev]\")"
-}
-
-if [ -f "$SCRIPT_DIR/.venv/bin/activate" ] ; then
- source "$SCRIPT_DIR/.venv/bin/activate"
-else
- echo ""
- echo "====================="
- echo "Error: Virtual environment not installed!"
- echo "====================="
- echo ""
- install_virtual_environment_doc
- echo ""
- exit 1
-fi
-
-# Verify expected virtual environment binaries exist to prevent unintentionally running
-# different versions from outside the environment.
-#
-# Note: The virtual environment may exist without the binaries if dependencies weren't installed
-# (e.g., running `python3 -m venv .venv` without `pip install -e '.[dev]'`).
-find_venv_binary() {
- find .venv/bin -name $1 | grep -q .
-}
-
-venv_binaries="autoflake black isort"
-all_binaries_found=true
-
-for binary in $venv_binaries; do
- if ! find_venv_binary $binary; then
- all_binaries_found=false
- echo "Error: $binary not installed in virtual environment"
- fi
-done
-
-if ! $all_binaries_found; then
- echo ""
- install_virtual_environment_doc
- echo ""
- exit 1
-fi
-
-# Detect trivial unused code.
-#
-# Automatic removal is possible, but is considered an unsafe operation. When a
-# change hasn't been committed, automatic removal could cause unintended irreversible
-# loss of in-progress code.
-#
-# Note: This cannot detect unused code between modules or packages. For complex unused
-# code detection, vulture should be used.
-autoflake \
- --quiet \
- --check-diff \
- --remove-duplicate-keys \
- --remove-unused-variables \
- --remove-all-unused-imports \
- --recursive .
-
-if [ $? -eq 0 ]; then
- echo "No unused code found"
-else
- echo ""
- echo "====================="
- echo "Unused code detected!"
- echo "====================="
- echo ""
- echo "If these changes are trivial, consider running:"
- echo "\"autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .\""
- echo ""
- read -p "Run this command to remove all unused code? [y/n] " -n 1 -r
- echo ""
- echo ""
-
- if [[ $REPLY =~ ^[Yy]$ ]]; then
- autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .
- else
- exit 1
- fi
-fi
-
-# Sort imports to avoid bikeshedding.
-isort .
-
-# Format code; also to avoid bikeshedding.
-black .
-
diff --git a/packages/antlion/__init__.py b/packages/antlion/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/__init__.py
+++ /dev/null
diff --git a/packages/antlion/base_test.py b/packages/antlion/base_test.py
deleted file mode 100755
index 64508f6..0000000
--- a/packages/antlion/base_test.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import re
-from typing import Callable
-
-from mobly.base_test import BaseTestClass
-from mobly.base_test import Error as MoblyError
-
-
-class AntlionBaseTest(BaseTestClass):
- # TODO(b/415313773): Remove this once wlanix tests are updated to use mobly's base_test.py
- # instead of AntlionBaseTest class, as the missing functionality is now merged into Mobly.
- def _get_test_methods(
- self, test_names: list[str]
- ) -> list[tuple[str, Callable[[], None]]]:
- """Resolves test method names to bound test methods.
-
- Args:
- test_names: Test method names.
-
- Returns:
- List of tuples containing the test method name and the function implementing
- its logic.
-
- Raises:
- MoblyError: test_names does not match any tests.
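-
-        Example (illustrative names; exact method names also match):
-
-            self._get_test_methods(["test_rvr_11n_2g_.*", "test_beacon_loss"])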
- """
-
- test_table: dict[str, Callable[[], None]] = {
- **self._generated_test_table
- }
- for name, _ in inspect.getmembers(type(self), callable):
- if name.startswith("test_"):
- test_table[name] = getattr(self, name)
-
- test_methods: list[tuple[str, Callable[[], None]]] = []
- for test_name in test_names:
- if test_name in test_table:
- test_methods.append((test_name, test_table[test_name]))
- else:
- try:
- pattern = re.compile(test_name)
- except Exception as e:
- raise MoblyError(
- f'"{test_name}" is not a valid regular expression'
- ) from e
- for name in test_table:
- if pattern.fullmatch(name.strip()):
- test_methods.append((name, test_table[name]))
-
- if len(test_methods) == 0:
- all_patterns = '" or "'.join(test_names)
- all_tests = "\n - ".join(test_table.keys())
- raise MoblyError(
- f"{self.TAG} does not declare any tests matching "
- f'"{all_patterns}". Please verify the correctness of '
- f"{self.TAG} test names: \n - {all_tests}"
- )
-
- return test_methods
diff --git a/packages/antlion/capabilities/__init__.py b/packages/antlion/capabilities/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/capabilities/__init__.py
+++ /dev/null
diff --git a/packages/antlion/capabilities/ssh.py b/packages/antlion/capabilities/ssh.py
deleted file mode 100644
index 05bcf28..0000000
--- a/packages/antlion/capabilities/ssh.py
+++ /dev/null
@@ -1,467 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import logging
-import os
-import shlex
-import shutil
-import signal
-import subprocess
-import time
-from dataclasses import dataclass
-from typing import IO, Mapping
-
-from mobly import logger, signals
-
-from antlion.net import wait_for_port
-from antlion.runner import (
- CalledProcessError,
- CalledProcessTransportError,
- Runner,
-)
-from antlion.types import Json
-from antlion.validation import MapValidator
-
-DEFAULT_SSH_PORT: int = 22
-DEFAULT_SSH_TIMEOUT_SEC: float = 60.0
-DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
-DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
-# The default package repository for all components.
-
-
-class SSHResult:
- """Result of an SSH command."""
-
- def __init__(
- self,
- process: (
- subprocess.CompletedProcess[bytes]
- | subprocess.CompletedProcess[str]
- | subprocess.CalledProcessError
- ),
- ) -> None:
- if isinstance(process.stdout, bytes):
- self._stdout_bytes = process.stdout
- elif isinstance(process.stdout, str):
- self._stdout = process.stdout
- else:
- raise TypeError(
- "Expected process.stdout to be either bytes or str, "
- f"got {type(process.stdout)}"
- )
-
- if isinstance(process.stderr, bytes):
- self._stderr_bytes = process.stderr
- elif isinstance(process.stderr, str):
- self._stderr = process.stderr
- else:
- raise TypeError(
- "Expected process.stderr to be either bytes or str, "
- f"got {type(process.stderr)}"
- )
-
- self._exit_status = process.returncode
-
- def __str__(self) -> str:
- if self.exit_status == 0:
- return self.stdout
- return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
- @property
- def stdout(self) -> str:
- if not hasattr(self, "_stdout"):
- self._stdout = self._stdout_bytes.decode("utf-8", errors="replace")
- return self._stdout
-
- @property
- def stdout_bytes(self) -> bytes:
- if not hasattr(self, "_stdout_bytes"):
- self._stdout_bytes = self._stdout.encode()
- return self._stdout_bytes
-
- @property
- def stderr(self) -> str:
- if not hasattr(self, "_stderr"):
- self._stderr = self._stderr_bytes.decode("utf-8", errors="replace")
- return self._stderr
-
- @property
- def exit_status(self) -> int:
- return self._exit_status
-
-
-class SSHError(signals.TestError):
- """A SSH command returned with a non-zero status code."""
-
- def __init__(
- self, command: list[str], result: CalledProcessError, elapsed_sec: float
- ):
- if result.returncode < 0:
- try:
- reason = f"died with {signal.Signals(-result.returncode)}"
- except ValueError:
- reason = f"died with unknown signal {-result.returncode}"
- else:
- reason = f"unexpectedly returned {result.returncode}"
-
- super().__init__(
- f'SSH command "{" ".join(command)}" {reason} after {elapsed_sec:.2f}s\n'
- f'stderr: {result.stderr.decode("utf-8", errors="replace")}\n'
- f'stdout: {result.stdout.decode("utf-8", errors="replace")}\n'
- )
- self.result = result
-
-
-@dataclass
-class SSHConfig:
- """SSH client config."""
-
- # SSH flags. See ssh(1) for full details.
- user: str
- host_name: str
- identity_file: str
-
- ssh_binary: str = "ssh"
- config_file: str = "/dev/null"
- port: int = 22
-
- #
- # SSH options. See ssh_config(5) for full details.
- #
- connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
- server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
- strict_host_key_checking: bool = False
- user_known_hosts_file: str = "/dev/null"
- log_level: str = "ERROR"
-
- # Force allocation of a pseudo-tty. This can be used to execute arbitrary
- # screen-based programs on a remote machine, which can be very useful, e.g.
- # when implementing menu services.
- force_tty: bool = False
-
- def full_command(self, command: list[str]) -> list[str]:
- """Generate the complete command to execute command over SSH.
-
-        Args:
-            command: The command to run over SSH.
-
- Returns:
- Arguments composing the complete call to SSH.
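-
-        For example (an illustrative sketch, where `ssh_config` is an
-        SSHConfig instance), the returned list can be passed directly to
-        subprocess.run:
-
-            subprocess.run(ssh_config.full_command(["echo", "ok"]), capture_output=True)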
- """
- return [
- self.ssh_binary,
- # SSH flags
- "-i",
- self.identity_file,
- "-F",
- self.config_file,
- "-p",
- str(self.port),
- # SSH configuration options
- "-o",
- f"ConnectTimeout={self.connect_timeout}",
- "-o",
- f"ServerAliveInterval={self.server_alive_interval}",
- "-o",
- f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
- "-o",
- f"UserKnownHostsFile={self.user_known_hosts_file}",
- "-o",
- f"LogLevel={self.log_level}",
- "-o",
- f'RequestTTY={"force" if self.force_tty else "auto"}',
- f"{self.user}@{self.host_name}",
- ] + command
-
- @staticmethod
- def from_config(config: Mapping[str, Json]) -> "SSHConfig":
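-        """Create an SSHConfig from a JSON-like controller config mapping.
-
-        A minimal sketch of the expected keys, with illustrative values:
-
-            SSHConfig.from_config({
-                "user": "fuchsia",
-                "host": "fuchsia-00e0-4c01-04df",
-                "identity_file": "~/.ssh/fuchsia_ed25519",
-            })
-        """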
- c = MapValidator(config)
- ssh_binary_path = c.get(str, "ssh_binary_path", None)
- if ssh_binary_path is None:
- found_path = shutil.which("ssh")
- if not isinstance(found_path, str):
- raise ValueError("Failed to find ssh in $PATH")
- ssh_binary_path = found_path
-
- return SSHConfig(
- user=c.get(str, "user"),
- host_name=c.get(str, "host"),
- identity_file=c.get(str, "identity_file"),
- ssh_binary=ssh_binary_path,
- config_file=c.get(str, "ssh_config", "/dev/null"),
- port=c.get(int, "port", 22),
- connect_timeout=c.get(int, "connect_timeout", 30),
- )
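# Illustrative usage sketch (user, host, and key path are placeholders, not
# from any real testbed config): SSHConfig assembles the final ssh invocation
# through full_command().
example = SSHConfig(
    user="fuchsia",
    host_name="192.0.2.10",
    identity_file="/path/to/key",
)
# Yields roughly:
#   ssh -i /path/to/key -F /dev/null -p 22 -o ConnectTimeout=<default>
#   -o ServerAliveInterval=<default> -o StrictHostKeyChecking=no
#   -o UserKnownHostsFile=/dev/null -o LogLevel=ERROR -o RequestTTY=auto
#   fuchsia@192.0.2.10 echo hello
print(" ".join(example.full_command(["echo", "hello"])))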
-
-
-class SSHProvider(Runner):
- """Device-specific provider for SSH clients."""
-
- def __init__(self, config: SSHConfig) -> None:
- """
- Args:
- config: SSH client config
- """
- logger_tag = f"ssh | {config.host_name}"
- if config.port != DEFAULT_SSH_PORT:
- logger_tag += f":{config.port}"
-
- # Escape IPv6 interface identifier if present.
- logger_tag = logger_tag.replace("%", "%%")
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[{logger_tag}]",
- },
- )
-
- self.config = config
-
- try:
- self.wait_until_reachable()
- self.log.info("sshd is reachable")
- except Exception as e:
- raise TimeoutError("sshd is unreachable") from e
-
- def wait_until_reachable(self) -> None:
- """Wait for the device to become reachable via SSH.
-
- Raises:
- TimeoutError: connect_timeout has expired without a successful SSH
- connection to the device
- CalledProcessTransportError: SSH is available on the device but
- connect_timeout has expired and SSH fails to run
- subprocess.TimeoutExpired: when the timeout expires while waiting
- for a child process
- """
- timeout_sec = self.config.connect_timeout
- timeout = time.time() + timeout_sec
- wait_for_port(
- self.config.host_name, self.config.port, timeout_sec=timeout_sec
- )
-
- while True:
- try:
- self._run(
- ["echo"],
- stdin=None,
- timeout_sec=timeout_sec,
- log_output=True,
- )
- return
- except CalledProcessTransportError as e:
- # Repeat if necessary; _run() can exit prematurely by receiving
- # SSH transport errors. These errors can be caused by sshd not
- # being fully initialized yet.
- if time.time() < timeout:
- continue
- else:
- raise e
-
- def wait_until_unreachable(
- self,
- interval_sec: int = 1,
- timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC,
- ) -> None:
- """Wait for the device to become unreachable via SSH.
-
-        Args:
-            interval_sec: Seconds to wait between reachability checks
-            timeout_sec: Seconds to wait until raising TimeoutError
-
-        Raises:
-            TimeoutError: when timeout_sec has expired and the device is still
-                reachable over SSH
- """
- timeout = time.time() + timeout_sec
-
- while True:
- try:
- wait_for_port(
- self.config.host_name,
- self.config.port,
- timeout_sec=interval_sec,
- )
- except TimeoutError:
- return
-
-            if time.time() > timeout:
-                raise TimeoutError(
-                    f"Connection to {self.config.host_name} is still reachable "
-                    f"after {timeout_sec}s"
-                )
-            time.sleep(interval_sec)
-
- def run(
- self,
- command: str | list[str],
- stdin: bytes | None = None,
- timeout_sec: float | None = DEFAULT_SSH_TIMEOUT_SEC,
- log_output: bool = True,
- connect_retries: int = 3,
- ) -> subprocess.CompletedProcess[bytes]:
- """Run a command on the device then exit.
-
- Args:
-            command: Command to run on the device, as a string or a list of
-                arguments.
-            stdin: Standard input to the command.
-            timeout_sec: Seconds to wait for the command to complete.
-            log_output: Whether to log the command's stdout and stderr.
-            connect_retries: Number of times to retry the connection on
-                transport failure.
-
- Raises:
- subprocess.CalledProcessError: when the process exits with a non-zero status
- subprocess.TimeoutExpired: when the timeout expires while waiting
- for a child process
- CalledProcessTransportError: when the underlying transport fails
-
- Returns:
-            A subprocess.CompletedProcess containing the command's output.
- """
- if isinstance(command, str):
- s = shlex.shlex(command, posix=True, punctuation_chars=True)
- s.whitespace_split = True
- command = list(s)
- return self._run_with_retry(
- command, stdin, timeout_sec, log_output, connect_retries
- )
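# Illustrative usage sketch (host and key path are placeholders), assuming
# sshd on the target is reachable so SSHProvider.__init__ succeeds:
provider = SSHProvider(
    SSHConfig(
        user="fuchsia",
        host_name="192.0.2.10",
        identity_file="/path/to/key",
    )
)
result = provider.run("echo hello", timeout_sec=10)
assert result.returncode == 0
print(result.stdout.decode("utf-8", errors="replace"))  # -> "hello"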
-
- def _run_with_retry(
- self,
- command: list[str],
- stdin: bytes | None,
- timeout_sec: float | None,
- log_output: bool,
- connect_retries: int,
- ) -> subprocess.CompletedProcess[bytes]:
- err: Exception = ValueError("connect_retries cannot be 0")
- for _ in range(0, connect_retries):
- try:
- return self._run(command, stdin, timeout_sec, log_output)
- except CalledProcessTransportError as e:
- err = e
- self.log.warning("Connect failed: %s", e)
- raise err
-
- def _run(
- self,
- command: list[str],
- stdin: bytes | None,
- timeout_sec: float | None,
- log_output: bool,
- ) -> subprocess.CompletedProcess[bytes]:
- start = time.perf_counter()
- with self.start(command) as process:
- try:
- stdout, stderr = process.communicate(stdin, timeout_sec)
- except subprocess.TimeoutExpired as e:
- process.kill()
- process.wait()
- raise e
- except: # Including KeyboardInterrupt, communicate handled that.
- process.kill()
- # We don't call process.wait() as Popen.__exit__ does that for
- # us.
- raise
-
- elapsed = time.perf_counter() - start
- exit_code = process.poll()
-
- if log_output:
- self.log.debug(
- "Command %s exited with %d after %.2fs\nstdout: %s\nstderr: %s",
- " ".join(command),
- exit_code,
- elapsed,
- stdout.decode("utf-8", errors="replace"),
- stderr.decode("utf-8", errors="replace"),
- )
- else:
- self.log.debug(
- "Command %s exited with %d after %.2fs",
- " ".join(command),
- exit_code,
- elapsed,
- )
-
- if exit_code is None:
- raise ValueError(
- f'Expected process to be terminated: "{" ".join(command)}"'
- )
-
- if exit_code:
- err = CalledProcessError(
- exit_code, process.args, output=stdout, stderr=stderr
- )
-
- if err.returncode == 255:
- reason = stderr.decode("utf-8", errors="replace")
- if (
- "Name or service not known" in reason
- or "Host does not exist" in reason
- ):
- raise CalledProcessTransportError(
- f"Hostname {self.config.host_name} cannot be resolved to an address"
- ) from err
- if "Connection timed out" in reason:
- raise CalledProcessTransportError(
- f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
- ) from err
- if "Connection refused" in reason:
- raise CalledProcessTransportError(
- f"Connection refused by {self.config.host_name}"
- ) from err
-
- raise err
-
- return subprocess.CompletedProcess(
- process.args, exit_code, stdout, stderr
- )
-
- def run_async(self, command: str) -> subprocess.CompletedProcess[bytes]:
- s = shlex.shlex(command, posix=True, punctuation_chars=True)
- s.whitespace_split = True
- command_split = list(s)
-
- process = self.start(command_split)
- return subprocess.CompletedProcess(
- self.config.full_command(command_split),
- returncode=0,
- stdout=str(process.pid).encode("utf-8"),
- stderr=None,
- )
-
- def start(
- self,
- command: list[str],
- stdout: IO[bytes] | int = subprocess.PIPE,
- stdin: IO[bytes] | int = subprocess.PIPE,
- ) -> subprocess.Popen[bytes]:
- full_command = self.config.full_command(command)
- self.log.debug(
- f"Starting: {' '.join(command)}\nFull command: {' '.join(full_command)}"
- )
- return subprocess.Popen(
- full_command,
- stdin=stdin,
- stdout=stdout if stdout else subprocess.PIPE,
- stderr=subprocess.PIPE,
- preexec_fn=os.setpgrp,
- )
diff --git a/packages/antlion/context.py b/packages/antlion/context.py
deleted file mode 100644
index 137a95d..0000000
--- a/packages/antlion/context.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, var-annotated"
-import enum
-import logging
-import os
-
-from antlion.event import event_bus
-from antlion.event.event import (
- Event,
- TestCaseBeginEvent,
- TestCaseEndEvent,
- TestClassBeginEvent,
- TestClassEndEvent,
- TestClassEvent,
-)
-
-
-class ContextLevel(enum.IntEnum):
- ROOT = 0
- TESTCLASS = 1
- TESTCASE = 2
-
-
-def get_current_context(depth=None):
- """Get the current test context at the specified depth.
- Pulls the most recently created context, with a level at or below the given
- depth, from the _contexts stack.
-
- Args:
- depth: The desired context level. For example, the TESTCLASS level would
- yield the current test class context, even if the test is currently
- within a test case.
-
- Returns: An instance of TestContext.
- """
- if depth is None:
- return _contexts[-1]
- return _contexts[min(depth, len(_contexts) - 1)]
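# Illustrative usage (assuming this runs while a test case is executing):
case_ctx = get_current_context()  # most specific context, the test case here
class_ctx = get_current_context(ContextLevel.TESTCLASS)  # enclosing test class
root_ctx = get_current_context(ContextLevel.ROOT)  # the test-run root context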
-
-
-def _get_context_for_test_case_event(event):
- """Generate a TestCaseContext from the given TestCaseEvent."""
- return TestCaseContext(event.test_class, event.test_case)
-
-
-def _get_context_for_test_class_event(event):
- """Generate a TestClassContext from the given TestClassEvent."""
- return TestClassContext(event.test_class)
-
-
-class NewContextEvent(Event):
- """The event posted when a test context has changed."""
-
-
-class NewTestClassContextEvent(NewContextEvent):
- """The event posted when the test class context has changed."""
-
-
-class NewTestCaseContextEvent(NewContextEvent):
- """The event posted when the test case context has changed."""
-
-
-def _update_test_class_context(event):
- """Pushes a new TestClassContext to the _contexts stack upon a
- TestClassBeginEvent. Pops the most recent context off the stack upon a
- TestClassEndEvent. Posts the context change to the event bus.
-
- Args:
- event: An instance of TestClassBeginEvent or TestClassEndEvent.
- """
- if isinstance(event, TestClassBeginEvent):
- _contexts.append(_get_context_for_test_class_event(event))
- if isinstance(event, TestClassEndEvent):
- if _contexts:
- _contexts.pop()
- event_bus.post(NewTestClassContextEvent())
-
-
-def _update_test_case_context(event):
- """Pushes a new TestCaseContext to the _contexts stack upon a
- TestCaseBeginEvent. Pops the most recent context off the stack upon a
- TestCaseEndEvent. Posts the context change to the event bus.
-
- Args:
- event: An instance of TestCaseBeginEvent or TestCaseEndEvent.
- """
- if isinstance(event, TestCaseBeginEvent):
- _contexts.append(_get_context_for_test_case_event(event))
- if isinstance(event, TestCaseEndEvent):
- if _contexts:
- _contexts.pop()
- event_bus.post(NewTestCaseContextEvent())
-
-
-event_bus.register(TestClassEvent, _update_test_class_context)
-event_bus.register(TestCaseBeginEvent, _update_test_case_context, order=-100)
-event_bus.register(TestCaseEndEvent, _update_test_case_context, order=100)
-
-
-class TestContext(object):
- """An object representing the current context in which a test is executing.
-
- The context encodes the current state of the test runner with respect to a
- particular scenario in which code is being executed. For example, if some
- code is being executed as part of a test case, then the context should
- encode information about that test case such as its name or enclosing
- class.
-
- The subcontext specifies a relative path in which certain outputs,
- e.g. logcat, should be kept for the given context.
-
- The full output path is given by
- <base_output_path>/<context_dir>/<subcontext>.
-
- Attributes:
- _base_output_paths: a dictionary mapping a logger's name to its base
- output path
- _subcontexts: a dictionary mapping a logger's name to its
- subcontext-level output directory
- """
-
- _base_output_paths = {}
- _subcontexts = {}
-
- def get_base_output_path(self, log_name=None):
- """Gets the base output path for this logger.
-
- The base output path is interpreted as the reporting root for the
- entire test runner.
-
- If a path has been added with add_base_output_path, it is returned.
- Otherwise, a default is determined by _get_default_base_output_path().
-
- Args:
- log_name: The name of the logger.
-
- Returns:
- The output path.
- """
- if log_name in self._base_output_paths:
- return self._base_output_paths[log_name]
- return self._get_default_base_output_path()
-
- @classmethod
- def add_base_output_path(cls, log_name, base_output_path):
- """Store the base path for this logger.
-
- Args:
- log_name: The name of the logger.
- base_output_path: The base path of output files for this logger.
- """
- cls._base_output_paths[log_name] = base_output_path
-
- def get_subcontext(self, log_name=None):
- """Gets the subcontext for this logger.
-
- The subcontext is interpreted as the directory, relative to the
- context-level path, where all outputs of the given logger are stored.
-
- If a path has been added with add_subcontext, it is returned.
- Otherwise, the empty string is returned.
-
- Args:
- log_name: The name of the logger.
-
- Returns:
- The output path.
- """
- return self._subcontexts.get(log_name, "")
-
- @classmethod
- def add_subcontext(cls, log_name, subcontext):
- """Store the subcontext path for this logger.
-
- Args:
- log_name: The name of the logger.
- subcontext: The relative subcontext path of output files for this
- logger.
- """
- cls._subcontexts[log_name] = subcontext
-
- def get_full_output_path(self, log_name=None):
- """Gets the full output path for this context.
-
- The full path represents the absolute path to the output directory,
- as given by <base_output_path>/<context_dir>/<subcontext>
-
- Args:
- log_name: The name of the logger. Used to specify the base output
- path and the subcontext.
-
- Returns:
- The output path.
- """
-
- path = os.path.join(
- self.get_base_output_path(log_name),
- self._get_default_context_dir(),
- self.get_subcontext(log_name),
- )
- os.makedirs(path, exist_ok=True)
- return path
-
- @property
- def identifier(self):
- raise NotImplementedError()
-
- def _get_default_base_output_path(self):
- """Gets the default base output path.
-
- This will attempt to use the ACTS logging path set up in the global
- logger.
-
- Returns:
- The logging path.
-
- Raises:
- EnvironmentError: If the ACTS logger has not been initialized.
- """
- try:
- return logging.log_path # type: ignore # Blanket ignore to enable mypy
- except AttributeError as e:
- raise EnvironmentError(
- "The ACTS logger has not been set up and"
- ' "base_output_path" has not been set.'
- ) from e
-
- def _get_default_context_dir(self):
- """Gets the default output directory for this context."""
- raise NotImplementedError()
-
-
-class RootContext(TestContext):
- """A TestContext that represents a test run."""
-
- @property
- def identifier(self):
- return "root"
-
- def _get_default_context_dir(self):
- """Gets the default output directory for this context.
-
- Logs at the root level context are placed directly in the base level
- directory, so no context-level path exists."""
- return ""
-
-
-class TestClassContext(TestContext):
- """A TestContext that represents a test class.
-
- Attributes:
- test_class: The test class instance that this context represents.
- """
-
- def __init__(self, test_class):
- """Initializes a TestClassContext for the given test class.
-
- Args:
- test_class: A test class object. Must be an instance of the test
- class, not the class object itself.
- """
- self.test_class = test_class
-
- @property
- def test_class_name(self):
- return self.test_class.__class__.__name__
-
- @property
- def identifier(self):
- return self.test_class_name
-
- def _get_default_context_dir(self):
- """Gets the default output directory for this context.
-
- For TestClassContexts, this will be the name of the test class. This is
- in line with the ACTS logger itself.
- """
- return self.test_class_name
-
-
-class TestCaseContext(TestContext):
- """A TestContext that represents a test case.
-
- Attributes:
- test_case: The string name of the test case.
- test_class: The test class instance enclosing the test case.
- """
-
- def __init__(self, test_class, test_case):
- """Initializes a TestCaseContext for the given test case.
-
- Args:
- test_class: A test class object. Must be an instance of the test
- class, not the class object itself.
- test_case: The string name of the test case.
- """
- self.test_class = test_class
- self.test_case = test_case
-
- @property
- def test_case_name(self):
- return self.test_case
-
- @property
- def test_class_name(self):
- return self.test_class.__class__.__name__
-
- @property
- def identifier(self):
- return f"{self.test_class_name}.{self.test_case_name}"
-
- def _get_default_context_dir(self):
- """Gets the default output directory for this context.
-
- For TestCaseContexts, this will be the name of the test class followed
- by the name of the test case. This is in line with the ACTS logger
- itself.
- """
- return os.path.join(self.test_class_name, self.test_case_name)
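# Illustrative sketch with a made-up test class, showing how the pieces of the
# output path compose for a test case context:
class WlanExampleTest:  # stand-in; real tests are antlion/Mobly test classes
    pass

ctx = TestCaseContext(WlanExampleTest(), "test_connect")
print(ctx.identifier)  # WlanExampleTest.test_connect
print(ctx._get_default_context_dir())  # WlanExampleTest/test_connect
# get_full_output_path() then joins:
#   <base_output_path>/WlanExampleTest/test_connect/<subcontext>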
-
-
-# stack for keeping track of the current test context
-_contexts = [RootContext()]
diff --git a/packages/antlion/controllers/OWNERS b/packages/antlion/controllers/OWNERS
deleted file mode 100644
index 5e69e8c..0000000
--- a/packages/antlion/controllers/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-per-file asus_axe11000_ap.py = martschneider@google.com
-per-file fuchsia_device.py = chcl@google.com, haydennix@google.com, jmbrenna@google.com, mnck@google.com, nickchee@google.com, sbalana@google.com, silberst@google.com, tturney@google.com
-per-file bluetooth_pts_device.py = tturney@google.com
-per-file cellular_simulator.py = iguarna@google.com, chaoyangf@google.com, codycaldwell@google.com, yixiang@google.com
-per-file openwrt_ap.py = jerrypcchen@google.com, martschneider@google.com, gmoturu@google.com, sishichen@google.com
diff --git a/packages/antlion/controllers/__init__.py b/packages/antlion/controllers/__init__.py
deleted file mode 100644
index 6d1ae5a..0000000
--- a/packages/antlion/controllers/__init__.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from . import (
- access_point,
- adb,
- android_device,
- attenuator,
- fastboot,
- fuchsia_device,
- iperf_client,
- iperf_server,
- openwrt_ap,
- packet_capture,
- pdu,
- sniffer,
-)
-
-# Reexport so static type checkers can find these modules when importing and
-# using antlion.controllers instead of "from antlion.controller import ..."
-__all__ = [
- "access_point",
- "adb",
- "android_device",
- "attenuator",
- "fastboot",
- "fuchsia_device",
- "iperf_client",
- "iperf_server",
- "openwrt_ap",
- "packet_capture",
- "pdu",
- "sniffer",
-]
diff --git a/packages/antlion/controllers/access_point.py b/packages/antlion/controllers/access_point.py
deleted file mode 100755
index 249296c..0000000
--- a/packages/antlion/controllers/access_point.py
+++ /dev/null
@@ -1,969 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import ipaddress
-import logging
-import os
-import time
-from dataclasses import dataclass
-from typing import Any, FrozenSet
-
-from mobly import logger
-
-from antlion import utils
-from antlion.capabilities.ssh import SSHConfig, SSHProvider
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces
-from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig
-from antlion.controllers.ap_lib.bridge_interface import BridgeInterface
-from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
-from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError
-from antlion.controllers.ap_lib.extended_capabilities import (
- ExtendedCapabilities,
-)
-from antlion.controllers.ap_lib.hostapd import Hostapd
-from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
-from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.radvd import Radvd
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-from antlion.controllers.ap_lib.wireless_network_management import (
- BssTransitionManagementRequest,
-)
-from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
-from antlion.controllers.utils_lib.commands import (
- command,
- ip,
- journalctl,
- route,
-)
-from antlion.controllers.utils_lib.commands.date import LinuxDateCommand
-from antlion.controllers.utils_lib.commands.tcpdump import LinuxTcpdumpCommand
-from antlion.controllers.utils_lib.ssh import connection, settings
-from antlion.runner import CalledProcessError
-from antlion.types import ControllerConfig, Json
-from antlion.validation import MapValidator
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "AccessPoint"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-
-
-class Error(Exception):
- """Error raised when there is a problem with the access point."""
-
-
-@dataclass
-class _ApInstance:
- hostapd: Hostapd
- subnet: Subnet
-
-
-# These ranges were split this way since each physical radio can have up
-# to 8 SSIDs, so the 2GHz radio uses the 192.168.1 - 8 subnets and the
-# 5GHz radio uses the 192.168.9 - 16 subnets.
-_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24"
-_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24"
-
-# The last digit of the ip for the bridge interface
-BRIDGE_IP_LAST = "100"
-
-
-def create(configs: list[ControllerConfig]) -> list[AccessPoint]:
- """Creates ap controllers from a json config.
-
- Creates an ap controller from either a list, or a single
- element. The element can either be just the hostname or a dictionary
- containing the hostname and username of the ap to connect to over ssh.
-
-    Args:
-        configs: The json configs that represent this controller.
-
-    Returns:
-        A list of AccessPoint objects, one per config.
- """
- return [AccessPoint(c) for c in configs]
-
-
-def destroy(objects: list[AccessPoint]) -> None:
- """Destroys a list of access points.
-
-    Args:
-        objects: The list of access points to destroy.
- """
- for ap in objects:
- ap.close()
-
-
-def get_info(objects: list[AccessPoint]) -> list[Json]:
- """Get information on a list of access points.
-
-    Args:
-        objects: A list of AccessPoints.
-
-    Returns:
-        A list of each AP's hostname.
- """
- return [ap.ssh_settings.hostname for ap in objects]
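# Illustrative shape of one AccessPoint controller config as consumed by
# create() and AccessPoint.__init__ below (key names inside "ssh_config" are
# defined by settings.from_config, which is not shown here; all values are
# placeholders):
example_access_point_config = {
    "ssh_config": {},  # SSH settings for reaching the AP over the network
    "ap_subnet": {  # optional overrides for the default DHCP subnets
        "2g": "192.168.1.0/24",
        "5g": "192.168.9.0/24",
    },
    "wan_interface": "eth0",  # optional, for internet-isolated testbeds
    "PduDevice": {},  # optional, enables hard_power_cycle()
}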
-
-
-class AccessPoint:
- """An access point controller.
-
- Attributes:
- ssh: The ssh connection to this ap.
- ssh_settings: The ssh settings being used by the ssh connection.
- dhcp_settings: The dhcp server settings being used.
- """
-
- def __init__(self, config: ControllerConfig) -> None:
- """
- Args:
-            config: Config for the access point from the config file.
- """
- c = MapValidator(config)
- self.ssh_settings = settings.from_config(c.get(dict, "ssh_config"))
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[Access Point|{self.ssh_settings.hostname}]",
- },
- )
- self.device_pdu_config = c.get(dict, "PduDevice", None)
- self.identifier = self.ssh_settings.hostname
-
- subnet = MapValidator(c.get(dict, "ap_subnet", {}))
- self._AP_2G_SUBNET_STR = subnet.get(
- str, "2g", _AP_2GHZ_SUBNET_STR_DEFAULT
- )
- self._AP_5G_SUBNET_STR = subnet.get(
- str, "5g", _AP_5GHZ_SUBNET_STR_DEFAULT
- )
-
- self._AP_2G_SUBNET = Subnet(
- ipaddress.IPv4Network(self._AP_2G_SUBNET_STR)
- )
- self._AP_5G_SUBNET = Subnet(
- ipaddress.IPv4Network(self._AP_5G_SUBNET_STR)
- )
-
- self.ssh = connection.SshConnection(self.ssh_settings)
-
- # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider
- self.ssh_provider = SSHProvider(
- SSHConfig(
- self.ssh_settings.username,
- self.ssh_settings.hostname,
- self.ssh_settings.identity_file,
- port=self.ssh_settings.port,
- ssh_binary=self.ssh_settings.executable,
- connect_timeout=90,
- )
- )
-
- # Singleton utilities for running various commands.
- self._ip_cmd = command.require(ip.LinuxIpCommand(self.ssh))
- self._route_cmd = command.require(route.LinuxRouteCommand(self.ssh))
- self._journalctl_cmd = command.require(
- journalctl.LinuxJournalctlCommand(self.ssh)
- )
-
- # A map from network interface name to _ApInstance objects representing
- # the hostapd instance running against the interface.
- self._aps: dict[str, _ApInstance] = dict()
- self._dhcp: DhcpServer | None = None
- self._dhcp_bss: dict[str, Subnet] = dict()
- self._radvd: Radvd | None = None
- self.bridge = BridgeInterface(self.ssh)
- self.iwconfig = ApIwconfig(self)
-
- # Check to see if wan_interface is specified in acts_config for tests
- # isolated from the internet and set this override.
- self.interfaces = ApInterfaces(self, c.get(str, "wan_interface", None))
-
- # Get needed interface names and initialize the unnecessary ones.
- self.wan = self.interfaces.get_wan_interface()
- self.wlan = self.interfaces.get_wlan_interface()
- self.wlan_2g = self.wlan[0]
- self.wlan_5g = self.wlan[1]
- self.lan = self.interfaces.get_lan_interface()
- self._initial_ap()
- self.setup_bridge = False
-
- # Allow use of tcpdump
- self.tcpdump = LinuxTcpdumpCommand(self.ssh_provider)
-
- # Access points are not given internet access, so their system time needs to be
- # manually set to be accurate.
- LinuxDateCommand(self.ssh_provider).sync()
-
- def _initial_ap(self) -> None:
- """Initial AP interfaces.
-
- Bring down hostapd if instance is running, bring down all bridge
- interfaces.
- """
- # This is necessary for Gale/Whirlwind flashed with dev channel image
- # Unused interfaces such as existing hostapd daemon, guest, mesh
- # interfaces need to be brought down as part of the AP initialization
- # process, otherwise test would fail.
- try:
- self.ssh.run("stop wpasupplicant")
- except CalledProcessError:
- self.log.info("No wpasupplicant running")
- try:
- self.ssh.run("stop hostapd")
- except CalledProcessError:
- self.log.info("No hostapd running")
- # Bring down all wireless interfaces
- for iface in self.wlan:
- WLAN_DOWN = f"ip link set {iface} down"
- self.ssh.run(WLAN_DOWN)
- # Bring down all bridge interfaces
- bridge_interfaces = self.interfaces.get_bridge_interface()
- for iface in bridge_interfaces:
- BRIDGE_DOWN = f"ip link set {iface} down"
- BRIDGE_DEL = f"brctl delbr {iface}"
- self.ssh.run(BRIDGE_DOWN)
- self.ssh.run(BRIDGE_DEL)
-
- def start_ap(
- self,
- hostapd_config: HostapdConfig,
- radvd_config: RadvdConfig | None = None,
- setup_bridge: bool = False,
- is_nat_enabled: bool = True,
- additional_parameters: dict[str, Any] | None = None,
- ) -> list[str]:
-        """Starts an ap using a set of configurations.
-
- This will start an ap on this host. To start an ap the controller
- selects a network interface to use based on the configs given. It then
- will start up hostapd on that interface. Next a subnet is created for
-        the network interface and the dhcp server is refreshed to give out ips
- for that subnet for any device that connects through that interface.
-
- Args:
- hostapd_config: The configurations to use when starting up the ap.
- radvd_config: The IPv6 configuration to use when starting up the ap.
-            setup_bridge: Whether to bridge the LAN interface and the WLAN interface.
- Only one WLAN interface can be bridged with the LAN interface
- and none of the guest networks can be bridged.
- is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
- able to access the internet if the WAN port is connected to the
- internet.
-            additional_parameters: Parameters that can be sent directly into
-                the hostapd config file. This can be used for debugging or for
-                adding one-off parameters into the config.
-
- Returns:
- An identifier for each ssid being started. These identifiers can be
- used later by this controller to control the ap.
-
- Raises:
- Error: When the ap can't be brought up.
- """
- if additional_parameters is None:
- additional_parameters = {}
-
- if hostapd_config.frequency < 5000:
- interface = self.wlan_2g
- subnet = self._AP_2G_SUBNET
- else:
- interface = self.wlan_5g
- subnet = self._AP_5G_SUBNET
-
-        # radvd requires the interface to have an IPv6 link-local address.
- if radvd_config:
- self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0")
- self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1")
-
- # In order to handle dhcp servers on any interface, the initiation of
- # the dhcp server must be done after the wlan interfaces are figured
- # out as opposed to being in __init__
- self._dhcp = DhcpServer(self.ssh, interface=interface)
-
- # For multi bssid configurations the mac address
- # of the wireless interface needs to have enough space to mask out
-        # up to 8 different mac addresses. So for one interface the range is
- # hex 0-7 and for the other the range is hex 8-f.
- ip = self.ssh.run(["ip", "link", "show", interface])
-
- # Example output:
- # 5: wlan0: <BROADCAST,MULTICAST> mtu 1500 qdisc mq state DOWN mode DEFAULT group default qlen 1000
- # link/ether f4:f2:6d:aa:99:28 brd ff:ff:ff:ff:ff:ff
-
- lines = ip.stdout.decode("utf-8").splitlines()
- if len(lines) != 2:
- raise RuntimeError(
- f"Expected 2 lines from ip link show, got {len(lines)}"
- )
- tokens = lines[1].split()
- if len(tokens) != 4:
- raise RuntimeError(
- f"Expected 4 tokens from ip link show, got {len(tokens)}"
- )
- interface_mac_orig = tokens[1]
-
- if interface == self.wlan_5g:
- hostapd_config.bssid = f"{interface_mac_orig[:-1]}0"
- last_octet = 1
- elif interface == self.wlan_2g:
- hostapd_config.bssid = f"{interface_mac_orig[:-1]}8"
- last_octet = 9
- elif interface in self._aps:
- raise ValueError(
- "No WiFi interface available for AP on "
- f"channel {hostapd_config.channel}"
- )
- else:
- raise ValueError(f"Invalid WLAN interface: {interface}")
-
- apd = Hostapd(self.ssh, interface)
- new_instance = _ApInstance(hostapd=apd, subnet=subnet)
- self._aps[interface] = new_instance
-
- # Turn off the DHCP server, we're going to change its settings.
- self.stop_dhcp()
- # Clear all routes to prevent old routes from interfering.
- self._route_cmd.clear_routes(net_interface=interface)
- # Add IPv6 link-local route so packets destined to the AP will be
- # processed by the AP. This is necessary if an iperf server is running
- # on the AP, but not for traffic handled by the Linux networking stack
- # such as ping.
- if radvd_config:
- self._route_cmd.add_route(
- interface, ipaddress.IPv6Interface("fe80::/64")
- )
-
- self._dhcp_bss = dict()
- if hostapd_config.bss_lookup:
- # The self._dhcp_bss dictionary is created to hold the key/value
- # pair of the interface name and the ip scope that will be
- # used for the particular interface. The a, b, c, d
- # variables below are the octets for the ip address. The
- # third octet is then incremented for each interface that
- # is requested. This part is designed to bring up the
- # hostapd interfaces and not the DHCP servers for each
- # interface.
- counter = 1
- for iface in hostapd_config.bss_lookup:
- hostapd_config.bss_lookup[iface].bssid = (
- interface_mac_orig[:-1] + hex(last_octet)[-1:]
- )
- self._route_cmd.clear_routes(net_interface=str(iface))
- if interface is self.wlan_2g:
- starting_ip_range = self._AP_2G_SUBNET_STR
- else:
- starting_ip_range = self._AP_5G_SUBNET_STR
- a, b, c, d = starting_ip_range.split(".")
- self._dhcp_bss[iface] = Subnet(
- ipaddress.IPv4Network(f"{a}.{b}.{int(c) + counter}.{d}")
- )
- counter = counter + 1
- last_octet = last_octet + 1
-
- apd.start(hostapd_config, additional_parameters=additional_parameters)
-
-        # The DHCP server requires interfaces to have ips and routes before
- # the server will come up.
- interface_ip = ipaddress.IPv4Interface(
- f"{subnet.router}/{subnet.network.prefixlen}"
- )
- bridge_interface_name = "eth_test"
- if setup_bridge is True:
- interfaces = [interface]
- if self.lan:
- interfaces.append(self.lan)
- self.create_bridge(bridge_interface_name, interfaces)
- self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
- else:
- self._ip_cmd.set_ipv4_address(interface, interface_ip)
- if hostapd_config.bss_lookup:
- # This loop goes through each interface that was setup for
- # hostapd and assigns the DHCP scopes that were defined but
-            # not used during the hostapd loop above. Each entry maps an
-            # interface name to its DHCP subnet.
- for iface, subnet in self._dhcp_bss.items():
- bss_interface_ip = ipaddress.IPv4Interface(
- f"{subnet.router}/{subnet.network.prefixlen}"
- )
- self._ip_cmd.set_ipv4_address(iface, bss_interface_ip)
-
- # Restart the DHCP server with our updated list of subnets.
- configured_subnets = self.get_configured_subnets()
- dhcp_conf = DhcpConfig(subnets=configured_subnets)
- self.start_dhcp(dhcp_conf=dhcp_conf)
- if is_nat_enabled:
- self.start_nat()
- self.enable_forwarding()
- else:
- self.stop_nat()
- self.enable_forwarding()
- if radvd_config:
- radvd_interface = (
- bridge_interface_name if setup_bridge else interface
- )
- self._radvd = Radvd(self.ssh, radvd_interface)
- self._radvd.start(radvd_config)
- else:
- self._radvd = None
-
- bss_interfaces = [bss for bss in hostapd_config.bss_lookup]
- bss_interfaces.append(interface)
-
- return bss_interfaces
-
- def get_configured_subnets(self) -> list[Subnet]:
- """Get the list of configured subnets on the access point.
-
-        This allows consumers of the access point objects to create custom DHCP
- configs with the correct subnets.
-
- Returns: a list of Subnet objects
- """
- configured_subnets = [x.subnet for x in self._aps.values()]
- for k, v in self._dhcp_bss.items():
- configured_subnets.append(v)
- return configured_subnets
-
- def start_dhcp(self, dhcp_conf: DhcpConfig) -> None:
- """Start a DHCP server for the specified subnets.
-
- This allows consumers of the access point objects to control DHCP.
-
- Args:
- dhcp_conf: A DhcpConfig object.
-
- Raises:
- Error: Raised when a dhcp server error is found.
- """
- if self._dhcp is not None:
- self._dhcp.start(config=dhcp_conf)
-
- def stop_dhcp(self) -> None:
- """Stop DHCP for this AP object.
-
- This allows consumers of the access point objects to control DHCP.
- """
- if self._dhcp is not None:
- self._dhcp.stop()
-
- def get_systemd_journal(self) -> str:
- """Get systemd journal logs from this current boot."""
- return self._journalctl_cmd.logs()
-
- def get_dhcp_logs(self) -> str | None:
- """Get DHCP logs for this AP object.
-
- This allows consumers of the access point objects to validate DHCP
- behavior.
-
- Returns:
-            A string of the dhcp server logs, or None if a DHCP server has not
- been started.
- """
- if self._dhcp is not None:
- return self._dhcp.get_logs()
- return None
-
- def get_hostapd_logs(self) -> dict[str, str]:
- """Get hostapd logs for all interfaces on AP object.
-
- This allows consumers of the access point objects to validate hostapd
- behavior.
-
- Returns: A dict with {interface: log} from hostapd instances.
- """
- hostapd_logs: dict[str, str] = dict()
- for iface, ap in self._aps.items():
- hostapd_logs[iface] = ap.hostapd.pull_logs()
- return hostapd_logs
-
- def get_radvd_logs(self) -> str | None:
- """Get radvd logs for this AP object.
-
- This allows consumers of the access point objects to validate radvd
- behavior.
-
- Returns:
-            A string of the radvd logs, or None if a radvd server has not been
- started.
- """
- if self._radvd:
- return self._radvd.pull_logs()
- return None
-
- def download_ap_logs(self, path: str) -> None:
- """Download all available logs to path.
-
- This convenience method gets all the logs, dhcp, hostapd, radvd. It
- writes these to the given path.
-
- Args:
- path: Path to write logs to.
- """
- timestamp = logger.normalize_log_line_timestamp(
- logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
- )
-
- dhcp_log = self.get_dhcp_logs()
- if dhcp_log:
- dhcp_log_path = os.path.join(path, f"ap_dhcp_{timestamp}.log")
- with open(dhcp_log_path, "a") as f:
- f.write(dhcp_log)
- self.log.debug(f"Wrote DHCP logs to {dhcp_log_path}")
-
- hostapd_logs = self.get_hostapd_logs()
- for interface in hostapd_logs:
- hostapd_log_path = os.path.join(
- path,
- f"ap_hostapd_{interface}_{timestamp}.log",
- )
- with open(hostapd_log_path, "a") as f:
- f.write(hostapd_logs[interface])
- self.log.debug(f"Wrote hostapd logs to {hostapd_log_path}")
-
- radvd_log = self.get_radvd_logs()
- if radvd_log:
- radvd_log_path = os.path.join(path, f"ap_radvd_{timestamp}.log")
- with open(radvd_log_path, "a") as f:
- f.write(radvd_log)
- self.log.debug(f"Wrote radvd logs to {radvd_log_path}")
-
- systemd_journal = self.get_systemd_journal()
- systemd_journal_path = os.path.join(path, f"ap_systemd_{timestamp}.log")
- with open(systemd_journal_path, "a") as f:
- f.write(systemd_journal)
- self.log.debug(f"Wrote systemd journal to {systemd_journal_path}")
-
- def enable_forwarding(self) -> None:
- """Enable IPv4 and IPv6 forwarding on the AP.
-
-        When forwarding is enabled, the access point is able to route IP packets
-        between devices on its different network interfaces.
- """
- self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward")
- self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding")
-
- def start_nat(self) -> None:
- """Start NAT on the AP.
-
- This allows consumers of the access point objects to enable NAT
- on the AP.
-
- Note that this is currently a global setting, since we don't
- have per-interface masquerade rules.
- """
- # The following three commands are needed to enable NAT between
- # the WAN and LAN/WLAN ports. This means anyone connecting to the
- # WLAN/LAN ports will be able to access the internet if the WAN port
- # is connected to the internet.
- self.ssh.run("iptables -t nat -F")
- self.ssh.run(
- f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE"
- )
-
- def stop_nat(self) -> None:
- """Stop NAT on the AP.
-
- This allows consumers of the access point objects to disable NAT on the
- AP.
-
- Note that this is currently a global setting, since we don't have
- per-interface masquerade rules.
- """
- self.ssh.run("iptables -t nat -F")
-
- def create_bridge(self, bridge_name: str, interfaces: list[str]) -> None:
- """Create the specified bridge and bridge the specified interfaces.
-
- Args:
- bridge_name: The name of the bridge to create.
- interfaces: A list of interfaces to add to the bridge.
- """
-
- # Create the bridge interface
- self.ssh.run(f"brctl addbr {bridge_name}")
-
- for interface in interfaces:
- self.ssh.run(f"brctl addif {bridge_name} {interface}")
-
- self.ssh.run(f"ip link set {bridge_name} up")
-
- def remove_bridge(self, bridge_name: str) -> None:
- """Removes the specified bridge
-
- Args:
- bridge_name: The name of the bridge to remove.
- """
- # Check if the bridge exists.
- #
- # Cases where it may not are if we failed to initialize properly
- #
- # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn
- # down the bridge once, but we got called for each band.
- result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True)
-
- # If the bridge exists, we'll get an exit_status of 0, indicating
- # success, so we can continue and remove the bridge.
- if result.returncode == 0:
- self.ssh.run(f"ip link set {bridge_name} down")
- self.ssh.run(f"brctl delbr {bridge_name}")
-
- def get_bssid_from_ssid(
- self, ssid: str, band: hostapd_constants.BandType
- ) -> str:
- """Gets the BSSID from a provided SSID
-
- Args:
- ssid: An SSID string.
- band: 2G or 5G Wifi band.
-
- Returns:
-            The BSSID on the AP hosting the given SSID on the given band.
-
- Raises:
- RuntimeError: when interface, ssid, or addr cannot be found.
- """
- match band:
- case hostapd_constants.BandType.BAND_2G:
- interface = self.wlan_2g
- case hostapd_constants.BandType.BAND_5G:
- interface = self.wlan_5g
-
- # Get the interface name associated with the given ssid.
- iw = self.ssh.run(["iw", "dev", interface, "info"])
- if b"command failed: No such device" in iw.stderr:
- raise RuntimeError(
- f'iw dev did not contain interface "{interface}"'
- )
-
- iw_out = iw.stdout.decode("utf-8")
- iw_lines = iw_out.splitlines()
-
- for line in iw_lines:
- if "ssid" in line and ssid in line:
- # Found the right interface.
- for line in iw_lines:
- if "addr" in line:
- tokens = line.split()
- if len(tokens) != 2:
- raise RuntimeError(
- f"Expected iw dev info addr to have 2 tokens, got {tokens}"
- )
- return tokens[1]
-
- raise RuntimeError(
- f"iw dev info contained ssid but not addr: \n{iw_out}"
- )
-
- raise RuntimeError(f'iw dev did not contain ssid "{ssid}"')
-
- def stop_ap(self, identifier: str) -> None:
- """Stops a running ap on this controller.
-
- Args:
-            identifier: The identifier of the ap that should be taken down.
- """
-
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
-
- if self._radvd:
- self._radvd.stop()
- try:
- self.stop_dhcp()
- except NoInterfaceError:
- pass
- self.stop_nat()
- instance.hostapd.stop()
- self._ip_cmd.clear_ipv4_addresses(identifier)
-
- del self._aps[identifier]
- bridge_interfaces = self.interfaces.get_bridge_interface()
- for iface in bridge_interfaces:
- BRIDGE_DOWN = f"ip link set {iface} down"
- BRIDGE_DEL = f"brctl delbr {iface}"
- self.ssh.run(BRIDGE_DOWN)
- self.ssh.run(BRIDGE_DEL)
-
- def stop_all_aps(self) -> None:
- """Stops all running aps on this device."""
-
- for ap in list(self._aps.keys()):
- self.stop_ap(ap)
-
- def close(self) -> None:
- """Called to take down the entire access point.
-
- When called will stop all aps running on this host, shutdown the dhcp
- server, and stop the ssh connection.
- """
-
- if self._aps:
- self.stop_all_aps()
- self.ssh.close()
-
- def generate_bridge_configs(
- self, channel: int
- ) -> tuple[str, str | None, str]:
- """Generate a list of configs for a bridge between LAN and WLAN.
-
- Args:
-            channel: the channel the WLAN interface is brought up on
- Returns:
- configs: tuple containing iface_wlan, iface_lan and bridge_ip
- """
-
- if channel < 15:
- iface_wlan = self.wlan_2g
- subnet_str = self._AP_2G_SUBNET_STR
- else:
- iface_wlan = self.wlan_5g
- subnet_str = self._AP_5G_SUBNET_STR
-
- iface_lan = self.lan
-
- a, b, c, _ = subnet_str.strip("/24").split(".")
- bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}"
-
- return (iface_wlan, iface_lan, bridge_ip)
-
- def ping(
- self,
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 56,
- additional_ping_params: str = "",
- ) -> utils.PingResult:
- """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
- return utils.ping(
- self.ssh,
- dest_ip,
- count=count,
- interval=interval,
- timeout=timeout,
- size=size,
- additional_ping_params=additional_ping_params,
- )
-
- def hard_power_cycle(
- self,
- pdus: list[PduDevice],
- ) -> None:
- """Kills, then restores power to AccessPoint, verifying it goes down and
- comes back online cleanly.
-
- Args:
- pdus: PDUs in the testbed
-        Raises:
- Error, if no PduDevice is provided in AccessPoint config.
- ConnectionError, if AccessPoint fails to go offline or come back.
- """
- if not self.device_pdu_config:
- raise Error("No PduDevice provided in AccessPoint config.")
-
- self._journalctl_cmd.save_and_reset()
-
- self.log.info("Power cycling")
- ap_pdu, ap_pdu_port = get_pdu_port_for_device(
- self.device_pdu_config, pdus
- )
-
- self.log.info("Killing power")
- ap_pdu.off(ap_pdu_port)
-
- self.log.info("Verifying AccessPoint is unreachable.")
- self.ssh_provider.wait_until_unreachable()
- self.log.info("AccessPoint is unreachable as expected.")
-
- self._aps.clear()
-
- self.log.info("Restoring power")
- ap_pdu.on(ap_pdu_port)
-
- self.log.info("Waiting for AccessPoint to become available via SSH.")
- self.ssh_provider.wait_until_reachable()
- self.log.info("AccessPoint responded to SSH.")
-
- # Allow 5 seconds for OS to finish getting set up
- time.sleep(5)
- self._initial_ap()
- self.log.info("Power cycled successfully")
-
- def channel_switch(
- self, identifier: str, channel_num: int, csa_beacon_count: int = 10
- ) -> None:
- """Switch to a different channel on the given AP."""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- self.log.info(f"channel switch to channel {channel_num}")
- instance.hostapd.channel_switch(channel_num, csa_beacon_count)
-
- def get_current_channel(self, identifier: str) -> int:
- """Find the current channel on the given AP."""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- return instance.hostapd.get_current_channel()
-
- def get_stas(self, identifier: str) -> set[str]:
- """Return MAC addresses of all associated STAs on the given AP."""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- return instance.hostapd.get_stas()
-
- def sta_authenticated(self, identifier: str, sta_mac: str) -> bool:
- """Is STA authenticated?"""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- return instance.hostapd.sta_authenticated(sta_mac)
-
- def sta_associated(self, identifier: str, sta_mac: str) -> bool:
- """Is STA associated?"""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- return instance.hostapd.sta_associated(sta_mac)
-
- def sta_authorized(self, identifier: str, sta_mac: str) -> bool:
- """Is STA authorized (802.1X controlled port open)?"""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- return instance.hostapd.sta_authorized(sta_mac)
-
- def get_sta_extended_capabilities(
- self, identifier: str, sta_mac: str
- ) -> ExtendedCapabilities:
- """Get extended capabilities for the given STA, as seen by the AP."""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- return instance.hostapd.get_sta_extended_capabilities(sta_mac)
-
- def send_bss_transition_management_req(
- self,
- identifier: str,
- sta_mac: str,
- request: BssTransitionManagementRequest,
- ) -> None:
- """Send a BSS Transition Management request to an associated STA."""
- instance = self._aps.get(identifier)
- if instance is None:
- raise ValueError(f"Invalid identifier {identifier} given")
- instance.hostapd.send_bss_transition_management_req(sta_mac, request)
-
-
-def setup_ap(
- access_point: AccessPoint,
- profile_name: str,
- channel: int,
- ssid: str,
- mode: str | None = None,
- preamble: bool | None = None,
- beacon_interval: int | None = None,
- dtim_period: int | None = None,
- frag_threshold: int | None = None,
- rts_threshold: int | None = None,
- force_wmm: bool | None = None,
- hidden: bool | None = False,
- security: Security | None = None,
- pmf_support: int | None = None,
- additional_ap_parameters: dict[str, Any] | None = None,
- n_capabilities: list[Any] | None = None,
- ac_capabilities: list[Any] | None = None,
- vht_bandwidth: int | None = None,
- wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
- setup_bridge: bool = False,
- is_ipv6_enabled: bool = False,
- is_nat_enabled: bool = True,
-) -> list[str]:
- """Creates a hostapd profile and runs it on an ap. This is a convenience
- function that allows us to start an ap with a single function, without first
- creating a hostapd config.
-
- Args:
- access_point: An ACTS access_point controller
- profile_name: The profile name of one of the hostapd ap presets.
- channel: What channel to set the AP to.
- preamble: Whether to set short or long preamble
- beacon_interval: The beacon interval
- dtim_period: Length of dtim period
- frag_threshold: Fragmentation threshold
- rts_threshold: RTS threshold
- force_wmm: Enable WMM or not
- hidden: Advertise the SSID or not
- security: What security to enable.
-        pmf_support: Whether pmf is disabled, enabled, or required
- additional_ap_parameters: Additional parameters to send the AP.
- wnm_features: WNM features to enable on the AP.
-        setup_bridge: Whether to bridge the LAN interface and the WLAN
-            interface. Only one WLAN interface can be bridged with the LAN
-            interface and none of the guest networks can be bridged.
-        is_ipv6_enabled: If True, start an IPv6 router advertisement daemon
- is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
- to access the internet if the WAN port is connected to the internet.
-
- Returns:
- An identifier for each ssid being started. These identifiers can be
- used later by this controller to control the ap.
-
- Raises:
- Error: When the ap can't be brought up.
- """
- if additional_ap_parameters is None:
- additional_ap_parameters = {}
-
- ap = create_ap_preset(
- profile_name=profile_name,
- iface_wlan_2g=access_point.wlan_2g,
- iface_wlan_5g=access_point.wlan_5g,
- channel=channel,
- ssid=ssid,
- mode=mode,
- short_preamble=preamble,
- beacon_interval=beacon_interval,
- dtim_period=dtim_period,
- frag_threshold=frag_threshold,
- rts_threshold=rts_threshold,
- force_wmm=force_wmm,
- hidden=hidden,
- bss_settings=[],
- security=security,
- pmf_support=pmf_support,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_bandwidth=vht_bandwidth,
- wnm_features=wnm_features,
- )
- return access_point.start_ap(
- hostapd_config=ap,
- radvd_config=RadvdConfig() if is_ipv6_enabled else None,
- setup_bridge=setup_bridge,
- is_nat_enabled=is_nat_enabled,
- additional_parameters=additional_ap_parameters,
- )
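# Illustrative end-to-end call (SSID and channel are placeholders, and
# access_point is an AccessPoint created elsewhere); "whirlwind" is assumed
# here to be one of the profile presets accepted by create_ap_preset:
identifiers = setup_ap(
    access_point=access_point,
    profile_name="whirlwind",
    channel=36,
    ssid="antlion-example",
    is_ipv6_enabled=True,
)
# Each returned identifier names a hostapd interface that can later be passed
# to access_point.stop_ap(), get_stas(), channel_switch(), and so on.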
diff --git a/packages/antlion/controllers/adb.py b/packages/antlion/controllers/adb.py
deleted file mode 100644
index 4f34769..0000000
--- a/packages/antlion/controllers/adb.py
+++ /dev/null
@@ -1,304 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, attr-defined"
-import logging
-import re
-import shlex
-import shutil
-
-from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
-from antlion.libs.proc import job
-
-DEFAULT_ADB_TIMEOUT = 60
-DEFAULT_ADB_PULL_TIMEOUT = 180
-
-ADB_REGEX = re.compile("adb:")
-# Uses a regex to be backwards compatible with previous versions of ADB
-# (N and above add the serial to the error msg).
-DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found")
-DEVICE_OFFLINE_REGEX = re.compile("error: device offline")
-# Raised when adb forward commands fail to forward a port.
-CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:")
-# Expected output is "Android Debug Bridge version 1.0.XX".
-ADB_VERSION_REGEX = re.compile(r"Android Debug Bridge version 1.0.(\d+)")
-GREP_REGEX = re.compile(r"grep(\s+)")
-
-ROOT_USER_ID = "0"
-SHELL_USER_ID = "2000"
-
-
-def parsing_parcel_output(output):
-    """Parse adb output that is in Parcel format.
-
-    Example input:
- Result: Parcel(
- 0x00000000: 00000000 00000014 00390038 00340031 '........8.9.1.4.'
- 0x00000010: 00300038 00300030 00300030 00340032 '8.0.0.0.0.0.2.4.'
- 0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
- 0x00000030: 00000000 '.... ')
- """
- output = "".join(re.findall(r"'(.*)'", output))
- return re.sub(r"[.\s]", "", output)
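# Worked example (derived from the sample dump in the docstring above): the
# quoted columns are joined and the dots/whitespace stripped, leaving only the
# embedded digit string.
_sample = """Result: Parcel(
  0x00000000: 00000000 00000014 00390038 00340031 '........8.9.1.4.'
  0x00000010: 00300038 00300030 00300030 00340032 '8.0.0.0.0.0.2.4.'
  0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
  0x00000030: 00000000 '.... ')"""
assert parsing_parcel_output(_sample) == "89148000002445538231"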
-
-
-class AdbProxy(object):
- """Proxy class for ADB.
-
-    For syntactic reasons, the '-' in adb commands needs to be replaced with
- '_'. Can directly execute adb commands on an object:
- >> adb = AdbProxy(<serial>)
- >> adb.start_server()
- >> adb.devices() # will return the console output of "adb devices".
- """
-
- def __init__(self, serial="", ssh_connection=None):
- """Construct an instance of AdbProxy.
-
- Args:
- serial: str serial number of Android device from `adb devices`
- ssh_connection: SshConnection instance if the Android device is
- connected to a remote host that we can reach via SSH.
- """
- self.serial = serial
- self._server_local_port = None
- adb_path = shutil.which("adb")
- adb_cmd = [shlex.quote(adb_path)] # type: ignore # Blanket ignore to enable mypy
- if serial:
- adb_cmd.append(f"-s {serial}")
- if ssh_connection is not None:
- # Kill all existing adb processes on the remote host (if any)
- # Note that if there are none, then pkill exits with non-zero status
- ssh_connection.run("pkill adb", ignore_status=True)
- # Copy over the adb binary to a temp dir
- temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
- ssh_connection.send_file(adb_path, temp_dir)
- # Start up a new adb server running as root from the copied binary.
- remote_adb_cmd = "%s/adb %s root" % (
- temp_dir,
- "-s %s" % serial if serial else "",
- )
- ssh_connection.run(remote_adb_cmd)
- # Proxy a local port to the adb server port
- local_port = ssh_connection.create_ssh_tunnel(5037)
- self._server_local_port = local_port
-
- if self._server_local_port:
- adb_cmd.append(f"-P {local_port}")
- self.adb_str = " ".join(adb_cmd)
- self._ssh_connection = ssh_connection
-
- def get_user_id(self):
- """Returns the adb user. Either 2000 (shell) or 0 (root)."""
- return self.shell("id -u")
-
- def is_root(self, user_id=None):
- """Checks if the user is root.
-
- Args:
- user_id: if supplied, the id to check against.
- Returns:
- True if the user is root. False otherwise.
- """
- if not user_id:
- user_id = self.get_user_id()
- return user_id == ROOT_USER_ID
-
- def ensure_root(self):
- """Ensures the user is root after making this call.
-
- Note that this will still fail if the device is a user build, as root
- is not accessible from a user build.
-
- Returns:
- False if the device is a user build. True otherwise.
- """
- self.ensure_user(ROOT_USER_ID)
- return self.is_root()
-
- def ensure_user(self, user_id=SHELL_USER_ID):
- """Ensures the user is set to the given user.
-
- Args:
- user_id: The id of the user.
- """
- if self.is_root(user_id):
- self.root()
- else:
- self.unroot()
- self.wait_for_device()
- return self.get_user_id() == user_id
-
- def _exec_cmd(self, cmd, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
- """Executes adb commands in a new shell.
-
- This is specific to executing adb commands.
-
- Args:
- cmd: A string or list that is the adb command to execute.
-
- Returns:
- The stdout of the adb command.
-
- Raises:
- AdbError for errors in ADB operations.
- AdbCommandError for errors from commands executed through ADB.
- """
- if isinstance(cmd, list):
- cmd = " ".join(cmd)
- result = job.run(cmd, ignore_status=True, timeout_sec=timeout)
- ret, out, err = result.exit_status, result.stdout, result.stderr
-
- if any(
- pattern.match(err) # type: ignore # Blanket ignore to enable mypy
- for pattern in [
- ADB_REGEX,
- DEVICE_OFFLINE_REGEX,
- DEVICE_NOT_FOUND_REGEX,
- CANNOT_BIND_LISTENER_REGEX,
- ]
- ):
- raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
- if "Result: Parcel" in out: # type: ignore # Blanket ignore to enable mypy
- return parsing_parcel_output(out)
- if ignore_status or (ret == 1 and GREP_REGEX.search(cmd)):
- return out or err
- if ret != 0:
- raise AdbCommandError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
- return out
-
- def _exec_adb_cmd(self, name, arg_str, **kwargs):
- return self._exec_cmd(f"{self.adb_str} {name} {arg_str}", **kwargs)
-
- def _exec_cmd_nb(self, cmd, **kwargs):
-        """Executes adb commands in a new shell, non-blocking.
-
-        Args:
-            cmd: A string that is the adb command to execute.
-
- """
- return job.run_async(cmd, **kwargs)
-
- def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
- return self._exec_cmd_nb(f"{self.adb_str} {name} {arg_str}", **kwargs)
-
- def tcp_forward(self, host_port, device_port):
- """Starts tcp forwarding from localhost to this android device.
-
- Args:
- host_port: Port number to use on localhost
- device_port: Port number to use on the android device.
-
- Returns:
- Forwarded port on host as int or command output string on error
- """
- if self._ssh_connection:
- # We have to hop through a remote host first.
- # 1) Find some free port on the remote host's localhost
- # 2) Setup forwarding between that remote port and the requested
- # device port
- remote_port = self._ssh_connection.find_free_port()
- host_port = self._ssh_connection.create_ssh_tunnel(
- remote_port, local_port=host_port
- )
- output = self.forward(
- f"tcp:{host_port} tcp:{device_port}", ignore_status=True
- )
-        # If host_port is 0, the output will be the selected port.
-        # Otherwise, there will be no output upon successfully
-        # forwarding the requested port.
- if not output:
- return host_port
- try:
- output_int = int(output)
- except ValueError:
- return output
- return output_int
-
- def remove_tcp_forward(self, host_port):
- """Stop tcp forwarding a port from localhost to this android device.
-
- Args:
- host_port: Port number to use on localhost
- """
- if self._ssh_connection:
- remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
- if remote_port is None:
- logging.warning(
- "Cannot close unknown forwarded tcp port: %d", host_port
- )
- return
- # The actual port we need to disable via adb is on the remote host.
- host_port = remote_port
- self.forward(f"--remove tcp:{host_port}")
-
- def getprop(self, prop_name):
- """Get a property of the device.
-
- This is a convenience wrapper for "adb shell getprop xxx".
-
- Args:
- prop_name: A string that is the name of the property to get.
-
- Returns:
- A string that is the value of the property, or None if the property
- doesn't exist.
- """
- return self.shell(f"getprop {prop_name}")
-
- # TODO: This should be abstracted out into an object like the other shell
- # command.
- def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
- return self._exec_adb_cmd(
- "shell",
- shlex.quote(command),
- ignore_status=ignore_status,
- timeout=timeout,
- )
-
- def shell_nb(self, command):
- return self._exec_adb_cmd_nb("shell", shlex.quote(command))
-
- def __getattr__(self, name):
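-        # Unknown attributes are treated as adb subcommands: underscores in
-        # the name map to dashes, so e.g. wait_for_device() runs
-        # `adb wait-for-device`.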
- def adb_call(*args, **kwargs):
- clean_name = name.replace("_", "-")
- if (
- clean_name in ["pull", "push", "remount"]
- and "timeout" not in kwargs
- ):
- kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT
- arg_str = " ".join(str(elem) for elem in args)
- return self._exec_adb_cmd(clean_name, arg_str, **kwargs)
-
- return adb_call
-
- def get_version_number(self):
- """Returns the version number of ADB as an int (XX in 1.0.XX).
-
- Raises:
- AdbError if the version number is not found/parsable.
- """
- version_output = self.version()
- match = re.search(ADB_VERSION_REGEX, version_output)
-
- if not match:
- logging.error(
- "Unable to capture ADB version from adb version "
- "output: %s" % version_output
- )
- raise AdbError("adb version", version_output, "", "")
- return int(match.group(1))
diff --git a/packages/antlion/controllers/adb_lib/__init__.py b/packages/antlion/controllers/adb_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/adb_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/adb_lib/error.py b/packages/antlion/controllers/adb_lib/error.py
deleted file mode 100644
index 8cb8fb8..0000000
--- a/packages/antlion/controllers/adb_lib/error.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion import error
-
-
-class AdbError(error.ActsError):
- """Raised when there is an error in adb operations."""
-
- def __init__(self, cmd, stdout, stderr, ret_code):
- super().__init__()
- self.cmd = cmd
- self.stdout = stdout
- self.stderr = stderr
- self.ret_code = ret_code
-
- def __str__(self):
- return (
- "Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s"
- ) % (
- self.cmd,
- self.ret_code,
- self.stdout,
- self.stderr,
- )
-
-
-class AdbCommandError(AdbError):
- """Raised when there is an error in the command being run through ADB."""
diff --git a/packages/antlion/controllers/android_device.py b/packages/antlion/controllers/android_device.py
deleted file mode 100755
index 4ca2389..0000000
--- a/packages/antlion/controllers/android_device.py
+++ /dev/null
@@ -1,1869 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, type-arg, union-attr, index, call-overload, var-annotated, attr-defined, assignment"
-from __future__ import annotations
-
-import collections
-import logging
-import math
-import os
-import re
-import shutil
-import socket
-import time
-from datetime import datetime
-
-from mobly import logger
-
-from antlion import context, utils
-from antlion.controllers import adb, fastboot
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.android_lib import errors
-from antlion.controllers.android_lib import events as android_events
-from antlion.controllers.android_lib import logcat, services
-from antlion.controllers.sl4a_lib import sl4a_manager
-from antlion.controllers.utils_lib.ssh import connection, settings
-from antlion.event import event_bus
-from antlion.libs.proc import job
-from antlion.runner import Runner
-from antlion.types import ControllerConfig, Json
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "AndroidDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "android_devices"
-
-ANDROID_DEVICE_PICK_ALL_TOKEN = "*"
-# Key name for SL4A extra params in config file
-ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY = "sl4a_client_port"
-ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY = "sl4a_forwarded_port"
-ANDROID_DEVICE_SL4A_SERVER_PORT_KEY = "sl4a_server_port"
-# Key name for adb logcat extra params in config file.
-ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
-ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-CRASH_REPORT_PATHS = (
- "/data/tombstones/",
- "/data/vendor/ramdump/",
- "/data/ramdump/",
- "/data/vendor/ssrdump",
- "/data/vendor/ramdump/bluetooth",
- "/data/vendor/log/cbd",
-)
-CRASH_REPORT_SKIPS = (
- "RAMDUMP_RESERVED",
- "RAMDUMP_STATUS",
- "RAMDUMP_OUTPUT",
- "bluetooth",
-)
-ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
-DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
-DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
-DEFAULT_SCREENSHOT_PATH = "/sdcard/Pictures/screencap"
-BUG_REPORT_TIMEOUT = 1800
-PULL_TIMEOUT = 300
-PORT_RETRY_COUNT = 3
-ADB_ROOT_RETRY_COUNT = 2
-ADB_ROOT_RETRY_INTERVAL = 10
-IPERF_TIMEOUT = 60
-SL4A_APK_NAME = "com.googlecode.android_scripting"
-WAIT_FOR_DEVICE_TIMEOUT = 180
-ENCRYPTION_WINDOW = "CryptKeeper"
-DEFAULT_DEVICE_PASSWORD = "1111"
-RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
-
-
-def create(configs: list[ControllerConfig]) -> list[AndroidDevice]:
- """Creates AndroidDevice controller objects.
-
- Args:
- configs: A list of dicts, each representing a configuration for an
- Android device.
-
- Returns:
- A list of AndroidDevice objects.
- """
- if not configs:
- raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
- elif not isinstance(configs, list):
- raise errors.AndroidDeviceConfigError(
- ANDROID_DEVICE_NOT_LIST_CONFIG_MSG
- )
- elif isinstance(configs[0], str):
- # Configs is a list of serials.
- ads = get_instances(configs)
- else:
- # Configs is a list of dicts.
- ads = get_instances_with_configs(configs)
-
- ads[0].log.info(f'The primary device under test is "{ads[0].serial}".')
-
- for ad in ads:
- if not ad.is_connected():
- raise errors.AndroidDeviceError(
- (
- "Android device %s is specified in config"
- " but is not attached."
- )
- % ad.serial,
- serial=ad.serial,
- )
- _start_services_on_ads(ads)
- for ad in ads:
- if ad.droid:
- utils.set_location_service(ad, False)
- utils.sync_device_time(ad)
- return ads
-
-
-def destroy(objects: list[AndroidDevice]) -> None:
- """Cleans up AndroidDevice objects.
-
-    Args:
-        objects: A list of AndroidDevice objects to clean up.
- """
- for ad in objects:
- try:
- ad.clean_up()
- except:
- ad.log.exception("Failed to clean up properly.")
-
-
-def get_info(objects: list[AndroidDevice]) -> list[Json]:
- """Get information on a list of AndroidDevice objects.
-
-    Args:
-        objects: A list of AndroidDevice objects.
-
-    Returns:
-        A list of dicts, each representing info for an AndroidDevice object.
- """
- device_info: list[Json] = []
- for ad in objects:
- info = {"serial": ad.serial, "model": ad.model}
- info.update(ad.build_info)
- device_info.append(info)
- return device_info
-
-
-def _start_services_on_ads(ads):
- """Starts long running services on multiple AndroidDevice objects.
-
- If any one AndroidDevice object fails to start services, cleans up all
- existing AndroidDevice objects and their services.
-
- Args:
- ads: A list of AndroidDevice objects whose services to start.
- """
- running_ads = []
- for ad in ads:
- running_ads.append(ad)
- try:
- ad.start_services()
- except:
- ad.log.exception("Failed to start some services, abort!")
- destroy(running_ads)
- raise
-
-
-def _parse_device_list(device_list_str, key):
- """Parses a byte string representing a list of devices. The string is
- generated by calling either adb or fastboot.
-
- Args:
- device_list_str: Output of adb or fastboot.
- key: The token that signifies a device in device_list_str.
-
- Returns:
- A list of android device serial numbers.
- """
- return re.findall(r"(\S+)\t%s" % key, device_list_str)
-
-
-def list_adb_devices():
- """List all android devices connected to the computer that are detected by
- adb.
-
- Returns:
- A list of android device serials. Empty if there's none.
- """
- out = adb.AdbProxy().devices()
- return _parse_device_list(out, "device")
-
-
-def list_fastboot_devices():
- """List all android devices connected to the computer that are in in
- fastboot mode. These are detected by fastboot.
-
- Returns:
- A list of android device serials. Empty if there's none.
- """
- out = fastboot.FastbootProxy().devices()
- return _parse_device_list(out, "fastboot")
-
-
-def get_instances(serials) -> list[AndroidDevice]:
- """Create AndroidDevice instances from a list of serials.
-
- Args:
- serials: A list of android device serials.
-
- Returns:
- A list of AndroidDevice objects.
- """
- results: list[AndroidDevice] = []
- for s in serials:
- results.append(AndroidDevice(s))
- return results
-
-
-def get_instances_with_configs(configs):
- """Create AndroidDevice instances from a list of json configs.
-
- Each config should have the required key-value pair "serial".
-
- Args:
- configs: A list of dicts each representing the configuration of one
- android device.
-
- Returns:
- A list of AndroidDevice objects.
- """
- results = []
- for c in configs:
- try:
- serial = c.pop("serial")
- except KeyError:
- raise errors.AndroidDeviceConfigError(
- f"Required value 'serial' is missing in AndroidDevice config {c}."
- )
- client_port = 0
- if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
- try:
- client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY))
- except ValueError:
- raise errors.AndroidDeviceConfigError(
- "'%s' is not a valid number for config %s"
- % (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c)
- )
- server_port = None
- if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c:
- try:
- server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY))
- except ValueError:
- raise errors.AndroidDeviceConfigError(
- "'%s' is not a valid number for config %s"
- % (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c)
- )
- forwarded_port = 0
- if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c:
- try:
- forwarded_port = int(
- c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY)
- )
- except ValueError:
- raise errors.AndroidDeviceConfigError(
- "'%s' is not a valid number for config %s"
- % (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c)
- )
- ssh_config = c.pop("ssh_config", None)
- ssh_connection = None
- if ssh_config is not None:
- ssh_settings = settings.from_config(ssh_config)
- ssh_connection = connection.SshConnection(ssh_settings)
- ad = AndroidDevice(
- serial,
- ssh_connection=ssh_connection,
- client_port=client_port,
- forwarded_port=forwarded_port,
- server_port=server_port,
- )
- ad.load_config(c)
- results.append(ad)
- return results
-
-
-def get_all_instances(include_fastboot: bool = False) -> list[AndroidDevice]:
- """Create AndroidDevice instances for all attached android devices.
-
- Args:
- include_fastboot: Whether to include devices in bootloader mode or not.
-
- Returns:
- A list of AndroidDevice objects each representing an android device
- attached to the computer.
- """
- if include_fastboot:
- serial_list = list_adb_devices() + list_fastboot_devices()
- return get_instances(serial_list)
- return get_instances(list_adb_devices())
-
-
-def filter_devices(ads, func):
- """Finds the AndroidDevice instances from a list that match certain
- conditions.
-
- Args:
- ads: A list of AndroidDevice instances.
- func: A function that takes an AndroidDevice object and returns True
- if the device satisfies the filter condition.
-
- Returns:
- A list of AndroidDevice instances that satisfy the filter condition.
- """
- results = []
- for ad in ads:
- if func(ad):
- results.append(ad)
- return results
-
-
-def get_device(ads, **kwargs):
- """Finds a unique AndroidDevice instance from a list that has specific
- attributes of certain values.
-
- Example:
- get_device(android_devices, label="foo", phone_number="1234567890")
- get_device(android_devices, model="angler")
-
- Args:
- ads: A list of AndroidDevice instances.
- kwargs: keyword arguments used to filter AndroidDevice instances.
-
- Returns:
- The target AndroidDevice instance.
-
- Raises:
- AndroidDeviceError is raised if none or more than one device is
- matched.
- """
-
- def _get_device_filter(ad):
- for k, v in kwargs.items():
- if not hasattr(ad, k):
- return False
- elif getattr(ad, k) != v:
- return False
- return True
-
- filtered = filter_devices(ads, _get_device_filter)
- if not filtered:
- raise ValueError(
- f"Could not find a target device that matches condition: {kwargs}."
- )
- elif len(filtered) == 1:
- return filtered[0]
- else:
- serials = [ad.serial for ad in filtered]
- raise ValueError(f"More than one device matched: {serials}")
-
-
-def take_bug_reports(ads, test_name, begin_time):
- """Takes bug reports on a list of android devices.
-
- If you want to take a bug report, call this function with a list of
-    android_device objects in on_fail. Bug reports will be taken on all the
-    devices in the list concurrently. Taking a bug report is a relatively
-    long operation, so use this cautiously.
-
- Args:
- ads: A list of AndroidDevice instances.
- test_name: Name of the test case that triggered this bug report.
- begin_time: Logline format timestamp taken when the test started.
- """
-
- def take_br(test_name, begin_time, ad):
- ad.take_bug_report(test_name, begin_time)
-
- args = [(test_name, begin_time, ad) for ad in ads]
- utils.concurrent_exec(take_br, args)
-
-
-class AndroidDevice:
- """Class representing an android device.
-
- Each object of this class represents one Android device in ACTS, including
- handles to adb, fastboot, and sl4a clients. In addition to direct adb
- commands, this object also uses adb port forwarding to talk to the Android
- device.
-
- Attributes:
- serial: A string that's the serial number of the Android device.
- log_path: A string that is the path where all logs collected on this
- android device should be stored.
- log: A logger adapted from root logger with added token specific to an
- AndroidDevice instance.
- adb_logcat_process: A process that collects the adb logcat.
- adb: An AdbProxy object used for interacting with the device via adb.
- fastboot: A FastbootProxy object used for interacting with the device
- via fastboot.
- client_port: Preferred client port number on the PC host side for SL4A
- forwarded_port: Preferred server port number forwarded from Android
- to the host PC via adb for SL4A connections
- server_port: Preferred server port used by SL4A on Android device
-
- """
-
- def __init__(
- self,
- serial: str = "",
- ssh_connection: Runner | None = None,
- client_port: int = 0,
- forwarded_port: int = 0,
- server_port: int | None = None,
- ):
- self.serial = serial
- # logging.log_path only exists when this is used in an ACTS test run.
- log_path_base = getattr(logging, "log_path", "/tmp/logs")
- self.log_dir = f"AndroidDevice{serial}"
- self.log_path = os.path.join(log_path_base, self.log_dir)
- self.client_port = client_port
- self.forwarded_port = forwarded_port
- self.server_port = server_port
- self.log = AndroidDeviceLoggerAdapter(
- logging.getLogger(), {"serial": serial}
- )
- self._event_dispatchers = {}
- self._services = []
- self.register_service(services.AdbLogcatService(self))
- self.register_service(services.Sl4aService(self))
- self.adb_logcat_process = None
- self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
- self.fastboot = fastboot.FastbootProxy(
- serial, ssh_connection=ssh_connection
- )
- if not self.is_bootloader:
- self.root_adb()
- self._ssh_connection = ssh_connection
- self.skip_sl4a = False
- self.crash_report = None
- self.data_accounting = collections.defaultdict(int)
- self._sl4a_manager = sl4a_manager.create_sl4a_manager(self.adb)
- self.last_logcat_timestamp = None
- # Device info cache.
- self._user_added_device_info = {}
- self._sdk_api_level = None
-
- def clean_up(self):
- """Cleans up the AndroidDevice object and releases any resources it
- claimed.
- """
- self.stop_services()
- for service in self._services:
- service.unregister()
- self._services.clear()
- if self._ssh_connection:
- self._ssh_connection.close()
-
- def recreate_services(self, serial):
- """Clean up the AndroidDevice object and re-create adb/sl4a services.
-
-        Unregister the existing services and re-create the adb and sl4a
-        services. Call this method when the connection breaks after certain
-        API calls (e.g., enabling USB tethering via #startTethering).
-
- Args:
- serial: the serial number of the AndroidDevice
- """
- # Clean the old services
- for service in self._services:
- service.unregister()
- self._services.clear()
- if self._ssh_connection:
- self._ssh_connection.close()
- self._sl4a_manager.stop_service()
-
- # Wait for old services to stop
- time.sleep(5)
-
- # Re-create the new adb and sl4a services
- self.register_service(services.AdbLogcatService(self))
- self.register_service(services.Sl4aService(self))
- self.adb.wait_for_device()
- self.terminate_all_sessions()
- self.start_services()
-
- def register_service(self, service):
- """Registers the service on the device."""
- service.register()
- self._services.append(service)
-
- # TODO(angli): This function shall be refactored to accommodate all services
- # and not have hard coded switch for SL4A when b/29157104 is done.
- def start_services(self, skip_setup_wizard=True):
- """Starts long running services on the android device.
-
- 1. Start adb logcat capture.
- 2. Start SL4A if not skipped.
-
- Args:
- skip_setup_wizard: Whether or not to skip the setup wizard.
- """
- if skip_setup_wizard:
- self.exit_setup_wizard()
-
- event_bus.post(android_events.AndroidStartServicesEvent(self))
-
- def stop_services(self):
- """Stops long running services on the android device.
-
- Stop adb logcat and terminate sl4a sessions if exist.
- """
- event_bus.post(
- android_events.AndroidStopServicesEvent(self), ignore_errors=True
- )
-
- def is_connected(self):
- out = self.adb.devices()
- devices = _parse_device_list(out, "device")
- return self.serial in devices
-
- @property
- def build_info(self):
- """Get the build info of this Android device, including build id and
- build type.
-
- This is not available if the device is in bootloader mode.
-
- Returns:
- A dict with the build info of this Android device, or None if the
- device is in bootloader mode.
- """
- if self.is_bootloader:
- self.log.error(
- "Device is in fastboot mode, could not get build " "info."
- )
- return
-
- build_id = self.adb.getprop("ro.build.id")
- incremental_build_id = self.adb.getprop("ro.build.version.incremental")
- valid_build_id = False
- for regex in RELEASE_ID_REGEXES:
- if re.match(regex, build_id):
- valid_build_id = True
- break
- if not valid_build_id:
- build_id = incremental_build_id
-
- info = {
- "build_id": build_id,
- "incremental_build_id": incremental_build_id,
- "build_type": self.adb.getprop("ro.build.type"),
- }
- return info
-
- @property
- def device_info(self):
- """Information to be pulled into controller info.
-
- The latest serial, model, and build_info are included. Additional info
- can be added via `add_device_info`.
- """
- info = {
- "serial": self.serial,
- "model": self.model,
- "build_info": self.build_info,
- "user_added_info": self._user_added_device_info,
- "flavor": self.flavor,
- }
- return info
-
- def add_device_info(self, name, info):
- """Add custom device info to the user_added_info section.
-
- Adding the same info name the second time will override existing info.
-
- Args:
- name: string, name of this info.
- info: serializable, content of the info.
- """
- self._user_added_device_info.update({name: info})
-
- def sdk_api_level(self):
- if self._sdk_api_level is not None:
- return self._sdk_api_level
- if self.is_bootloader:
- self.log.error("Device is in fastboot mode. Cannot get build info.")
- return
- self._sdk_api_level = int(
- self.adb.shell("getprop ro.build.version.sdk")
- )
- return self._sdk_api_level
-
- @property
- def is_bootloader(self):
- """True if the device is in bootloader mode."""
- return self.serial in list_fastboot_devices()
-
- @property
- def is_adb_root(self):
- """True if adb is running as root for this device."""
- try:
- return "0" == self.adb.shell("id -u")
- except AdbError:
- # Wait a bit and retry to work around adb flakiness for this cmd.
- time.sleep(0.2)
- return "0" == self.adb.shell("id -u")
-
- @property
- def model(self):
- """The Android code name for the device."""
- # If device is in bootloader mode, get mode name from fastboot.
- if self.is_bootloader:
- out = self.fastboot.getvar("product").strip()
- # "out" is never empty because of the "total time" message fastboot
- # writes to stderr.
- lines = out.split("\n", 1)
- if lines:
- tokens = lines[0].split(" ")
- if len(tokens) > 1:
- return tokens[1].lower()
- return None
- model = self.adb.getprop("ro.build.product").lower()
- if model == "sprout":
- return model
- else:
- return self.adb.getprop("ro.product.name").lower()
-
- @property
- def flavor(self):
- """Returns the specific flavor of Android build the device is using."""
- return self.adb.getprop("ro.build.flavor").lower()
-
- @property
- def droid(self):
- """Returns the RPC Service of the first Sl4aSession created."""
- if len(self._sl4a_manager.sessions) > 0:
- session_id = sorted(self._sl4a_manager.sessions.keys())[0]
- return self._sl4a_manager.sessions[session_id].rpc_client
- else:
- return None
-
- @property
- def ed(self):
- """Returns the event dispatcher of the first Sl4aSession created."""
- if len(self._sl4a_manager.sessions) > 0:
- session_id = sorted(self._sl4a_manager.sessions.keys())[0]
- return self._sl4a_manager.sessions[
- session_id
- ].get_event_dispatcher()
- else:
- return None
-
- @property
- def sl4a_sessions(self):
- """Returns a dictionary of session ids to sessions."""
- return list(self._sl4a_manager.sessions)
-
- @property
- def is_adb_logcat_on(self):
- """Whether there is an ongoing adb logcat collection."""
- if self.adb_logcat_process:
- if self.adb_logcat_process.is_running():
- return True
- else:
- # if skip_sl4a is true, there is no sl4a session
- # if logcat died due to device reboot and sl4a session has
- # not restarted there is no droid.
- if self.droid:
- self.droid.logI("Logcat died")
- self.log.info("Logcat to %s died", self.log_path)
- return False
- return False
-
- @property
- def device_log_path(self):
- """Returns the directory for all Android device logs for the current
- test context and serial.
- """
- return context.get_current_context().get_full_output_path(self.serial)
-
- def update_sdk_api_level(self):
- self._sdk_api_level = None
- self.sdk_api_level()
-
- def load_config(self, config):
- """Add attributes to the AndroidDevice object based on json config.
-
- Args:
- config: A dictionary representing the configs.
-
- Raises:
- AndroidDeviceError is raised if the config is trying to overwrite
- an existing attribute.
- """
- for k, v in config.items():
- # skip_sl4a value can be reset from config file
- if hasattr(self, k) and k != "skip_sl4a":
- raise errors.AndroidDeviceError(
- f"Attempting to set existing attribute {k} on {self.serial}",
- serial=self.serial,
- )
- setattr(self, k, v)
-
- def root_adb(self):
- """Change adb to root mode for this device if allowed.
-
- If executed on a production build, adb will not be switched to root
- mode per security restrictions.
- """
- if self.is_adb_root:
- return
-
- for attempt in range(ADB_ROOT_RETRY_COUNT):
- try:
- self.log.debug(f"Enabling ADB root mode: attempt {attempt}.")
- self.adb.root()
- except AdbError:
-                # attempt is zero-based, so the final retry is COUNT - 1.
-                if attempt == ADB_ROOT_RETRY_COUNT - 1:
- raise
- time.sleep(ADB_ROOT_RETRY_INTERVAL)
- self.adb.wait_for_device()
-
- def get_droid(self, handle_event=True):
- """Create an sl4a connection to the device.
-
- Return the connection handler 'droid'. By default, another connection
- on the same session is made for EventDispatcher, and the dispatcher is
- returned to the caller as well.
- If sl4a server is not started on the device, try to start it.
-
- Args:
- handle_event: True if this droid session will need to handle
- events.
-
- Returns:
- droid: Android object used to communicate with sl4a on the android
- device.
- ed: An optional EventDispatcher to organize events for this droid.
-
- Examples:
- Don't need event handling:
- >>> ad = AndroidDevice()
- >>> droid = ad.get_droid(False)
-
- Need event handling:
- >>> ad = AndroidDevice()
- >>> droid, ed = ad.get_droid()
- """
- self.log.debug(
- "Creating RPC client_port={}, forwarded_port={}, server_port={}".format(
- self.client_port, self.forwarded_port, self.server_port
- )
- )
- session = self._sl4a_manager.create_session(
- client_port=self.client_port,
- forwarded_port=self.forwarded_port,
- server_port=self.server_port,
- )
- droid = session.rpc_client
- if handle_event:
- ed = session.get_event_dispatcher()
- return droid, ed
- return droid
-
- def get_package_pid(self, package_name):
- """Gets the pid for a given package. Returns None if not running.
- Args:
- package_name: The name of the package.
- Returns:
- The first pid found under a given package name. None if no process
- was found running the package.
- Raises:
- AndroidDeviceError if the output of the phone's process list was
- in an unexpected format.
- """
- for cmd in ("ps -A", "ps"):
- try:
- out = self.adb.shell(
- f'{cmd} | grep "S {package_name}"', ignore_status=True
- )
- if package_name not in out:
- continue
- try:
- pid = int(out.split()[1])
- self.log.info("apk %s has pid %s.", package_name, pid)
- return pid
- except (IndexError, ValueError) as e:
- # Possible ValueError from string to int cast.
- # Possible IndexError from split.
- self.log.warning(
- 'Command "%s" returned output line: '
- '"%s".\nError: %s',
- cmd,
- out,
- e,
- )
- except Exception as e:
- self.log.warning(
- 'Device fails to check if %s running with "%s"\n'
- "Exception %s",
- package_name,
- cmd,
- e,
- )
- self.log.debug("apk %s is not running", package_name)
- return None
-
- def get_dispatcher(self, droid):
- """Return an EventDispatcher for an sl4a session
-
- Args:
- droid: Session to create EventDispatcher for.
-
- Returns:
- ed: An EventDispatcher for specified session.
- """
- return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
-
- def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
- low = logger.logline_timestamp_comparator(log_begin_time, target) <= 0
- high = logger.logline_timestamp_comparator(log_end_time, target) >= 0
- return low and high
-
- def cat_adb_log(
- self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"
- ):
- """Takes an excerpt of the adb logcat log from a certain time point to
- current time.
-
- Args:
- tag: An identifier of the time period, usually the name of a test.
- begin_time: Epoch time of the beginning of the time period.
- end_time: Epoch time of the ending of the time period, default None
- dest_path: Destination path of the excerpt file.
- """
- log_begin_time = logger.epoch_to_log_line_timestamp(begin_time)
- if end_time is None:
- log_end_time = logger.get_log_line_timestamp()
- else:
- log_end_time = logger.epoch_to_log_line_timestamp(end_time)
- self.log.debug("Extracting adb log from logcat.")
- logcat_path = os.path.join(
- self.device_log_path, f"adblog_{self.serial}_debug.txt"
- )
- if not os.path.exists(logcat_path):
- self.log.warning(f"Logcat file {logcat_path} does not exist.")
- return
- adb_excerpt_dir = os.path.join(self.log_path, dest_path)
- os.makedirs(adb_excerpt_dir, exist_ok=True)
- out_name = "%s,%s.txt" % (
- logger.sanitize_filename(log_begin_time),
- self.serial,
- )
- tag_len = utils.MAX_FILENAME_LEN - len(out_name)
- out_name = f"{tag[:tag_len]},{out_name}"
- adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
- with open(adb_excerpt_path, "w", encoding="utf-8") as out:
- in_file = logcat_path
- with open(in_file, "r", encoding="utf-8", errors="replace") as f:
- while True:
- line = None
- try:
- line = f.readline()
- if not line:
- break
- except:
- continue
- line_time = line[: logger.log_line_timestamp_len]
- if not logger.is_valid_logline_timestamp(line_time):
- continue
- if self._is_timestamp_in_range(
- line_time, log_begin_time, log_end_time
- ):
- if not line.endswith("\n"):
- line += "\n"
- out.write(line)
- return adb_excerpt_path
-
- def search_logcat(
- self, matching_string, begin_time=None, end_time=None, logcat_path=None
- ):
- """Search logcat message with given string.
-
- Args:
- matching_string: matching_string to search.
- begin_time: only the lines with time stamps later than begin_time
- will be searched.
- end_time: only the lines with time stamps earlier than end_time
- will be searched.
- logcat_path: the path of a specific file in which the search should
- be performed. If None the path will be the default device log
- path.
-
- Returns:
- A list of dictionaries with full log message, time stamp string,
- time object and message ID. For example:
- [{"log_message": "05-03 17:39:29.898 968 1001 D"
- "ActivityManager: Sending BOOT_COMPLETE user #0",
- "time_stamp": "2017-05-03 17:39:29.898",
- "datetime_obj": datetime object,
- "message_id": None}]
-
- [{"log_message": "08-12 14:26:42.611043 2360 2510 D RILJ : "
- "[0853]< DEACTIVATE_DATA_CALL [PHONE0]",
- "time_stamp": "2020-08-12 14:26:42.611043",
- "datetime_obj": datetime object},
- "message_id": "0853"}]
- """
- if not logcat_path:
- logcat_path = os.path.join(
- self.device_log_path, f"adblog_{self.serial}_debug.txt"
- )
- if not os.path.exists(logcat_path):
- self.log.warning(f"Logcat file {logcat_path} does not exist.")
- return
- output = job.run(
- f"grep '{matching_string}' {logcat_path}", ignore_status=True
- )
- if not output.stdout or output.exit_status != 0:
- return []
- if begin_time:
- if not isinstance(begin_time, datetime):
- log_begin_time = logger.epoch_to_log_line_timestamp(begin_time)
- begin_time = datetime.strptime(
- log_begin_time, "%Y-%m-%d %H:%M:%S.%f"
- )
- if end_time:
- if not isinstance(end_time, datetime):
- log_end_time = logger.epoch_to_log_line_timestamp(end_time)
- end_time = datetime.strptime(
- log_end_time, "%Y-%m-%d %H:%M:%S.%f"
- )
- result = []
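-        # Each matching line is split into a "date time" prefix and the rest
-        # of the message so its timestamp can be compared against the window.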
- logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
- for log in logs:
- time_stamp = log[0]
- time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
-
- if begin_time and time_obj < begin_time:
- continue
-
- if end_time and time_obj > end_time:
- continue
-
- res = re.findall(r".*\[(\d+)\]", log[1])
- try:
- message_id = res[0]
- except:
- message_id = None
-
- result.append(
- {
- "log_message": "".join(log),
- "time_stamp": time_stamp,
- "datetime_obj": time_obj,
- "message_id": message_id,
- }
- )
- return result
-
- def start_adb_logcat(self):
- """Starts a standing adb logcat collection in separate subprocesses and
- save the logcat in a file.
- """
- if self.is_adb_logcat_on:
- self.log.warning(
- "Android device %s already has a running adb logcat thread. "
- % self.serial
- )
- return
- # Disable adb log spam filter. Have to stop and clear settings first
- # because 'start' doesn't support --clear option before Android N.
- self.adb.shell("logpersist.stop --clear", ignore_status=True)
- self.adb.shell("logpersist.start", ignore_status=True)
- if hasattr(self, "adb_logcat_param"):
- extra_params = self.adb_logcat_param
- else:
- extra_params = "-b all"
-
- self.adb_logcat_process = logcat.create_logcat_keepalive_process(
- self.serial, self.log_dir, extra_params
- )
- self.adb_logcat_process.start()
-
- def stop_adb_logcat(self):
- """Stops the adb logcat collection subprocess."""
- if not self.is_adb_logcat_on:
- self.log.warning(
- f"Android device {self.serial} does not have an ongoing adb logcat "
- )
- return
- # Set the last timestamp to the current timestamp. This may cause
- # a race condition that allows the same line to be logged twice,
- # but it does not pose a problem for our logging purposes.
- self.adb_logcat_process.stop()
- self.adb_logcat_process = None
-
- def get_apk_uid(self, apk_name):
- """Get the uid of the given apk.
-
- Args:
- apk_name: Name of the package, e.g., com.android.phone.
-
- Returns:
- Linux UID for the apk.
- """
- output = self.adb.shell(
- f"dumpsys package {apk_name} | grep userId=", ignore_status=True
- )
- result = re.search(r"userId=(\d+)", output)
- if result:
- return result.group(1)
- else:
-            return None
-
- def get_apk_version(self, package_name):
- """Get the version of the given apk.
-
- Args:
- package_name: Name of the package, e.g., com.android.phone.
-
- Returns:
- Version of the given apk.
- """
- try:
- output = self.adb.shell(
- f"dumpsys package {package_name} | grep versionName"
- )
- pattern = re.compile(r"versionName=(.+)", re.I)
- result = pattern.findall(output)
- if result:
- return result[0]
- except Exception as e:
- self.log.warning(
- "Fail to get the version of package %s: %s", package_name, e
- )
- self.log.debug("apk %s is not found", package_name)
- return None
-
- def is_apk_installed(self, package_name):
- """Check if the given apk is already installed.
-
- Args:
- package_name: Name of the package, e.g., com.android.phone.
-
- Returns:
- True if package is installed. False otherwise.
- """
-
- try:
- return bool(
- self.adb.shell(
- f'(pm list packages | grep -w "package:{package_name}") || true'
- )
- )
-
- except Exception as err:
- self.log.error(
- "Could not determine if %s is installed. "
- "Received error:\n%s",
- package_name,
- err,
- )
- return False
-
- def is_sl4a_installed(self):
- return self.is_apk_installed(SL4A_APK_NAME)
-
- def is_apk_running(self, package_name):
- """Check if the given apk is running.
-
- Args:
- package_name: Name of the package, e.g., com.android.phone.
-
- Returns:
-            True if the package is running. False otherwise.
- """
- for cmd in ("ps -A", "ps"):
- try:
- out = self.adb.shell(
- f'{cmd} | grep "S {package_name}"', ignore_status=True
- )
- if package_name in out:
- self.log.info("apk %s is running", package_name)
- return True
- except Exception as e:
- self.log.warning(
- "Device fails to check is %s running by %s " "Exception %s",
- package_name,
- cmd,
- e,
- )
- continue
- self.log.debug("apk %s is not running", package_name)
- return False
-
- def is_sl4a_running(self):
- return self.is_apk_running(SL4A_APK_NAME)
-
- def force_stop_apk(self, package_name):
- """Force stop the given apk.
-
- Args:
- package_name: Name of the package, e.g., com.android.phone.
- """
- try:
- self.adb.shell(f"am force-stop {package_name}", ignore_status=True)
- except Exception as e:
- self.log.warning("Fail to stop package %s: %s", package_name, e)
-
- def take_bug_report(self, test_name=None, begin_time=None):
- """Takes a bug report on the device and stores it in a file.
-
- Args:
- test_name: Name of the test case that triggered this bug report.
- begin_time: Epoch time when the test started. If none is specified,
- the current time will be used.
- """
- self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
- new_br = True
- try:
- stdout = self.adb.shell("bugreportz -v")
- # This check is necessary for builds before N, where adb shell's ret
- # code and stderr are not propagated properly.
- if "not found" in stdout:
- new_br = False
- except AdbError:
- new_br = False
- br_path = self.device_log_path
- os.makedirs(br_path, exist_ok=True)
- epoch = begin_time if begin_time else utils.get_current_epoch_time()
- time_stamp = logger.sanitize_filename(
- logger.epoch_to_log_line_timestamp(epoch)
- )
- out_name = f"AndroidDevice{self.serial}_{time_stamp}"
- out_name = f"{out_name}.zip" if new_br else f"{out_name}.txt"
- full_out_path = os.path.join(br_path, out_name)
- # in case device restarted, wait for adb interface to return
- self.wait_for_boot_completion()
- if test_name:
- self.log.info("Taking bugreport for %s.", test_name)
- else:
- self.log.info("Taking bugreport.")
- if new_br:
- out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
- if not out.startswith("OK"):
- raise errors.AndroidDeviceError(
- f"Failed to take bugreport on {self.serial}: {out}",
- serial=self.serial,
- )
- br_out_path = out.split(":")[1].strip().split()[0]
- self.adb.pull(f"{br_out_path} {full_out_path}")
- else:
- self.adb.bugreport(
- f" > {full_out_path}", timeout=BUG_REPORT_TIMEOUT
- )
- if test_name:
- self.log.info(
- "Bugreport for %s taken at %s.", test_name, full_out_path
- )
- else:
- self.log.info("Bugreport taken at %s.", test_name, full_out_path)
- self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
-
- def get_file_names(
- self, directory, begin_time=None, skip_files=[], match_string=None
- ):
- """Get files names with provided directory."""
- cmd = f"find {directory} -type f"
- if begin_time:
- current_time = utils.get_current_epoch_time()
- seconds = int(math.ceil((current_time - begin_time) / 1000.0))
- cmd = f"{cmd} -mtime -{seconds}s"
- if match_string:
- cmd = f"{cmd} -iname {match_string}"
- for skip_file in skip_files:
- cmd = f"{cmd} ! -iname {skip_file}"
- out = self.adb.shell(cmd, ignore_status=True)
- if (
- not out
- or "No such" in out
- or "Permission denied" in out
- or "Not a directory" in out
- ):
- return []
- files = out.split("\n")
- self.log.debug("Find files in directory %s: %s", directory, files)
- return files
-
- @property
- def external_storage_path(self):
- """
- The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
- """
- return self.adb.shell("echo $EXTERNAL_STORAGE")
-
- def file_exists(self, file_path):
- """Returns whether a file exists on a device.
-
- Args:
- file_path: The path of the file to check for.
- """
- cmd = f"(test -f {file_path} && echo yes) || echo no"
- result = self.adb.shell(cmd)
- if result == "yes":
- return True
- elif result == "no":
- return False
- raise ValueError(
- "Couldn't determine if %s exists. "
- "Expected yes/no, got %s" % (file_path, result[cmd])
- )
-
- def pull_files(self, device_paths, host_path=None):
- """Pull files from devices.
-
- Args:
- device_paths: List of paths on the device to pull from.
- host_path: Destination path
- """
- if isinstance(device_paths, str):
- device_paths = [device_paths]
- if not host_path:
- host_path = self.log_path
- for device_path in device_paths:
- self.log.info(f"Pull from device: {device_path} -> {host_path}")
- self.adb.pull(f"{device_path} {host_path}", timeout=PULL_TIMEOUT)
-
- def check_crash_report(
- self, test_name=None, begin_time=None, log_crash_report=False
- ):
- """check crash report on the device."""
- crash_reports = []
- for crash_path in CRASH_REPORT_PATHS:
- try:
- cmd = f"cd {crash_path}"
- self.adb.shell(cmd)
- except Exception as e:
- self.log.debug("received exception %s", e)
- continue
- crashes = self.get_file_names(
- crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time
- )
- if crash_path == "/data/tombstones/" and crashes:
- tombstones = crashes[:]
- for tombstone in tombstones:
- if self.adb.shell(
- f'cat {tombstone} | grep "crash_dump failed to dump process"'
- ):
- crashes.remove(tombstone)
- if crashes:
- crash_reports.extend(crashes)
- if crash_reports and log_crash_report:
- crash_log_path = os.path.join(
- self.device_log_path, f"Crashes_{self.serial}"
- )
- os.makedirs(crash_log_path, exist_ok=True)
- self.pull_files(crash_reports, crash_log_path)
- return crash_reports
-
- def get_qxdm_logs(self, test_name="", begin_time=None):
- """Get qxdm logs."""
- # Sleep 10 seconds for the buffered log to be written in qxdm log file
- time.sleep(10)
- log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
- qxdm_logs = self.get_file_names(
- log_path, begin_time=begin_time, match_string="*.qmdl"
- )
- if qxdm_logs:
- qxdm_log_path = os.path.join(
- self.device_log_path, f"QXDM_{self.serial}"
- )
- os.makedirs(qxdm_log_path, exist_ok=True)
-
- self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
- self.pull_files(qxdm_logs, qxdm_log_path)
-
- self.adb.pull(
- f"/firmware/image/qdsp6m.qdb {qxdm_log_path}",
- timeout=PULL_TIMEOUT,
- ignore_status=True,
- )
- # Zip Folder
- utils.zip_directory(f"{qxdm_log_path}.zip", qxdm_log_path)
- shutil.rmtree(qxdm_log_path)
- else:
- self.log.error(f"Didn't find QXDM logs in {log_path}.")
- if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
- omadm_log_path = os.path.join(
- self.device_log_path, f"OMADM_{self.serial}"
- )
- os.makedirs(omadm_log_path, exist_ok=True)
- self.log.info("Pull OMADM Log")
- self.adb.pull(
- f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
- timeout=PULL_TIMEOUT,
- ignore_status=True,
- )
-
- def get_sdm_logs(self, test_name="", begin_time=None):
- """Get sdm logs."""
- # Sleep 10 seconds for the buffered log to be written in sdm log file
- time.sleep(10)
- log_paths = [
- ALWAYS_ON_LOG_PATH,
- getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH),
- ]
- sdm_logs = []
- for path in log_paths:
- sdm_logs += self.get_file_names(
- path, begin_time=begin_time, match_string="*.sdm*"
- )
- if sdm_logs:
- sdm_log_path = os.path.join(
- self.device_log_path, f"SDM_{self.serial}"
- )
- os.makedirs(sdm_log_path, exist_ok=True)
- self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
- self.pull_files(sdm_logs, sdm_log_path)
- else:
- self.log.error(f"Didn't find SDM logs in {log_paths}.")
- if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
- omadm_log_path = os.path.join(
- self.device_log_path, f"OMADM_{self.serial}"
- )
- os.makedirs(omadm_log_path, exist_ok=True)
- self.log.info("Pull OMADM Log")
- self.adb.pull(
- f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
- timeout=PULL_TIMEOUT,
- ignore_status=True,
- )
-
- def start_new_session(self, max_connections=None, server_port=None):
- """Start a new session in sl4a.
-
- Also caches the droid in a dict with its uid being the key.
-
- Returns:
- An Android object used to communicate with sl4a on the android
- device.
-
- Raises:
- Sl4aException: Something is wrong with sl4a and it returned an
- existing uid to a new session.
- """
- session = self._sl4a_manager.create_session(
- max_connections=max_connections, server_port=server_port
- )
-
- self._sl4a_manager.sessions[session.uid] = session
- return session.rpc_client
-
- def terminate_all_sessions(self):
- """Terminate all sl4a sessions on the AndroidDevice instance.
-
- Terminate all sessions and clear caches.
- """
- self._sl4a_manager.terminate_all_sessions()
-
- def run_iperf_client_nb(
- self,
- server_host,
- extra_args="",
- timeout=IPERF_TIMEOUT,
- log_file_path=None,
- ):
- """Start iperf client on the device asynchronously.
-
-        The iperf3 client is launched in the background; results are not
-        returned directly. Use log_file_path to capture the output.
-
- Args:
- server_host: Address of the iperf server.
- extra_args: A string representing extra arguments for iperf client,
- e.g. "-i 1 -t 30".
-            timeout: Seconds before the command times out (unused in this
-                non-blocking variant).
-            log_file_path: The complete file path to log the results.
-
- """
- cmd = f"iperf3 -c {server_host} {extra_args}"
- if log_file_path:
- cmd += f" --logfile {log_file_path} &"
- self.adb.shell_nb(cmd)
-
- def run_iperf_client(
- self, server_host, extra_args="", timeout=IPERF_TIMEOUT
- ):
- """Start iperf client on the device.
-
-        Returns status as True if the iperf client started successfully,
-        along with data flow information as results.
-
- Args:
- server_host: Address of the iperf server.
- extra_args: A string representing extra arguments for iperf client,
- e.g. "-i 1 -t 30".
-
- Returns:
-            status: True if the iperf client started successfully.
-            results: The iperf output containing data flow information.
- """
- out = self.adb.shell(
- f"iperf3 -c {server_host} {extra_args}", timeout=timeout
- )
- clean_out = out.split("\n")
- if "error" in clean_out[0].lower():
- return False, clean_out
- return True, clean_out
-
- def run_iperf_server(self, extra_args=""):
- """Start iperf server on the device
-
-        Returns status as True if the iperf server started successfully.
-
- Args:
- extra_args: A string representing extra arguments for iperf server.
-
- Returns:
-            status: True if the iperf server started successfully.
-            results: The output of the iperf command.
- """
- out = self.adb.shell(f"iperf3 -s {extra_args}")
- clean_out = out.split("\n")
- if "error" in clean_out[0].lower():
- return False, clean_out
- return True, clean_out
-
- def wait_for_boot_completion(self, timeout=900.0):
- """Waits for Android framework to broadcast ACTION_BOOT_COMPLETED.
-
- Args:
- timeout: Seconds to wait for the device to boot. Default value is
- 15 minutes.
- """
- timeout_start = time.time()
-
- self.log.debug("ADB waiting for device")
- self.adb.wait_for_device(timeout=timeout)
- self.log.debug("Waiting for sys.boot_completed")
- while time.time() < timeout_start + timeout:
- try:
- completed = self.adb.getprop("sys.boot_completed")
- if completed == "1":
- self.log.debug("Device has rebooted")
- return
- except AdbError:
- # adb shell calls may fail during certain period of booting
- # process, which is normal. Ignoring these errors.
- pass
- time.sleep(5)
- raise errors.AndroidDeviceError(
- f"Device {self.serial} booting process timed out.",
- serial=self.serial,
- )
-
- def reboot(
- self,
- stop_at_lock_screen=False,
- timeout=180,
- wait_after_reboot_complete=1,
- ):
- """Reboots the device.
-
- Terminate all sl4a sessions, reboot the device, wait for device to
- complete booting, and restart an sl4a session if restart_sl4a is True.
-
- Args:
-            stop_at_lock_screen: whether to stop at the lock screen after
-                reboot instead of unlocking. When True, SL4A is not restarted,
-                since SL4A requires the device to be unlocked after rebooting.
- timeout: time in seconds to wait for the device to complete
- rebooting.
- wait_after_reboot_complete: time in seconds to wait after the boot
- completion.
- """
- if self.is_bootloader:
- self.fastboot.reboot()
- return
- self.stop_services()
- self.log.info("Rebooting")
- self.adb.reboot()
-
- timeout_start = time.time()
- # b/111791239: Newer versions of android sometimes return early after
- # `adb reboot` is called. This means subsequent calls may make it to
-        # the device before the reboot goes through, returning false positives
-        # for getprops such as sys.boot_completed.
- while time.time() < timeout_start + timeout:
- try:
- self.adb.get_state()
- time.sleep(0.1)
- except AdbError:
- # get_state will raise an error if the device is not found. We
- # want the device to be missing to prove the device has kicked
- # off the reboot.
- break
- self.wait_for_boot_completion(
- timeout=(timeout - time.time() + timeout_start)
- )
-
- self.log.debug("Wait for a while after boot completion.")
- time.sleep(wait_after_reboot_complete)
- self.root_adb()
- skip_sl4a = self.skip_sl4a
- self.skip_sl4a = self.skip_sl4a or stop_at_lock_screen
- self.start_services()
- self.skip_sl4a = skip_sl4a
-
- def restart_runtime(self):
- """Restarts android runtime.
-
- Terminate all sl4a sessions, restarts runtime, wait for framework
- complete restart, and restart an sl4a session if restart_sl4a is True.
- """
- self.stop_services()
- self.log.info("Restarting android runtime")
- self.adb.shell("stop")
- # Reset the boot completed flag before we restart the framework
- # to correctly detect when the framework has fully come up.
- self.adb.shell("setprop sys.boot_completed 0")
- self.adb.shell("start")
- self.wait_for_boot_completion()
- self.root_adb()
-
- self.start_services()
-
- def get_ipv4_address(self, interface="wlan0", timeout=5):
- for timer in range(0, timeout):
- try:
- ip_string = self.adb.shell(f"ifconfig {interface}|grep inet")
- break
- except adb.AdbError as e:
- if timer + 1 == timeout:
- self.log.warning(
- f"Unable to find IP address for {interface}."
- )
- return None
- else:
- time.sleep(1)
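-        # Parse ifconfig output of the form "inet addr:<ip>  Bcast:...".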
- result = re.search("addr:(.*) Bcast", ip_string)
-        if result is not None:
- ip_address = result.group(1)
- try:
- socket.inet_aton(ip_address)
- return ip_address
- except socket.error:
- return None
- else:
- return None
-
- def get_ipv4_gateway(self, timeout=5):
- for timer in range(0, timeout):
- try:
- gateway_string = self.adb.shell(
- "dumpsys wifi | grep mDhcpResults"
- )
- break
- except adb.AdbError as e:
- if timer + 1 == timeout:
- self.log.warning("Unable to find gateway")
- return None
- else:
- time.sleep(1)
- result = re.search("Gateway (.*) DNS servers", gateway_string)
-        if result is not None:
- ipv4_gateway = result.group(1)
- try:
- socket.inet_aton(ipv4_gateway)
- return ipv4_gateway
- except socket.error:
- return None
- else:
- return None
-
- def send_keycode(self, keycode):
- self.adb.shell(f"input keyevent KEYCODE_{keycode}")
-
- def get_my_current_focus_window(self):
- """Get the current focus window on screen"""
- output = self.adb.shell(
- "dumpsys window displays | grep -E mCurrentFocus | grep -v null",
- ignore_status=True,
- )
- if not output or "not found" in output or "Can't find" in output:
- result = ""
- else:
- result = output.split(" ")[-1].strip("}")
- self.log.debug("Current focus window is %s", result)
- return result
-
- def get_my_current_focus_app(self):
- """Get the current focus application"""
- dumpsys_cmd = [
- "dumpsys window | grep -E mFocusedApp",
- "dumpsys window displays | grep -E mFocusedApp",
- ]
- for cmd in dumpsys_cmd:
- output = self.adb.shell(cmd, ignore_status=True)
- if (
- not output
- or "not found" in output
- or "Can't find" in output
- or ("mFocusedApp=null" in output)
- ):
- result = ""
- else:
- result = output.split(" ")[-2]
- break
- self.log.debug("Current focus app is %s", result)
- return result
-
- def is_window_ready(self, window_name=None):
- current_window = self.get_my_current_focus_window()
- if window_name:
- return window_name in current_window
- return current_window and ENCRYPTION_WINDOW not in current_window
-
- def wait_for_window_ready(
- self, window_name=None, check_interval=5, check_duration=60
- ):
- elapsed_time = 0
- while elapsed_time < check_duration:
- if self.is_window_ready(window_name=window_name):
- return True
- time.sleep(check_interval)
- elapsed_time += check_interval
- self.log.info(
- "Current focus window is %s", self.get_my_current_focus_window()
- )
- return False
-
- def is_user_setup_complete(self):
- return "1" in self.adb.shell("settings get secure user_setup_complete")
-
- def is_screen_awake(self):
- """Check if device screen is in sleep mode"""
- return "Awake" in self.adb.shell("dumpsys power | grep mWakefulness=")
-
- def is_screen_emergency_dialer(self):
- """Check if device screen is in emergency dialer mode"""
- return "EmergencyDialer" in self.get_my_current_focus_window()
-
- def is_screen_in_call_activity(self):
- """Check if device screen is in in-call activity notification"""
- return "InCallActivity" in self.get_my_current_focus_window()
-
- def is_setupwizard_on(self):
- """Check if device screen is in emergency dialer mode"""
- return "setupwizard" in self.get_my_current_focus_app()
-
- def is_screen_lock_enabled(self):
- """Check if screen lock is enabled"""
- cmd = "dumpsys window policy | grep showing="
- out = self.adb.shell(cmd, ignore_status=True)
- return "true" in out
-
- def is_waiting_for_unlock_pin(self):
- """Check if device is waiting for unlock pin to boot up"""
- current_window = self.get_my_current_focus_window()
- current_app = self.get_my_current_focus_app()
- if ENCRYPTION_WINDOW in current_window:
- self.log.info("Device is in CrpytKeeper window")
- return True
- if "StatusBar" in current_window and (
- (not current_app) or "FallbackHome" in current_app
- ):
- self.log.info("Device is locked")
- return True
- return False
-
- def ensure_screen_on(self):
- """Ensure device screen is powered on"""
- if self.is_screen_lock_enabled():
- for _ in range(2):
- self.unlock_screen()
- time.sleep(1)
- if self.is_waiting_for_unlock_pin():
- self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
- time.sleep(1)
- if (
- not self.is_waiting_for_unlock_pin()
- and self.wait_for_window_ready()
- ):
- return True
- return False
- else:
- self.wakeup_screen()
- return True
-
- def wakeup_screen(self):
- if not self.is_screen_awake():
- self.log.info("Screen is not awake, wake it up")
- self.send_keycode("WAKEUP")
-
- def go_to_sleep(self):
- if self.is_screen_awake():
- self.send_keycode("SLEEP")
-
- def send_keycode_number_pad(self, number):
- self.send_keycode(f"NUMPAD_{number}")
-
- def unlock_screen(self, password=None):
- self.log.info("Unlocking with %s", password or "swipe up")
- # Bring device to SLEEP so that unlock process can start fresh
- self.send_keycode("SLEEP")
- time.sleep(1)
- self.send_keycode("WAKEUP")
- if ENCRYPTION_WINDOW not in self.get_my_current_focus_app():
- self.send_keycode("MENU")
- if password:
- self.send_keycode("DEL")
- for number in password:
- self.send_keycode_number_pad(number)
- self.send_keycode("ENTER")
- self.send_keycode("BACK")
-
- def screenshot(self, name=""):
- """Take a screenshot on the device.
-
- Args:
- name: additional information of screenshot on the file name.
- """
-        file_name = DEFAULT_SCREENSHOT_PATH
-        if name:
-            file_name = f"{file_name}_{name}"
-        file_name = f"{file_name}_{utils.get_current_epoch_time()}.png"
- self.ensure_screen_on()
- self.log.info("Log screenshot to %s", file_name)
- try:
- self.adb.shell(f"screencap -p {file_name}")
- except:
- self.log.error("Fail to log screenshot to %s", file_name)
-
- def exit_setup_wizard(self):
- # Handling Android TV's setupwizard is ignored for now.
- if "feature:android.hardware.type.television" in self.adb.shell(
- "pm list features"
- ):
- return
- if not self.is_user_setup_complete() or self.is_setupwizard_on():
- # b/116709539 need this to prevent reboot after skip setup wizard
- self.adb.shell(
- "am start -a com.android.setupwizard.EXIT", ignore_status=True
- )
- self.adb.shell(
- f"pm disable {self.get_setupwizard_package_name()}",
- ignore_status=True,
- )
- # Wait up to 5 seconds for user_setup_complete to be updated
- end_time = time.time() + 5
- while time.time() < end_time:
- if self.is_user_setup_complete() or not self.is_setupwizard_on():
- return
-
- # If we failed to exit the setup wizard, set local.prop and reboot
- if not self.is_user_setup_complete() and self.is_setupwizard_on():
- self.adb.shell("echo ro.test_harness=1 > /data/local.prop")
- self.adb.shell("chmod 644 /data/local.prop")
- self.reboot(stop_at_lock_screen=True)
-
- def get_setupwizard_package_name(self):
- """Finds setupwizard package/.activity
-
- Bypass setupwizard or setupwraith depending on device.
-
- Returns:
- packageName/.ActivityName
- """
- packages_to_skip = "'setupwizard|setupwraith'"
- android_package_name = "com.google.android"
- package = self.adb.shell(
- "pm list packages -f | grep -E {} | grep {}".format(
- packages_to_skip, android_package_name
- )
- )
- wizard_package = package.split("=")[1]
- activity = package.split("=")[0].split("/")[-2]
- self.log.info(f"{wizard_package}/.{activity}Activity")
- return f"{wizard_package}/.{activity}Activity"
-
- def push_system_file(self, src_file_path, dst_file_path, push_timeout=300):
- """Pushes a file onto the read-only file system.
-
- For speed, the device is left in root mode with verity disabled after
- this call. To re-enable verity, call ensure_verity_enabled().
-
- Args:
- src_file_path: The path of the file to push.
- dst_file_path: The destination of the file.
- push_timeout: How long to wait for the push to finish.
- Returns:
- Whether or not the push was successful.
- """
- self.adb.ensure_root()
- try:
- self.ensure_verity_disabled()
- self.adb.remount()
- out = self.adb.push(
- f"{src_file_path} {dst_file_path}", timeout=push_timeout
- )
- if "error" in out:
- self.log.error(
- "Unable to push system file %s to %s due to %s",
- src_file_path,
- dst_file_path,
- out,
- )
- return False
- return True
- except Exception as e:
- self.log.error(
- "Unable to push system file %s to %s due to %s",
- src_file_path,
- dst_file_path,
- e,
- )
- return False
-
- def ensure_verity_enabled(self):
- """Ensures that verity is enabled.
-
- If verity is not enabled, this call will reboot the phone. Note that
- this only works on debuggable builds.
- """
- user = self.adb.get_user_id()
- # The below properties will only exist if verity has been enabled.
- system_verity = self.adb.getprop("partition.system.verified")
- vendor_verity = self.adb.getprop("partition.vendor.verified")
- if not system_verity or not vendor_verity:
- self.adb.ensure_root()
- self.adb.enable_verity()
- self.reboot()
- self.adb.ensure_user(user)
-
- def ensure_verity_disabled(self):
- """Ensures that verity is disabled.
-
- If verity is enabled, this call will reboot the phone.
- """
- user = self.adb.get_user_id()
- # The below properties will only exist if verity has been enabled.
- system_verity = self.adb.getprop("partition.system.verified")
- vendor_verity = self.adb.getprop("partition.vendor.verified")
- if system_verity or vendor_verity:
- self.adb.ensure_root()
- self.adb.disable_verity()
- self.reboot()
- self.adb.ensure_user(user)
-
-
-class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
- def process(self, msg, kwargs):
- msg = f"[AndroidDevice|{self.extra['serial']}] {msg}"
- return (msg, kwargs)
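For context, the logger adapter above only prepends the device serial to each message. A minimal sketch of its behavior, assuming the class is importable from the in-tree android_device module (the import path is an assumption):

    import logging

    from antlion.controllers.android_device import AndroidDeviceLoggerAdapter  # assumed path

    log = AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": "ABC123"})
    log.info("rebooting")  # emits "[AndroidDevice|ABC123] rebooting"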
diff --git a/packages/antlion/controllers/android_lib/__init__.py b/packages/antlion/controllers/android_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/android_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/android_lib/errors.py b/packages/antlion/controllers/android_lib/errors.py
deleted file mode 100644
index 79e3949..0000000
--- a/packages/antlion/controllers/android_lib/errors.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import error
-
-
-class AndroidDeviceConfigError(Exception):
- """Raised when AndroidDevice configs are malformatted."""
-
-
-class AndroidDeviceError(error.ActsError):
- """Raised when there is an error in AndroidDevice."""
diff --git a/packages/antlion/controllers/android_lib/events.py b/packages/antlion/controllers/android_lib/events.py
deleted file mode 100644
index 811dc08..0000000
--- a/packages/antlion/controllers/android_lib/events.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion.event.event import Event
-
-
-class AndroidEvent(Event):
- """The base class for AndroidDevice-related events."""
-
- def __init__(self, android_device):
- self.android_device = android_device
-
- @property
- def ad(self):
- return self.android_device
-
-
-class AndroidStartServicesEvent(AndroidEvent):
- """The event posted when an AndroidDevice begins its services."""
-
-
-class AndroidStopServicesEvent(AndroidEvent):
- """The event posted when an AndroidDevice ends its services."""
-
-
-class AndroidRebootEvent(AndroidEvent):
- """The event posted when an AndroidDevice has rebooted."""
-
-
-class AndroidDisconnectEvent(AndroidEvent):
- """The event posted when an AndroidDevice has disconnected."""
-
-
-class AndroidReconnectEvent(AndroidEvent):
- """The event posted when an AndroidDevice has disconnected."""
-
-
-class AndroidBugReportEvent(AndroidEvent):
- """The event posted when an AndroidDevice captures a bugreport."""
-
- def __init__(self, android_device, bugreport_dir):
- super().__init__(android_device)
- self.bugreport_dir = bugreport_dir
diff --git a/packages/antlion/controllers/android_lib/logcat.py b/packages/antlion/controllers/android_lib/logcat.py
deleted file mode 100644
index 724a99b..0000000
--- a/packages/antlion/controllers/android_lib/logcat.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import re
-
-from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import LogStyles
-from antlion.libs.proc.process import Process
-
-TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
-
-
-class TimestampTracker(object):
- """Stores the last timestamp outputted by the Logcat process."""
-
- def __init__(self):
- self._last_timestamp = None
-
- @property
- def last_timestamp(self):
- return self._last_timestamp
-
- def read_output(self, message):
- """Reads the message and parses all timestamps from it."""
- all_timestamps = re.findall(TIMESTAMP_REGEX, message)
- if len(all_timestamps) > 0:
- self._last_timestamp = all_timestamps[0]
-
-
-def _get_log_level(message):
- """Returns the log level for the given message."""
- if message.startswith("-") or len(message) < 37:
- return logging.ERROR
- else:
- log_level = message[36]
- if log_level in ("V", "D"):
- return logging.DEBUG
- elif log_level == "I":
- return logging.INFO
- elif log_level == "W":
- return logging.WARNING
- elif log_level == "E":
- return logging.ERROR
- return logging.NOTSET
-
-
-def _log_line_func(log, timestamp_tracker):
- """Returns a lambda that logs a message to the given logger."""
-
- def log_line(message):
- timestamp_tracker.read_output(message)
- log.log(_get_log_level(message), message)
-
- return log_line
-
-
-def _on_retry(serial, extra_params, timestamp_tracker):
- def on_retry(_):
- begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
- additional_params = extra_params or ""
-
- return (
- f"adb -s {serial} logcat -T {begin_at} -v year {additional_params}"
- )
-
- return on_retry
-
-
-def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""):
- """Creates a Logcat Process that automatically attempts to reconnect.
-
- Args:
- serial: The serial of the device to read the logcat of.
- logcat_dir: The directory used for logcat file output.
- extra_params: Any additional params to be added to the logcat cmdline.
-
- Returns:
- An antlion.libs.proc.process.Process object.
- """
- logger = log_stream.create_logger(
- f"adblog_{serial}",
- log_name=serial,
- subcontext=logcat_dir,
- log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
- )
- process = Process(f"adb -s {serial} logcat -T 1 -v year {extra_params}")
- timestamp_tracker = TimestampTracker()
- process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
- process.set_on_terminate_callback(
- _on_retry(serial, extra_params, timestamp_tracker)
- )
- return process
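A minimal sketch of how the reconnect logic above resumes logcat from the last seen timestamp; the serial, extra params, and log line are assumptions, and the import mirrors the module being deleted (it uses the private helper _on_retry purely for illustration):

    from antlion.controllers.android_lib.logcat import TimestampTracker, _on_retry

    tracker = TimestampTracker()
    tracker.read_output("2023-01-02 03:04:05.678  1234  1234 I wpa_supplicant: connected")
    rebuild_cmd = _on_retry("ABC123", "-b main", tracker)
    print(rebuild_cmd(None))
    # adb -s ABC123 logcat -T "2023-01-02 03:04:05.678" -v year -b main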
diff --git a/packages/antlion/controllers/android_lib/services.py b/packages/antlion/controllers/android_lib/services.py
deleted file mode 100644
index 47febef..0000000
--- a/packages/antlion/controllers/android_lib/services.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion.controllers.android_lib import errors
-from antlion.controllers.android_lib import events as android_events
-from antlion.event import event_bus
-
-
-class AndroidService(object):
- """The base class for Android long-running services.
-
- The _start method is registered to an AndroidStartServicesEvent, and
- the _stop method is registered to an AndroidStopServicesEvent.
-
- Attributes:
- ad: The AndroidDevice instance associated with the service.
- serial: The serial of the device.
- _registration_ids: List of registration IDs for the event subscriptions.
- """
-
- def __init__(self, ad):
- self.ad = ad
- self._registration_ids = []
-
- @property
- def serial(self):
- return self.ad.serial
-
- def register(self):
- """Registers the _start and _stop methods to their corresponding
- events.
- """
-
- def check_serial(event):
- return self.serial == event.ad.serial
-
- self._registration_ids = [
- event_bus.register(
- android_events.AndroidStartServicesEvent,
- self._start,
- filter_fn=check_serial,
- ),
- event_bus.register(
- android_events.AndroidStopServicesEvent,
- self._stop,
- filter_fn=check_serial,
- ),
- ]
-
- def unregister(self):
- """Unregisters all subscriptions in this service."""
- event_bus.unregister_all(from_list=self._registration_ids)
- self._registration_ids.clear()
-
- def _start(self, start_event):
- """Start the service. Called upon an AndroidStartServicesEvent.
-
- Args:
- start_event: The AndroidStartServicesEvent instance.
- """
- raise NotImplementedError
-
- def _stop(self, stop_event):
- """Stop the service. Called upon an AndroidStopServicesEvent.
-
- Args:
- stop_event: The AndroidStopServicesEvent instance.
- """
- raise NotImplementedError
-
-
-class AdbLogcatService(AndroidService):
- """Service for adb logcat."""
-
- def _start(self, _):
- self.ad.start_adb_logcat()
-
- def _stop(self, _):
- self.ad.stop_adb_logcat()
-
-
-class Sl4aService(AndroidService):
- """Service for SL4A."""
-
- def _start(self, start_event):
- if self.ad.skip_sl4a:
- return
-
- if not self.ad.is_sl4a_installed():
- self.ad.log.error("sl4a.apk is not installed")
- raise errors.AndroidDeviceError(
- "The required sl4a.apk is not installed", serial=self.serial
- )
- if not self.ad.ensure_screen_on():
- self.ad.log.error("User window cannot come up")
- raise errors.AndroidDeviceError(
- "User window cannot come up", serial=self.serial
- )
-
- droid, ed = self.ad.get_droid()
- ed.start()
-
- def _stop(self, _):
- self.ad.terminate_all_sessions()
- self.ad._sl4a_manager.stop_service()
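A hedged sketch of how a custom service would plug into the start/stop events handled by AndroidService above; ScreenOnService is hypothetical, and android_device stands in for an existing AndroidDevice instance:

    from antlion.controllers.android_lib.services import AndroidService

    class ScreenOnService(AndroidService):
        """Keeps the screen awake while device services are running."""

        def _start(self, start_event):
            self.ad.ensure_screen_on()

        def _stop(self, stop_event):
            self.ad.go_to_sleep()

    service = ScreenOnService(android_device)
    service.register()  # subscribes _start/_stop, filtered by this device's serial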
diff --git a/packages/antlion/controllers/ap_lib/__init__.py b/packages/antlion/controllers/ap_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/ap_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/ap_lib/ap_get_interface.py b/packages/antlion/controllers/ap_lib/ap_get_interface.py
deleted file mode 100644
index fac83f1..0000000
--- a/packages/antlion/controllers/ap_lib/ap_get_interface.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import TYPE_CHECKING
-
-from antlion.runner import CalledProcessError
-
-if TYPE_CHECKING:
- from antlion.controllers.access_point import AccessPoint
-
-GET_ALL_INTERFACE = "ls /sys/class/net"
-GET_VIRTUAL_INTERFACE = "ls /sys/devices/virtual/net"
-BRCTL_SHOW = "brctl show"
-
-
-class ApInterfacesError(Exception):
- """Error related to AP interfaces."""
-
-
-class ApInterfaces(object):
- """Class to get network interface information for the device."""
-
- def __init__(
- self, ap: "AccessPoint", wan_interface_override: str | None = None
- ) -> None:
- """Initialize the ApInterface class.
-
- Args:
- ap: the ap object within ACTS
- wan_interface_override: wan interface to use if specified by config
- """
- self.ssh = ap.ssh
- self.wan_interface_override = wan_interface_override
-
- def get_all_interface(self) -> list[str]:
- """Get all network interfaces on the device.
-
- Returns:
- interfaces_all: list of all the network interfaces on device
- """
- output = self.ssh.run(GET_ALL_INTERFACE)
- interfaces_all = output.stdout.decode("utf-8").split("\n")
-
- return interfaces_all
-
- def get_virtual_interface(self) -> list[str]:
- """Get all virtual interfaces on the device.
-
- Returns:
- interfaces_virtual: list of all the virtual interfaces on device
- """
- output = self.ssh.run(GET_VIRTUAL_INTERFACE)
- interfaces_virtual = output.stdout.decode("utf-8").split("\n")
-
- return interfaces_virtual
-
- def get_physical_interface(self) -> list[str]:
- """Get all the physical interfaces of the device.
-
- Get all physical interfaces such as eth ports and wlan ports
-
- Returns:
- interfaces_phy: list of all the physical interfaces
- """
- interfaces_all = self.get_all_interface()
- interfaces_virtual = self.get_virtual_interface()
- interfaces_phy = list(set(interfaces_all) - set(interfaces_virtual))
-
- return interfaces_phy
-
- def get_bridge_interface(self) -> list[str]:
- """Get all the bridge interfaces of the device.
-
- Returns:
- interfaces_bridge: the list of bridge interfaces on the device
-
- Raises:
- ApInterfacesError: failed to run brctl
- """
- try:
- output = self.ssh.run(BRCTL_SHOW)
- except CalledProcessError as e:
- raise ApInterfacesError(f'failed to execute "{BRCTL_SHOW}"') from e
-
- lines = output.stdout.decode("utf-8").split("\n")
- interfaces_bridge = []
- for line in lines:
- interfaces_bridge.append(line.split("\t")[0])
- interfaces_bridge.pop(0)
- return [x for x in interfaces_bridge if x != ""]
-
- def get_wlan_interface(self) -> tuple[str, str]:
- """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
-
- Returns:
- interfaces_wlan: all wlan interfaces
- Raises:
- ApInterfacesError: Missing at least one WLAN interface
- """
- wlan_2g = None
- wlan_5g = None
- interfaces_phy = self.get_physical_interface()
- for iface in interfaces_phy:
- output = self.ssh.run(f"iwlist {iface} freq")
- if (
- b"Channel 06" in output.stdout
- and b"Channel 36" not in output.stdout
- ):
- wlan_2g = iface
- elif (
- b"Channel 36" in output.stdout
- and b"Channel 06" not in output.stdout
- ):
- wlan_5g = iface
-
- if wlan_2g is None or wlan_5g is None:
- raise ApInterfacesError("Missing at least one WLAN interface")
-
- return (wlan_2g, wlan_5g)
-
- def get_wan_interface(self) -> str:
- """Get the WAN interface which has internet connectivity. If a wan
- interface is already specified, return that instead.
-
- Returns:
- wan: the WAN interface with internet connectivity
- Raises:
- ApInterfacesError: no running WAN can be found
- """
- if self.wan_interface_override:
- return self.wan_interface_override
-
- wan = None
- interfaces_phy = self.get_physical_interface()
- interfaces_wlan = self.get_wlan_interface()
- interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
- for iface in interfaces_eth:
- network_status = self.check_ping(iface)
- if network_status == 1:
- wan = iface
- break
- if wan:
- return wan
-
- output = self.ssh.run("ifconfig")
- interfaces_all = output.stdout.decode("utf-8").split("\n")
- logging.info(f"IFCONFIG output = {interfaces_all}")
-
- raise ApInterfacesError("No WAN interface available")
-
- def get_lan_interface(self) -> str | None:
- """Get the LAN interface connecting to local devices.
-
- Returns:
- lan: the running LAN interface of the device, or None if
- nothing was found.
- """
- lan = None
- interfaces_phy = self.get_physical_interface()
- interfaces_wlan = self.get_wlan_interface()
- interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
- interface_wan = self.get_wan_interface()
- interfaces_eth.remove(interface_wan)
- for iface in interfaces_eth:
- output = self.ssh.run(f"ifconfig {iface}")
- if b"RUNNING" in output.stdout:
- lan = iface
- break
- return lan
-
- def check_ping(self, iface: str) -> int:
- """Check the ping status on specific interface to determine the WAN.
-
- Args:
- iface: the specific interface to check
- Returns:
- network_status: the connectivity status of the interface
- """
- try:
- self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8")
- return 1
- except CalledProcessError:
- return 0
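A brief usage sketch of the interface discovery helpers above, assuming access_point is an already-connected AccessPoint controller:

    from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces

    interfaces = ApInterfaces(access_point)
    wlan_2g, wlan_5g = interfaces.get_wlan_interface()
    wan = interfaces.get_wan_interface()
    lan = interfaces.get_lan_interface()  # None when no LAN port is running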
diff --git a/packages/antlion/controllers/ap_lib/ap_iwconfig.py b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
deleted file mode 100644
index d5b4556..0000000
--- a/packages/antlion/controllers/ap_lib/ap_iwconfig.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from antlion.controllers.access_point import AccessPoint
-
-
-class ApIwconfigError(Exception):
- """Error related to configuring the wireless interface via iwconfig."""
-
-
-class ApIwconfig(object):
- """Class to configure wireless interface via iwconfig"""
-
- PROGRAM_FILE = "/usr/local/sbin/iwconfig"
-
- def __init__(self, ap: "AccessPoint") -> None:
- """Initialize the ApIwconfig class.
-
- Args:
- ap: the ap object within ACTS
- """
- self.ssh = ap.ssh
-
- def ap_iwconfig(
- self, interface: str, arguments: str | None = None
- ) -> subprocess.CompletedProcess[bytes]:
- """Configure the wireless interface using iwconfig.
-
- Returns:
- output: the output of the command, if any
- """
- return self.ssh.run(f"{self.PROGRAM_FILE} {interface} {arguments}")
diff --git a/packages/antlion/controllers/ap_lib/bridge_interface.py b/packages/antlion/controllers/ap_lib/bridge_interface.py
deleted file mode 100644
index 6d9bc52..0000000
--- a/packages/antlion/controllers/ap_lib/bridge_interface.py
+++ /dev/null
@@ -1,121 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-
-from antlion.controllers.utils_lib.ssh.connection import SshConnection
-from antlion.runner import CalledProcessError
-
-_BRCTL = "brctl"
-BRIDGE_NAME = "br-lan"
-CREATE_BRIDGE = f"{_BRCTL} addbr {BRIDGE_NAME}"
-DELETE_BRIDGE = f"{_BRCTL} delbr {BRIDGE_NAME}"
-BRING_DOWN_BRIDGE = f"ifconfig {BRIDGE_NAME} down"
-
-
-class BridgeInterfaceConfigs(object):
- """Configs needed for creating bridge interface between LAN and WLAN."""
-
- def __init__(self, iface_wlan: str, iface_lan: str, bridge_ip: str) -> None:
- """Set bridge interface configs based on the channel info.
-
- Args:
- iface_wlan: the wlan interface as part of the bridge
- iface_lan: the ethernet LAN interface as part of the bridge
- bridge_ip: the ip address assigned to the bridge interface
- """
- self.iface_wlan = iface_wlan
- self.iface_lan = iface_lan
- self.bridge_ip = bridge_ip
-
-
-class BridgeInterface(object):
- """Class object for bridge interface betwen WLAN and LAN"""
-
- def __init__(self, ssh: SshConnection) -> None:
- """Initialize the BridgeInterface class.
-
- A bridge interface will be added between the ethernet LAN port and the
- WLAN port.
-
- Args:
- ssh: SSH connection to the AP
- """
- self.ssh = ssh
-
- def startup(self, brconfigs: BridgeInterfaceConfigs) -> None:
- """Start up the bridge interface.
-
- Args:
- brconfigs: the bridge interface config, type BridgeInterfaceConfigs
- """
-
- logging.info("Create bridge interface between LAN and WLAN")
- # Create the bridge
- try:
- self.ssh.run(CREATE_BRIDGE)
- except CalledProcessError:
- logging.warning(
- f"Bridge interface {BRIDGE_NAME} already exists, no action needed"
- )
-
- # Enable 4addr mode on for the wlan interface
- ENABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr on"
- try:
- self.ssh.run(ENABLE_4ADDR)
- except CalledProcessError:
- logging.warning(
- f"4addr is already enabled on {brconfigs.iface_wlan}"
- )
-
- # Add both LAN and WLAN interfaces to the bridge interface
- for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
- ADD_INTERFACE = f"{_BRCTL} addif {BRIDGE_NAME} {interface}"
- try:
- self.ssh.run(ADD_INTERFACE)
- except CalledProcessError:
- logging.warning(
- f"{interface} has already been added to {BRIDGE_NAME}"
- )
- time.sleep(5)
-
- # Set IP address on the bridge interface to bring it up
- SET_BRIDGE_IP = f"ifconfig {BRIDGE_NAME} {brconfigs.bridge_ip}"
- self.ssh.run(SET_BRIDGE_IP)
- time.sleep(2)
-
- # Bridge interface is up
- logging.info("Bridge interface is up and running")
-
- def teardown(self, brconfigs: BridgeInterfaceConfigs) -> None:
- """Tear down the bridge interface.
-
- Args:
- brconfigs: the bridge interface config, type BridgeInterfaceConfigs
- """
- logging.info("Bringing down the bridge interface")
- # Delete the bridge interface
- self.ssh.run(BRING_DOWN_BRIDGE)
- time.sleep(1)
- self.ssh.run(DELETE_BRIDGE)
-
- # Bring down wlan interface and disable 4addr mode
- BRING_DOWN_WLAN = f"ifconfig {brconfigs.iface_wlan} down"
- self.ssh.run(BRING_DOWN_WLAN)
- time.sleep(2)
- DISABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr off"
- self.ssh.run(DISABLE_4ADDR)
- time.sleep(1)
- logging.info("Bridge interface is down")
diff --git a/packages/antlion/controllers/ap_lib/dhcp_config.py b/packages/antlion/controllers/ap_lib/dhcp_config.py
deleted file mode 100644
index a354480..0000000
--- a/packages/antlion/controllers/ap_lib/dhcp_config.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-import ipaddress
-from ipaddress import IPv4Address, IPv4Network
-
-_ROUTER_DNS = "8.8.8.8, 4.4.4.4"
-
-
-class Subnet(object):
- """Configs for a subnet on the dhcp server.
-
- Attributes:
- network: ipaddress.IPv4Network, the network that this subnet is in.
- start: ipaddress.IPv4Address, the start ip address.
- end: ipaddress.IPv4Address, the end ip address.
- router: The router to give to all hosts in this subnet.
- lease_time: The lease time of all hosts in this subnet.
- additional_parameters: A dictionary corresponding to DHCP parameters.
- additional_options: A dictionary corresponding to DHCP options.
- """
-
- def __init__(
- self,
- subnet: IPv4Network,
- start: IPv4Address | None = None,
- end: IPv4Address | None = None,
- router: IPv4Address | None = None,
- lease_time: int | None = None,
- additional_parameters: dict[str, str] = {},
- additional_options: dict[str, int | str] = {},
- ):
- """
- Args:
- subnet: ipaddress.IPv4Network, The address space of the subnetwork
- served by the DHCP server.
- start: ipaddress.IPv4Address, The start of the address range to
- give hosts in this subnet. If not given, the second ip in
- the network is used, under the assumption that the first
- address is the router.
- end: ipaddress.IPv4Address, The end of the address range to give
- hosts. If not given then the address prior to the broadcast
- address (i.e. the second to last ip in the network) is used.
- router: ipaddress.IPv4Address, The router hosts should use in this
- subnet. If not given the first ip in the network is used.
- lease_time: int, The amount of lease time in seconds
- hosts in this subnet have.
- additional_parameters: A dictionary corresponding to DHCP parameters.
- additional_options: A dictionary corresponding to DHCP options.
- """
- self.network = subnet
-
- if start:
- self.start = start
- else:
- self.start = self.network[2]
-
- if self.start not in self.network:
- raise ValueError("The start range is not in the subnet.")
- if self.start.is_reserved:
- raise ValueError("The start of the range cannot be reserved.")
-
- if end:
- self.end = end
- else:
- self.end = self.network[-2]
-
- if self.end not in self.network:
- raise ValueError("The end range is not in the subnet.")
- if self.end.is_reserved:
- raise ValueError("The end of the range cannot be reserved.")
- if self.end < self.start:
- raise ValueError(
- "The end must be an address larger than the start."
- )
-
- if router:
- if router >= self.start and router <= self.end:
- raise ValueError("Router must not be in pool range.")
- if router not in self.network:
- raise ValueError("Router must be in the given subnet.")
-
- self.router = router
- else:
- # TODO: Use some more clever logic so that we don't have to search
- # every host potentially.
- # This is especially important if we support IPv6 networks in this
- # configuration. The improved logic that we can use is:
- # a) erroring out if start and end encompass the whole network, and
- # b) picking any address before self.start or after self.end.
- for host in self.network.hosts():
- if host < self.start or host > self.end:
- self.router = host
- break
-
- if not hasattr(self, "router"):
- raise ValueError("No useable host found.")
-
- self.lease_time = lease_time
- self.additional_parameters = additional_parameters
- self.additional_options = additional_options
- if "domain-name-servers" not in self.additional_options:
- self.additional_options["domain-name-servers"] = _ROUTER_DNS
-
-
-class StaticMapping(object):
- """Represents a static dhcp host.
-
- Attributes:
- identifier: The ID of the host (usually the MAC address,
- e.g. 00:11:22:33:44:55).
- address: ipaddress.IPv4Address, The ipv4 address to give the host.
- lease_time: How long to give a lease to this host.
- """
-
- def __init__(
- self,
- identifier: str,
- address: ipaddress.IPv4Address,
- lease_time: int | None = None,
- ) -> None:
- self.identifier = identifier
- self.ipv4_address = address
- self.lease_time = lease_time
-
-
-class DhcpConfig(object):
- """The configs for a dhcp server.
-
- Attributes:
- subnets: A list of all subnets for the dhcp server to create.
- static_mappings: A list of static host addresses.
- default_lease_time: The default time for a lease.
- max_lease_time: The max time to allow a lease.
- """
-
- def __init__(
- self,
- subnets: list[Subnet] | None = None,
- static_mappings: list[StaticMapping] | None = None,
- default_lease_time: int = 600,
- max_lease_time: int = 7200,
- ) -> None:
- self.subnets = copy.deepcopy(subnets) if subnets else []
- self.static_mappings = (
- copy.deepcopy(static_mappings) if static_mappings else []
- )
- self.default_lease_time = default_lease_time
- self.max_lease_time = max_lease_time
-
- def render_config_file(self) -> str:
- """Renders the config parameters into a format compatible with
- the ISC DHCP server (dhcpd).
- """
- lines = []
-
- if self.default_lease_time:
- lines.append(f"default-lease-time {self.default_lease_time};")
- if self.max_lease_time:
- lines.append(f"max-lease-time {self.max_lease_time};")
-
- for subnet in self.subnets:
- address = subnet.network.network_address
- mask = subnet.network.netmask
- router = subnet.router
- start = subnet.start
- end = subnet.end
- lease_time = subnet.lease_time
- additional_parameters = subnet.additional_parameters
- additional_options = subnet.additional_options
-
- lines.append("subnet %s netmask %s {" % (address, mask))
- lines.append("\tpool {")
- lines.append(f"\t\toption subnet-mask {mask};")
- lines.append(f"\t\toption routers {router};")
- lines.append(f"\t\trange {start} {end};")
- if lease_time:
- lines.append(f"\t\tdefault-lease-time {lease_time};")
- lines.append(f"\t\tmax-lease-time {lease_time};")
- for param, value in additional_parameters.items():
- lines.append(f"\t\t{param} {value};")
- for option, option_value in additional_options.items():
- lines.append(f"\t\toption {option} {option_value};")
- lines.append("\t}")
- lines.append("}")
-
- for mapping in self.static_mappings:
- identifier = mapping.identifier
- fixed_address = mapping.ipv4_address
- host_fake_name = f"host{identifier.replace(':', '')}"
- lease_time = mapping.lease_time
-
- lines.append("host %s {" % host_fake_name)
- lines.append(f"\thardware ethernet {identifier};")
- lines.append(f"\tfixed-address {fixed_address};")
- if lease_time:
- lines.append(f"\tdefault-lease-time {lease_time};")
- lines.append(f"\tmax-lease-time {lease_time};")
- lines.append("}")
-
- config_str = "\n".join(lines)
-
- return config_str
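A small sketch of what render_config_file produces for a single /24 subnet with the default start, end, and router selection described above; the exact whitespace is illustrative:

    import ipaddress

    from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet

    subnet = Subnet(ipaddress.IPv4Network("192.168.9.0/24"))
    print(DhcpConfig(subnets=[subnet]).render_config_file())
    # default-lease-time 600;
    # max-lease-time 7200;
    # subnet 192.168.9.0 netmask 255.255.255.0 {
    #     pool {
    #         option subnet-mask 255.255.255.0;
    #         option routers 192.168.9.1;
    #         range 192.168.9.2 192.168.9.254;
    #         option domain-name-servers 8.8.8.8, 4.4.4.4;
    #     }
    # }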
diff --git a/packages/antlion/controllers/ap_lib/dhcp_server.py b/packages/antlion/controllers/ap_lib/dhcp_server.py
deleted file mode 100644
index 1a28ed9..0000000
--- a/packages/antlion/controllers/ap_lib/dhcp_server.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-
-from mobly import logger
-from tenacity import (
- retry,
- retry_if_exception_type,
- stop_after_attempt,
- wait_fixed,
-)
-
-from antlion.controllers.ap_lib.dhcp_config import DhcpConfig
-from antlion.controllers.utils_lib.commands import shell
-from antlion.runner import Runner
-
-
-class Error(Exception):
- """An error caused by the dhcp server."""
-
-
-class NoInterfaceError(Exception):
- """Error thrown when the dhcp server has no interfaces on any subnet."""
-
-
-class DhcpServer(object):
- """Manages the dhcp server program.
-
- Only one of these can run in an environment at a time.
-
- Attributes:
- config: The dhcp server configuration that is being used.
- """
-
- PROGRAM_FILE = "dhcpd"
-
- def __init__(
- self, runner: Runner, interface: str, working_dir: str = "/tmp"
- ):
- """
- Args:
- runner: Object that has run_async and run methods for running
- shell commands.
- interface: string, The name of the interface to use.
- working_dir: The directory to work out of.
- """
- self._log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[DHCP Server|{interface}]",
- },
- )
-
- self._runner = runner
- self._working_dir = working_dir
- self._shell = shell.ShellCommand(runner)
- self._stdio_log_file = f"{working_dir}/dhcpd_{interface}.log"
- self._config_file = f"{working_dir}/dhcpd_{interface}.conf"
- self._lease_file = f"{working_dir}/dhcpd_{interface}.leases"
- self._pid_file = f"{working_dir}/dhcpd_{interface}.pid"
- self._identifier: int | None = None
-
- # There is a slight timing issue where, if the proc filesystem in Linux
- # doesn't get updated by the time this is called, a NoInterfaceError will
- # occur. Adding this retry makes the error go away, but a warning is still
- # shown if the problem occurs. The error seems to happen more with bridge
- # interfaces than standard interfaces.
- @retry(
- retry=retry_if_exception_type(NoInterfaceError),
- stop=stop_after_attempt(3),
- wait=wait_fixed(1),
- )
- def start(self, config: DhcpConfig, timeout_sec: int = 60) -> None:
- """Starts the dhcp server.
-
- Starts the dhcp server daemon and runs it in the background.
-
- Args:
- config: Configs to start the dhcp server with.
- timeout_sec: Seconds to wait for the dhcp server to come up.
-
- Raises:
- Error: Raised when a dhcp server error is found.
- """
- if self.is_alive():
- self.stop()
-
- self._write_configs(config)
- self._shell.delete_file(self._stdio_log_file)
- self._shell.delete_file(self._pid_file)
- self._shell.touch_file(self._lease_file)
-
- dhcpd_command = (
- f"{self.PROGRAM_FILE} "
- f'-cf "{self._config_file}" '
- f"-lf {self._lease_file} "
- f'-pf "{self._pid_file}" '
- "-f -d"
- )
-
- base_command = f'cd "{self._working_dir}"; {dhcpd_command}'
- job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1'
- self._identifier = int(self._runner.run_async(job_str).stdout)
-
- try:
- self._wait_for_process(timeout=timeout_sec)
- self._wait_for_server(timeout=timeout_sec)
- except:
- self._log.warning("Failed to start DHCP server.")
- self._log.info(
- f"DHCP configuration:\n{config.render_config_file()}\n"
- )
- self._log.info(f"DHCP logs:\n{self.get_logs()}\n")
- self.stop()
- raise
-
- def stop(self) -> None:
- """Kills the daemon if it is running."""
- if self._identifier and self.is_alive():
- self._shell.kill(self._identifier)
- self._identifier = None
-
- def is_alive(self) -> bool:
- """
- Returns:
- True if the daemon is running.
- """
- if self._identifier:
- return self._shell.is_alive(self._identifier)
- return False
-
- def get_logs(self) -> str:
- """Pulls the log files from where dhcp server is running.
-
- Returns:
- A string of the dhcp server logs.
- """
- return self._shell.read_file(self._stdio_log_file)
-
- def _wait_for_process(self, timeout: float = 60) -> None:
- """Waits for the process to come up.
-
- Waits until the dhcp server process is found running, or there is
- a timeout. If the program never comes up then the log file
- will be scanned for errors.
-
- Raises: See _scan_for_errors
- """
- start_time = time.time()
- while time.time() - start_time < timeout and not self.is_alive():
- self._scan_for_errors(False)
- time.sleep(0.1)
-
- self._scan_for_errors(True)
-
- def _wait_for_server(self, timeout: float = 60) -> None:
- """Waits for dhcp server to report that the server is up.
-
- Waits until dhcp server says the server has been brought up or an
- error occurs.
-
- Raises: see _scan_for_errors
- """
- start_time = time.time()
- while time.time() - start_time < timeout:
- success = self._shell.search_file(
- "Wrote [0-9]* leases to leases file", self._stdio_log_file
- )
- if success:
- return
-
- self._scan_for_errors(True)
-
- def _scan_for_errors(self, should_be_up: bool) -> None:
- """Scans the dhcp server log for any errors.
-
- Args:
- should_be_up: If true then dhcp server is expected to be alive.
- If it is found not alive while this is true an error
- is thrown.
-
- Raises:
- Error: Raised when a dhcp server error is found.
- """
- # If this is checked last we can run into a race condition where while
- # scanning the log the process has not died, but after scanning it
- # has. If this were checked last in that condition then the wrong
- # error will be thrown. To prevent this we gather the alive state first
- # so that if it is dead it will definitely give the right error before
- # just giving a generic one.
- is_dead = not self.is_alive()
-
- no_interface = self._shell.search_file(
- "Not configured to listen on any interfaces", self._stdio_log_file
- )
- if no_interface:
- raise NoInterfaceError(
- "Dhcp does not contain a subnet for any of the networks the"
- " current interfaces are on."
- )
-
- if should_be_up and is_dead:
- raise Error("Dhcp server failed to start.", self)
-
- def _write_configs(self, config: DhcpConfig) -> None:
- """Writes the configs to the dhcp server config file."""
- self._shell.delete_file(self._config_file)
- config_str = config.render_config_file()
- self._shell.write_file(self._config_file, config_str)
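A hedged sketch tying DhcpServer to the config classes above; the runner and interface name are assumptions:

    from antlion.controllers.ap_lib.dhcp_server import DhcpServer

    server = DhcpServer(runner, interface="eth1")  # runner: e.g. an SshConnection
    server.start(dhcp_config)  # dhcp_config: a DhcpConfig as sketched earlier
    assert server.is_alive()
    print(server.get_logs())
    server.stop()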
diff --git a/packages/antlion/controllers/ap_lib/extended_capabilities.py b/packages/antlion/controllers/ap_lib/extended_capabilities.py
deleted file mode 100644
index 4570409..0000000
--- a/packages/antlion/controllers/ap_lib/extended_capabilities.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import IntEnum, unique
-
-
-@unique
-class ExtendedCapability(IntEnum):
- """All extended capabilities present in IEEE 802.11-2020 Table 9-153.
-
- Each name has a value corresponding to that extended capability's bit offset
- in the specification's extended capabilities field.
-
- Note that most extended capabilities are represented by a single bit, which
- indicates whether the extended capability is advertised by the STA; but
- some are represented by multiple bits. In the enum, each extended capability
- has the value of its offset; comments indicate capabilities that use
- multiple bits.
- """
-
- TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0
- GLK = 1
- EXTENDED_CHANNEL_SWITCHING = 2
- GLK_GCR = 3
- PSMP_CAPABILITY = 4
- # 5 reserved
- S_PSMP_SUPPORT = 6
- EVENT = 7
- DIAGNOSTICS = 8
- MULTICAST_DIAGNOSTICS = 9
- LOCATION_TRACKING = 10
- FMS = 11
- PROXY_ARP_SERVICE = 12
- COLLOCATED_INTERFERENCE_REPORTING = 13
- CIVIC_LOCATION = 14
- GEOSPATIAL_LOCATION = 15
- TFS = 16
- WNM_SLEEP_MODE = 17
- TIM_BROADCAST = 18
- BSS_TRANSITION = 19
- QOS_TRAFFIC_CAPABILITY = 20
- AC_STATION_COUNT = 21
- MULTIPLE_BSSID = 22
- TIMING_MEASUREMENT = 23
- CHANNEL_USAGE = 24
- SSID_LIST = 25
- DMS = 26
- UTC_TSF_OFFSET = 27
- TPU_BUFFER_STA_SUPPORT = 28
- TDLS_PEER_PSM_SUPPORT = 29
- TDLS_CHANNEL_SWITCHING = 30
- INTERWORKING = 31
- QOS_MAP = 32
- EBR = 33
- SSPN_INTERFACE = 34
- # 35 reserved
- MSGCF_CAPABILITY = 36
- TDLS_SUPPORT = 37
- TDLS_PROHIBITED = 38
- TDLS_CHANNEL_SWITCHING_PROHIBITED = 39
- REJECT_UNADMITTED_FRAME = 40
- SERVICE_INTERVAL_GRANULARITY = 41
- # Bits 41-43 contain SERVICE_INTERVAL_GRANULARITY value
- IDENTIFIER_LOCATION = 44
- U_APSD_COEXISTENCE = 45
- WNM_NOTIFICATION = 46
- QAB_CAPABILITY = 47
- UTF_8_SSID = 48
- QMF_ACTIVATED = 49
- QMF_RECONFIGURATION_ACTIVATED = 50
- ROBUST_AV_STREAMING = 51
- ADVANCED_GCR = 52
- MESH_GCR = 53
- SCS = 54
- QLOAD_REPORT = 55
- ALTERNATE_EDCA = 56
- UNPROTECTED_TXOP_NEGOTIATION = 57
- PROTECTED_TXOP_NEGOTIATION = 58
- # 59 reserved
- PROTECTED_QLOAD_REPORT = 60
- TDLS_WIDER_BANDWIDTH = 61
- OPERATING_MODE_NOTIFICATION = 62
- MAX_NUMBER_OF_MSDUS_IN_A_MSDU = 63
- # 63-64 contain MAX_NUMBER_OF_MSDUS_IN_A_MSDU value
- CHANNEL_SCHEDULE_MANAGEMENT = 65
- GEODATABASE_INBAND_ENABLING_SIGNAL = 66
- NETWORK_CHANNEL_CONTROL = 67
- WHITE_SPACE_MAP = 68
- CHANNEL_AVAILABILITY_QUERY = 69
- FINE_TIMING_MEASUREMENT_RESPONDER = 70
- FINE_TIMING_MEASUREMENT_INITIATOR = 71
- FILS_CAPABILITY = 72
- EXTENDED_SPECTRUM_MANAGEMENT_CAPABLE = 73
- FUTURE_CHANNEL_GUIDANCE = 74
- PAD = 75
- # 76-79 reserved
- COMPLETE_LIST_OF_NON_TX_BSSID_PROFILES = 80
- SAE_PASSWORD_IDENTIFIERS_IN_USE = 81
- SAE_PASSWORD_IDENTIFIERS_USED_EXCLUSIVELY = 82
- # 83 reserved
- BEACON_PROTECTION_ENABLED = 84
- MIRRORED_SCS = 85
- # 86 reserved
- LOCAL_MAC_ADDRESS_POLICY = 87
- # 88-n reserved
-
-
-def _offsets(ext_cap_offset: ExtendedCapability) -> tuple[int, int]:
- """For given capability, return the byte and bit offsets within the field.
-
- 802.11 divides the extended capability field into bytes, as does the
- ExtendedCapabilities class below. This function returns the index of the
- byte that contains the given extended capability, as well as the bit offset
- inside that byte (all offsets zero-indexed). For example,
- MULTICAST_DIAGNOSTICS is bit 9, which is within byte 1 at bit offset 1.
- """
- byte_offset = ext_cap_offset // 8
- bit_offset = ext_cap_offset % 8
- return byte_offset, bit_offset
-
-
-class ExtendedCapabilities:
- """Extended capability parsing and representation.
-
- See IEEE 802.11-2020 9.4.2.26.
- """
-
- def __init__(self, ext_cap: bytearray = bytearray()):
- """Represent the given extended capabilities field.
-
- Args:
- ext_cap: IEEE 802.11-2020 9.4.2.26 extended capabilities field.
- Default is an empty field, meaning no extended capabilities are
- advertised.
- """
- self._ext_cap = ext_cap
-
- def _capability_advertised(self, ext_cap: ExtendedCapability) -> bool:
- """Whether an extended capability is advertised.
-
- Args:
- ext_cap: an extended capability.
- Returns:
- True if the bit is present and its value is 1, otherwise False.
- Raises:
- NotImplementedError: for extended capabilities that span more than
- a single bit. These could be supported, but no callers need them
- at this time.
- """
- if ext_cap in [
- ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
- ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU,
- ]:
- raise NotImplementedError(
- f"{ext_cap.name} not implemented yet by {self.__class__}"
- )
- byte_offset, bit_offset = _offsets(ext_cap)
- if len(self._ext_cap) > byte_offset:
- # Use bit_offset to derive a mask that will check the correct bit.
- if self._ext_cap[byte_offset] & 2**bit_offset > 0:
- return True
- return False
-
- @property
- def bss_transition(self) -> bool:
- return self._capability_advertised(ExtendedCapability.BSS_TRANSITION)
-
- @property
- def proxy_arp_service(self) -> bool:
- return self._capability_advertised(ExtendedCapability.PROXY_ARP_SERVICE)
-
- @property
- def utc_tsf_offset(self) -> bool:
- return self._capability_advertised(ExtendedCapability.UTC_TSF_OFFSET)
-
- @property
- def wnm_sleep_mode(self) -> bool:
- return self._capability_advertised(ExtendedCapability.WNM_SLEEP_MODE)
-
- # Other extended capability property methods can be added as needed by callers.
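A short example of decoding an extended capabilities field with the class above; the field value is made up for illustration:

    from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities

    # BSS_TRANSITION is bit 19, i.e. byte 2, bit 3 (0x08 in the third byte).
    ext_cap = ExtendedCapabilities(bytearray.fromhex("000008"))
    assert ext_cap.bss_transition
    assert not ext_cap.wnm_sleep_mode  # bit 17 is not set in this field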
diff --git a/packages/antlion/controllers/ap_lib/hostapd.py b/packages/antlion/controllers/ap_lib/hostapd.py
deleted file mode 100644
index e33406c..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd.py
+++ /dev/null
@@ -1,476 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import itertools
-import logging
-import re
-import time
-from datetime import datetime, timezone
-from subprocess import CalledProcessError
-from typing import Any, Iterable
-
-from tenacity import retry, retry_if_exception_type, stop_after_attempt
-
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.extended_capabilities import (
- ExtendedCapabilities,
-)
-from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
-from antlion.controllers.ap_lib.wireless_network_management import (
- BssTransitionManagementRequest,
-)
-from antlion.controllers.utils_lib.commands import shell
-from antlion.logger import LogLevel
-from antlion.runner import Runner
-
-PROGRAM_FILE = "/usr/sbin/hostapd"
-CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli"
-
-
-class Error(Exception):
- """An error caused by hostapd."""
-
-
-class InterfaceInitError(Error):
- """Interface initialization failed during hostapd start."""
-
-
-class Hostapd(object):
- """Manages the hostapd program.
-
- Attributes:
- config: The hostapd configuration that is being used.
- """
-
- def __init__(
- self, runner: Runner, interface: str, working_dir: str = "/tmp"
- ) -> None:
- """
- Args:
- runner: Object that has run_async and run methods for executing
- shell commands (e.g. connection.SshConnection)
- interface: The name of the interface to use (eg. wlan0).
- working_dir: The directory to work out of.
- """
- self._runner = runner
- self._interface = interface
- self._working_dir = working_dir
- self.config: HostapdConfig | None = None
- self._shell = shell.ShellCommand(runner)
- self._log_file = f"{working_dir}/hostapd-{self._interface}.log"
- self._ctrl_file = f"{working_dir}/hostapd-{self._interface}.ctrl"
- self._config_file = f"{working_dir}/hostapd-{self._interface}.conf"
- self._identifier = f"{PROGRAM_FILE}.*{self._config_file}"
-
- @retry(
- stop=stop_after_attempt(3),
- retry=retry_if_exception_type(InterfaceInitError),
- )
- def start(
- self,
- config: HostapdConfig,
- timeout: int = 60,
- additional_parameters: dict[str, Any] | None = None,
- ) -> None:
- """Starts hostapd
-
- Starts the hostapd daemon and runs it in the background.
-
- Args:
- config: Configs to start the hostapd with.
- timeout: Time to wait for hostapd to come up.
- additional_parameters: A dictionary of parameters that can be sent
- directly into the hostapd config file. This
- can be used for debugging and/or adding
- one-off parameters into the config.
-
- Note:
- The daemon can start and still not work. Invalid configurations can
- take a long time to surface, and because the daemon runs
- indefinitely it's impossible to wait on. If you need to check
- whether the configs are ok, use periodic calls to is_alive and
- the logs.
- """
- if additional_parameters is None:
- additional_parameters = {}
-
- self.stop()
-
- self.config = config
-
- self._shell.delete_file(self._ctrl_file)
- self._shell.delete_file(self._log_file)
- self._shell.delete_file(self._config_file)
- self._write_configs(additional_parameters)
-
- hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"'
- base_command = f'cd "{self._working_dir}"; {hostapd_command}'
- job_str = (
- f'rfkill unblock all; {base_command} > "{self._log_file}" 2>&1'
- )
- self._runner.run_async(job_str)
-
- try:
- self._wait_for_process(timeout=timeout)
- self._wait_for_interface(timeout=timeout)
- except:
- self.stop()
- raise
-
- def stop(self) -> None:
- """Kills the daemon if it is running."""
- if self.is_alive():
- self._shell.kill(self._identifier)
-
- def channel_switch(self, channel_num: int, csa_beacon_count: int) -> None:
- """Switches to the given channel.
-
- Args:
- channel_num: Channel to switch to.
- csa_beacon_count: Number of channel switch announcement beacons to
- send.
-
- Raises: See _run_hostapd_cli_cmd
- """
- try:
- channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
- except KeyError:
- raise ValueError(f"Invalid channel number {channel_num}")
- channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}"
- self._run_hostapd_cli_cmd(channel_switch_cmd)
-
- def get_current_channel(self) -> int:
- """Returns the current channel number.
-
- Raises: See _run_hostapd_cli_cmd
- """
- status_cmd = "status"
- result = self._run_hostapd_cli_cmd(status_cmd)
- match = re.search(r"^channel=(\d+)$", result, re.MULTILINE)
- if not match:
- raise Error("Current channel could not be determined")
- try:
- channel = int(match.group(1))
- except ValueError:
- raise Error("Internal error: current channel could not be parsed")
- return channel
-
- def get_stas(self) -> set[str]:
- """Return MAC addresses of all associated STAs."""
- list_sta_result = self._run_hostapd_cli_cmd("list_sta")
- stas = set()
- for line in list_sta_result.splitlines():
- # Each line must be a valid MAC address. Capture it.
- m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line)
- if m:
- stas.add(m.group(1))
- return stas
-
- def _sta(self, sta_mac: str) -> str:
- """Return hostapd's detailed info about an associated STA.
-
- Returns:
- Results of the command.
-
- Raises: See _run_hostapd_cli_cmd
- """
- return self._run_hostapd_cli_cmd(f"sta {sta_mac}")
-
- def get_sta_extended_capabilities(
- self, sta_mac: str
- ) -> ExtendedCapabilities:
- """Get extended capabilities for the given STA, as seen by the AP.
-
- Args:
- sta_mac: MAC address of the STA in question.
- Returns:
- Extended capabilities of the given STA.
- Raises:
- Error if extended capabilities for the STA cannot be obtained.
- """
- sta_result = self._sta(sta_mac)
- # hostapd ext_capab field is a hex encoded string representation of the
- # 802.11 extended capabilities structure, each byte represented by two
- # chars (each byte having format %02x).
- m = re.search(r"ext_capab=([0-9A-Faf]+)", sta_result, re.MULTILINE)
- if not m:
- raise Error("Failed to get ext_capab from STA details")
- raw_ext_capab = m.group(1)
- try:
- return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab))
- except ValueError:
- raise Error(
- f"ext_capab contains invalid hex string repr {raw_ext_capab}"
- )
-
- def sta_authenticated(self, sta_mac: str) -> bool:
- """Is the given STA authenticated?
-
- Args:
- sta_mac: MAC address of the STA in question.
- Returns:
- True if AP sees that the STA is authenticated, False otherwise.
- Raises:
- Error if authenticated status for the STA cannot be obtained.
- """
- sta_result = self._sta(sta_mac)
- m = re.search(r"flags=.*\[AUTH\]", sta_result, re.MULTILINE)
- return bool(m)
-
- def sta_associated(self, sta_mac: str) -> bool:
- """Is the given STA associated?
-
- Args:
- sta_mac: MAC address of the STA in question.
- Returns:
- True if AP sees that the STA is associated, False otherwise.
- Raises:
- Error if associated status for the STA cannot be obtained.
- """
- sta_result = self._sta(sta_mac)
- m = re.search(r"flags=.*\[ASSOC\]", sta_result, re.MULTILINE)
- return bool(m)
-
- def sta_authorized(self, sta_mac: str) -> bool:
- """Is the given STA authorized (802.1X controlled port open)?
-
- Args:
- sta_mac: MAC address of the STA in question.
- Returns:
- True if AP sees that the STA is 802.1X authorized, False otherwise.
- Raises:
- Error if authorized status for the STA cannot be obtained.
- """
- sta_result = self._sta(sta_mac)
- m = re.search(r"flags=.*\[AUTHORIZED\]", sta_result, re.MULTILINE)
- return bool(m)
-
- def _bss_tm_req(
- self, client_mac: str, request: BssTransitionManagementRequest
- ) -> None:
- """Send a hostapd BSS Transition Management request command to a STA.
-
- Args:
- client_mac: MAC address that will receive the request.
- request: BSS Transition Management request that will be sent.
- Raises: See _run_hostapd_cli_cmd
- """
- bss_tm_req_cmd = f"bss_tm_req {client_mac}"
-
- if request.abridged:
- bss_tm_req_cmd += " abridged=1"
- if (
- request.bss_termination_included
- and request.bss_termination_duration
- ):
- bss_tm_req_cmd += (
- f" bss_term={request.bss_termination_duration.duration}"
- )
- if request.disassociation_imminent:
- bss_tm_req_cmd += " disassoc_imminent=1"
- if request.disassociation_timer is not None:
- bss_tm_req_cmd += f" disassoc_timer={request.disassociation_timer}"
- if request.preferred_candidate_list_included:
- bss_tm_req_cmd += " pref=1"
- if request.session_information_url:
- bss_tm_req_cmd += f" url={request.session_information_url}"
- if request.validity_interval:
- bss_tm_req_cmd += f" valid_int={request.validity_interval}"
-
- # neighbor= can appear multiple times, so it requires special handling.
- if request.candidate_list is not None:
- for neighbor in request.candidate_list:
- bssid = neighbor.bssid
- bssid_info = hex(neighbor.bssid_information)
- op_class = neighbor.operating_class
- chan_num = neighbor.channel_number
- phy_type = int(neighbor.phy_type)
- bss_tm_req_cmd += f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
-
- self._run_hostapd_cli_cmd(bss_tm_req_cmd)
-
- def send_bss_transition_management_req(
- self, sta_mac: str, request: BssTransitionManagementRequest
- ) -> None:
- """Send a BSS Transition Management request to an associated STA.
-
- Args:
- sta_mac: MAC address of the STA in question.
- request: BSS Transition Management request that will be sent.
- Raises: See _run_hostapd_cli_cmd
- """
- self._bss_tm_req(sta_mac, request)
-
- def is_alive(self) -> bool:
- """
- Returns:
- True if the daemon is running.
- """
- return self._shell.is_alive(self._identifier)
-
- def pull_logs(self) -> str:
- """Pulls the log files from where hostapd is running.
-
- Returns:
- A string of the hostapd logs.
- """
- # TODO: Auto pulling of logs when stop is called.
- with LogLevel(self._runner.log, logging.INFO):
- log = self._shell.read_file(self._log_file)
-
- # Convert epoch to human-readable times
- result: list[str] = []
- for line in log.splitlines():
- try:
- end = line.index(":")
- epoch = float(line[:end])
- timestamp = datetime.fromtimestamp(
- epoch, timezone.utc
- ).strftime("%m-%d %H:%M:%S.%f")
- result.append(f"{timestamp} {line[end+1:]}")
- except ValueError: # Colon not found or float conversion failure
- result.append(line)
-
- return "\n".join(result)
-
- def _run_hostapd_cli_cmd(self, cmd: str) -> str:
- """Run the given hostapd_cli command.
-
- Runs the command, waits for the output (up to default timeout), and
- returns the result.
-
- Returns:
-            The standard output of the hostapd_cli command.
-
- Raises:
- subprocess.TimeoutExpired: When the remote command took too
- long to execute.
- antlion.controllers.utils_lib.ssh.connection.Error: When the ssh
- connection failed to be created.
-            subprocess.CalledProcessError: SSH worked, but the command exited
-                with a non-zero status.
- """
- hostapd_cli_job = (
- f"cd {self._working_dir}; "
- f"{CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}"
- )
- proc = self._runner.run(hostapd_cli_job)
- if proc.returncode:
- raise CalledProcessError(
- proc.returncode, hostapd_cli_job, proc.stdout, proc.stderr
- )
- return proc.stdout.decode("utf-8")
-
- def _wait_for_process(self, timeout: int = 60) -> None:
- """Waits for the process to come up.
-
- Waits until the hostapd process is found running, or there is
- a timeout. If the program never comes up then the log file
- will be scanned for errors.
-
- Raises: See _scan_for_errors
- """
- start_time = time.time()
- while time.time() - start_time < timeout and not self.is_alive():
- self._scan_for_errors(False)
- time.sleep(0.1)
-
- def _wait_for_interface(self, timeout: int = 60) -> None:
- """Waits for hostapd to report that the interface is up.
-
- Waits until hostapd says the interface has been brought up or an
- error occurs.
-
- Raises: see _scan_for_errors
- """
- start_time = time.time()
- while time.time() - start_time < timeout:
- time.sleep(0.1)
- success = self._shell.search_file(
- "Setup of interface done", self._log_file
- )
- if success:
- return
- self._scan_for_errors(False)
-
- self._scan_for_errors(True)
-
- def _scan_for_errors(self, should_be_up: bool) -> None:
- """Scans the hostapd log for any errors.
-
- Args:
-            should_be_up: If True then the hostapd process is expected to be
-                          alive. If it is found not alive while this is True,
-                          an error is raised.
-
- Raises:
- Error: when a hostapd error is found.
- InterfaceInitError: when the interface fails to initialize. This is
- a recoverable error that is usually caused by other processes
- using this interface at the same time.
- """
- # Store this so that all other errors have priority.
- is_dead = not self.is_alive()
-
- bad_config = self._shell.search_file(
- "Interface initialization failed", self._log_file
- )
- if bad_config:
- raise InterfaceInitError("Interface failed to initialize", self)
-
- bad_config = self._shell.search_file(
- f"Interface {self._interface} wasn't started", self._log_file
- )
- if bad_config:
- raise Error("Interface wasn't started", self)
-
- if should_be_up and is_dead:
- raise Error("Hostapd failed to start", self)
-
- def _write_configs(self, additional_parameters: dict[str, Any]) -> None:
- """Writes the configs to the hostapd config file."""
- self._shell.delete_file(self._config_file)
-
- interface_configs = collections.OrderedDict()
- interface_configs["interface"] = self._interface
- interface_configs["ctrl_interface"] = self._ctrl_file
- pairs: Iterable[str] = (
- f"{k}={v}" for k, v in interface_configs.items()
- )
-
- packaged_configs = self.config.package_configs() if self.config else []
- if additional_parameters:
- packaged_configs.append(additional_parameters)
- for packaged_config in packaged_configs:
- config_pairs = (
- f"{k}={v}" for k, v in packaged_config.items() if v is not None
- )
- pairs = itertools.chain(pairs, config_pairs)
-
- hostapd_conf = "\n".join(pairs)
-
- logging.info(f"Writing {self._config_file}")
- logging.debug("******************Start*******************")
- logging.debug(f"\n{hostapd_conf}")
- logging.debug("*******************End********************")
-
- self._shell.write_file(self._config_file, hostapd_conf)
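The config written above is plain key=value text, one pair per line: the interface section first, then any packaged or additional parameters chained after it. A minimal sketch of the same chaining, with hypothetical interface and SSID values:

    import itertools

    interface_configs = {
        "interface": "wlan0",
        "ctrl_interface": "/tmp/hostapd-wlan0",
    }
    packaged_config = {"ssid": "example-ssid", "channel": 6}

    pairs = itertools.chain(
        (f"{k}={v}" for k, v in interface_configs.items()),
        (f"{k}={v}" for k, v in packaged_config.items()),
    )
    hostapd_conf = "\n".join(pairs)
    # interface=wlan0
    # ctrl_interface=/tmp/hostapd-wlan0
    # ssid=example-ssid
    # channel=6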
diff --git a/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
deleted file mode 100644
index 0631e54..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
+++ /dev/null
@@ -1,576 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-from typing import Any, FrozenSet, TypeVar
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.third_party_ap_profiles import (
- actiontec,
- asus,
- belkin,
- linksys,
- netgear,
- securifi,
- tplink,
-)
-
-T = TypeVar("T")
-
-
-def _get_or_default(var: T | None, default_value: T) -> T:
- """Check variable and return non-null value.
-
- Args:
- var: Any variable.
- default_value: Value to return if the var is None.
-
- Returns:
- Variable value if not None, default value otherwise.
- """
- return var if var is not None else default_value
-
-
-def create_ap_preset(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- profile_name: str = "whirlwind",
- channel: int | None = None,
- mode: str | None = None,
- frequency: int | None = None,
- security: Security | None = None,
- pmf_support: int | None = None,
- ssid: str | None = None,
- hidden: bool | None = None,
- dtim_period: int | None = None,
- frag_threshold: int | None = None,
- rts_threshold: int | None = None,
- force_wmm: bool | None = None,
- beacon_interval: int | None = None,
- short_preamble: bool | None = None,
- n_capabilities: list[Any] | None = None,
- ac_capabilities: list[Any] | None = None,
- vht_bandwidth: int | None = None,
- wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
- bss_settings: list[Any] = [],
- ap_max_inactivity: int | None = None,
-) -> hostapd_config.HostapdConfig:
- """AP preset config generator. This a wrapper for hostapd_config but
- but supplies the default settings for the preset that is selected.
-
- You may specify channel or frequency, but not both. Both options
- are checked for validity (i.e. you can't specify an invalid channel
- or a frequency that will not be accepted).
-
-    Args:
-        iface_wlan_2g: The wlan 2g interface name of the AP.
-        iface_wlan_5g: The wlan 5g interface name of the AP.
-        profile_name: The name of the AP preset to use, e.g. "whirlwind".
-        channel: Channel number.
-        mode: The hostapd 802.11 mode of operation.
-        frequency: Frequency of channel.
-        security: The security settings to use.
-        pmf_support: Whether PMF is disabled, enabled, or required.
-        ssid: The name of the ssid to broadcast.
-        hidden: Whether to hide the ssid, omitting it from beacon frames.
-        dtim_period: DTIM period of the AP, default is 2.
-        frag_threshold: Max size of packet before fragmenting the packet.
-        rts_threshold: Max size of packet before requiring protection for
-            rts/cts or cts to self.
-        force_wmm: True/False to force WMM on/off, or None for the default.
-        beacon_interval: Beacon interval of the AP, default is 100.
-        short_preamble: Whether to use a short preamble, default is True.
-        n_capabilities: 802.11n capabilities for the BSS to advertise.
-        ac_capabilities: 802.11ac capabilities for the BSS to advertise.
-        vht_bandwidth: VHT bandwidth for 11ac operation.
-        wnm_features: WNM features to enable on the AP.
-        bss_settings: The settings for all bss.
-        ap_max_inactivity: See hostapd.conf's ap_max_inactivity setting.
-
- Returns: A hostapd_config object that can be used by the hostapd object.
- """
- if security is None:
- security = Security()
-
- # Verify interfaces
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
-
- if channel is not None:
- frequency = hostapd_config.get_frequency_for_channel(channel)
- elif frequency is not None:
- channel = hostapd_config.get_channel_for_frequency(frequency)
-
- if channel is None or frequency is None:
- raise ValueError("Must specify channel or frequency")
-
- if profile_name == "whirlwind":
-        # Profile indicates phy mode is 11bgn for 2.4GHz or 11acn for 5GHz.
- hidden = _get_or_default(hidden, False)
- force_wmm = _get_or_default(force_wmm, True)
- beacon_interval = _get_or_default(beacon_interval, 100)
- short_preamble = _get_or_default(short_preamble, True)
- dtim_period = _get_or_default(dtim_period, 2)
- frag_threshold = _get_or_default(frag_threshold, 2346)
- rts_threshold = _get_or_default(rts_threshold, 2347)
- if frequency < 5000:
- interface = iface_wlan_2g
- mode = _get_or_default(mode, hostapd_constants.Mode.MODE_11N_MIXED)
- n_capabilities = _get_or_default(
- n_capabilities,
- [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- ],
- )
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- hidden=hidden,
- security=security,
- pmf_support=pmf_support,
- interface=interface,
- mode=mode,
- force_wmm=force_wmm,
- beacon_interval=beacon_interval,
- dtim_period=dtim_period,
- short_preamble=short_preamble,
- frequency=frequency,
- n_capabilities=n_capabilities,
- frag_threshold=frag_threshold,
- rts_threshold=rts_threshold,
- wnm_features=wnm_features,
- bss_settings=bss_settings,
- ap_max_inactivity=ap_max_inactivity,
- )
- else:
- interface = iface_wlan_5g
- vht_bandwidth = _get_or_default(vht_bandwidth, 80)
- mode = _get_or_default(mode, hostapd_constants.Mode.MODE_11AC_MIXED)
- if hostapd_config.ht40_plus_allowed(channel):
- extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
- elif hostapd_config.ht40_minus_allowed(channel):
- extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
- # Channel 165 operates in 20MHz with n or ac modes.
- if channel == 165:
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- extended_channel = hostapd_constants.N_CAPABILITY_HT20
- # Define the n capability vector for 20 MHz and higher bandwidth
- if not vht_bandwidth:
- n_capabilities = _get_or_default(n_capabilities, [])
- elif vht_bandwidth >= 40:
- n_capabilities = _get_or_default(
- n_capabilities,
- [
- hostapd_constants.N_CAPABILITY_LDPC,
- extended_channel,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- ],
- )
- else:
- n_capabilities = _get_or_default(
- n_capabilities,
- [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_HT20,
- ],
- )
- ac_capabilities = _get_or_default(
- ac_capabilities,
- [
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
- hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
- ],
- )
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- hidden=hidden,
- security=security,
- pmf_support=pmf_support,
- interface=interface,
- mode=mode,
- force_wmm=force_wmm,
- vht_channel_width=vht_bandwidth,
- beacon_interval=beacon_interval,
- dtim_period=dtim_period,
- short_preamble=short_preamble,
- frequency=frequency,
- frag_threshold=frag_threshold,
- rts_threshold=rts_threshold,
- wnm_features=wnm_features,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- bss_settings=bss_settings,
- ap_max_inactivity=ap_max_inactivity,
- )
- elif profile_name == "whirlwind_11ab_legacy":
- if frequency < 5000:
- mode = hostapd_constants.Mode.MODE_11B
- else:
- mode = hostapd_constants.Mode.MODE_11A
-
- config = create_ap_preset(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- ssid=ssid,
- channel=channel,
- mode=mode,
- security=security,
- pmf_support=pmf_support,
- hidden=hidden,
- force_wmm=force_wmm,
- beacon_interval=beacon_interval,
- short_preamble=short_preamble,
- dtim_period=dtim_period,
- rts_threshold=rts_threshold,
- frag_threshold=frag_threshold,
- n_capabilities=[],
- ac_capabilities=[],
- vht_bandwidth=None,
- wnm_features=wnm_features,
- ap_max_inactivity=ap_max_inactivity,
- )
- elif profile_name == "whirlwind_11ag_legacy":
- if frequency < 5000:
- mode = hostapd_constants.Mode.MODE_11G
- else:
- mode = hostapd_constants.Mode.MODE_11A
-
- config = create_ap_preset(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- ssid=ssid,
- channel=channel,
- mode=mode,
- security=security,
- pmf_support=pmf_support,
- hidden=hidden,
- force_wmm=force_wmm,
- beacon_interval=beacon_interval,
- short_preamble=short_preamble,
- dtim_period=dtim_period,
- rts_threshold=rts_threshold,
- frag_threshold=frag_threshold,
- n_capabilities=[],
- ac_capabilities=[],
- vht_bandwidth=None,
- wnm_features=wnm_features,
- ap_max_inactivity=ap_max_inactivity,
- )
- elif profile_name == "mistral":
- hidden = _get_or_default(hidden, False)
- force_wmm = _get_or_default(force_wmm, True)
- beacon_interval = _get_or_default(beacon_interval, 100)
- short_preamble = _get_or_default(short_preamble, True)
- dtim_period = _get_or_default(dtim_period, 2)
- frag_threshold = None
- rts_threshold = None
-
- # Google IE
- # Country Code IE ('us' lowercase)
- vendor_elements = {
- "vendor_elements": "dd0cf4f5e80505ff0000ffffffff"
- "070a75732024041e95051e00"
- }
- default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"}
- additional_params = (
- vendor_elements
- | default_configs
- | hostapd_constants.ENABLE_RRM_BEACON_REPORT
- | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
- )
-
- if frequency < 5000:
- interface = iface_wlan_2g
- mode = _get_or_default(mode, hostapd_constants.Mode.MODE_11N_MIXED)
- n_capabilities = _get_or_default(
- n_capabilities,
- [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- ],
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- hidden=hidden,
- security=security,
- pmf_support=pmf_support,
- interface=interface,
- mode=mode,
- force_wmm=force_wmm,
- beacon_interval=beacon_interval,
- dtim_period=dtim_period,
- short_preamble=short_preamble,
- frequency=frequency,
- n_capabilities=n_capabilities,
- frag_threshold=frag_threshold,
- rts_threshold=rts_threshold,
- wnm_features=wnm_features,
- bss_settings=bss_settings,
- additional_parameters=additional_params,
- set_ap_defaults_profile=profile_name,
- ap_max_inactivity=ap_max_inactivity,
- )
- else:
- interface = iface_wlan_5g
- vht_bandwidth = _get_or_default(vht_bandwidth, 80)
- mode = _get_or_default(mode, hostapd_constants.Mode.MODE_11AC_MIXED)
- if hostapd_config.ht40_plus_allowed(channel):
- extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
- elif hostapd_config.ht40_minus_allowed(channel):
- extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
- # Channel 165 operates in 20MHz with n or ac modes.
- if channel == 165:
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- extended_channel = hostapd_constants.N_CAPABILITY_HT20
- if vht_bandwidth >= 40:
- n_capabilities = _get_or_default(
- n_capabilities,
- [
- hostapd_constants.N_CAPABILITY_LDPC,
- extended_channel,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- ],
- )
- else:
- n_capabilities = _get_or_default(
- n_capabilities,
- [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_HT20,
- ],
- )
- ac_capabilities = _get_or_default(
- ac_capabilities,
- [
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
- hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
- hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
- hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
- hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
- hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
- hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4,
- ],
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- hidden=hidden,
- security=security,
- pmf_support=pmf_support,
- interface=interface,
- mode=mode,
- force_wmm=force_wmm,
- vht_channel_width=vht_bandwidth,
- beacon_interval=beacon_interval,
- dtim_period=dtim_period,
- short_preamble=short_preamble,
- frequency=frequency,
- frag_threshold=frag_threshold,
- rts_threshold=rts_threshold,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- wnm_features=wnm_features,
- bss_settings=bss_settings,
- additional_parameters=additional_params,
- set_ap_defaults_profile=profile_name,
- ap_max_inactivity=ap_max_inactivity,
- )
- elif profile_name == "actiontec_pk5000":
- config = actiontec.actiontec_pk5000(
- iface_wlan_2g=iface_wlan_2g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "actiontec_mi424wr":
- config = actiontec.actiontec_mi424wr(
- iface_wlan_2g=iface_wlan_2g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "asus_rtac66u":
- config = asus.asus_rtac66u(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "asus_rtac86u":
- config = asus.asus_rtac86u(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "asus_rtac5300":
- config = asus.asus_rtac5300(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "asus_rtn56u":
- config = asus.asus_rtn56u(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "asus_rtn66u":
- config = asus.asus_rtn66u(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "belkin_f9k1001v5":
- config = belkin.belkin_f9k1001v5(
- iface_wlan_2g=iface_wlan_2g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "linksys_ea4500":
- config = linksys.linksys_ea4500(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "linksys_ea9500":
- config = linksys.linksys_ea9500(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "linksys_wrt1900acv2":
- config = linksys.linksys_wrt1900acv2(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "netgear_r7000":
- config = netgear.netgear_r7000(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "netgear_wndr3400":
- config = netgear.netgear_wndr3400(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "securifi_almond":
- config = securifi.securifi_almond(
- iface_wlan_2g=iface_wlan_2g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "tplink_archerc5":
- config = tplink.tplink_archerc5(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "tplink_archerc7":
- config = tplink.tplink_archerc7(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "tplink_c1200":
- config = tplink.tplink_c1200(
- iface_wlan_2g=iface_wlan_2g,
- iface_wlan_5g=iface_wlan_5g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- elif profile_name == "tplink_tlwr940n":
- config = tplink.tplink_tlwr940n(
- iface_wlan_2g=iface_wlan_2g,
- channel=channel,
- ssid=ssid,
- security=security,
- )
- else:
- raise ValueError(f"Invalid ap model specified ({profile_name})")
-
- return config
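For context, callers obtained a preset through this factory and handed the resulting HostapdConfig to a hostapd instance. A minimal sketch with hypothetical interface names and SSID, using the default open Security(); channel 36 selects the 5 GHz whirlwind defaults:

    from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
    from antlion.controllers.ap_lib.hostapd_security import Security

    config = create_ap_preset(
        iface_wlan_2g="wlan0",
        iface_wlan_5g="wlan1",
        profile_name="whirlwind",
        channel=36,
        ssid="example-ssid",
        security=Security(),
    )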
diff --git a/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
deleted file mode 100644
index 2f4d261..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-
-from antlion.controllers.ap_lib.hostapd_security import Security
-
-
-class BssSettings(object):
- """Settings for a bss.
-
-    Settings for a bss to allow multiple networks on a single device.
-
- Attributes:
- name: The name that this bss will go by.
- ssid: The name of the ssid to broadcast.
- hidden: If true then the ssid will be hidden.
- security: The security settings to use.
- bssid: The bssid to use.
- """
-
- def __init__(
- self,
- name: str,
- ssid: str,
- security: Security,
- hidden: bool = False,
- bssid: str | None = None,
- ):
- self.name = name
- self.ssid = ssid
- self.security = security
- self.hidden = hidden
- self.bssid = bssid
-
- def generate_dict(self) -> dict[str, str | int]:
- """Returns: A dictionary of bss settings."""
- settings: dict[str, str | int] = collections.OrderedDict()
- settings["bss"] = self.name
- if self.bssid:
- settings["bssid"] = self.bssid
- if self.ssid:
- settings["ssid"] = self.ssid
- settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0
-
- security_settings = self.security.generate_dict()
- for k, v in security_settings.items():
- settings[k] = v
-
- return settings
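A minimal sketch of how a BssSettings entry maps to hostapd config keys, assuming a hypothetical guest network and that the default open Security() contributes no additional keys:

    from antlion.controllers.ap_lib.hostapd_bss_settings import BssSettings
    from antlion.controllers.ap_lib.hostapd_security import Security

    guest = BssSettings(name="wlan0_guest", ssid="guest-net", security=Security())
    print(guest.generate_dict())
    # e.g. {'bss': 'wlan0_guest', 'ssid': 'guest-net', 'ignore_broadcast_ssid': 0}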
diff --git a/packages/antlion/controllers/ap_lib/hostapd_config.py b/packages/antlion/controllers/ap_lib/hostapd_config.py
deleted file mode 100644
index a882230..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd_config.py
+++ /dev/null
@@ -1,738 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import logging
-from typing import Any, FrozenSet
-
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_bss_settings import BssSettings
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def ht40_plus_allowed(channel: int) -> bool:
- """Returns: True iff HT40+ is enabled for this configuration."""
- channel_supported = (
- channel
- in hostapd_constants.HT40_ALLOW_MAP[
- hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
- ]
- )
- return channel_supported
-
-
-def ht40_minus_allowed(channel: int) -> bool:
- """Returns: True iff HT40- is enabled for this configuration."""
- channel_supported = (
- channel
- in hostapd_constants.HT40_ALLOW_MAP[
- hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
- ]
- )
- return channel_supported
-
-
-def get_frequency_for_channel(channel: int) -> int:
- """The frequency associated with a given channel number.
-
- Args:
-        channel: Channel number.
-
- Returns:
- int, frequency in MHz associated with the channel.
-
- """
- for frequency, channel_iter in hostapd_constants.CHANNEL_MAP.items():
- if channel == channel_iter:
- return frequency
- else:
- raise ValueError(f"Unknown channel value: {channel!r}.")
-
-
-def get_channel_for_frequency(frequency: int) -> int:
- """The channel number associated with a given frequency.
-
- Args:
-        frequency: Frequency in MHz.
-
-    Returns:
-        int, channel number associated with the frequency.
-
- """
- return hostapd_constants.CHANNEL_MAP[frequency]
-
-
-class HostapdConfig(object):
- """The root settings for the router.
-
- All the settings for a router that are not part of an ssid.
- """
-
- def __init__(
- self,
- interface: str | None = None,
- mode: str | None = None,
- channel: int | None = None,
- frequency: int | None = None,
- n_capabilities: list[Any] | None = None,
- beacon_interval: int | None = None,
- dtim_period: int | None = None,
- frag_threshold: int | None = None,
- rts_threshold: int | None = None,
- short_preamble: bool | None = None,
- ssid: str | None = None,
- hidden: bool = False,
- security: Security | None = None,
- bssid: str | None = None,
- force_wmm: bool | None = None,
- pmf_support: int | None = None,
- obss_interval: int | None = None,
- vht_channel_width: Any | None = None,
- vht_center_channel: int | None = None,
- ac_capabilities: list[Any] | None = None,
- beacon_footer: str = "",
- spectrum_mgmt_required: bool | None = None,
- scenario_name: str | None = None,
- min_streams: int | None = None,
- wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
- bss_settings: list[Any] | None = None,
- additional_parameters: dict[str, Any] | None = None,
- set_ap_defaults_profile: str = "whirlwind",
- ap_max_inactivity: int | None = None,
- ) -> None:
- """Construct a HostapdConfig.
-
- You may specify channel or frequency, but not both. Both options
- are checked for validity (i.e. you can't specify an invalid channel
- or a frequency that will not be accepted).
-
- Args:
- interface: The name of the interface to use.
- mode: MODE_11x defined above.
- channel: Channel number.
- frequency: Frequency of channel.
- n_capabilities: List of N_CAPABILITY_x defined above.
- beacon_interval: Beacon interval of AP.
- dtim_period: Include a DTIM every |dtim_period| beacons.
- frag_threshold: Maximum outgoing data frame size.
- rts_threshold: Maximum packet size without requiring explicit
- protection via rts/cts or cts to self.
- short_preamble: Whether to use a short preamble.
- ssid: string, The name of the ssid to broadcast.
- hidden: Should the ssid be hidden.
- security: The security settings to use.
- bssid: A MAC address like string for the BSSID.
- force_wmm: True if we should force WMM on, False if we should
- force it off, None if we shouldn't force anything.
- pmf_support: One of PMF_SUPPORT_* above. Controls whether the
- client supports/must support 802.11w. If None, defaults to
- required with wpa3, else defaults to disabled.
- obss_interval: Interval in seconds that client should be
- required to do background scans for overlapping BSSes.
-            vht_channel_width: VHT channel width: 20, 40, 80, 160, or 80+80.
- vht_center_channel: Center channel of segment 0.
- ac_capabilities: List of AC_CAPABILITY_x defined above.
- beacon_footer: Containing (not validated) IE data to be
- placed at the end of the beacon.
- spectrum_mgmt_required: True if we require the DUT to support
- spectrum management.
- scenario_name: To be included in file names, instead
- of the interface name.
- min_streams: Number of spatial streams required.
-            wnm_features: WNM features to enable on the AP.
-            bss_settings: The settings for all bss, allowing multiple
-                networks on a single device.
- additional_parameters: A dictionary of additional parameters to add
- to the hostapd config.
- set_ap_defaults_profile: profile name to load defaults from
- ap_max_inactivity: See hostapd.conf's ap_max_inactivity setting.
- """
- if n_capabilities is None:
- n_capabilities = []
- if ac_capabilities is None:
- ac_capabilities = []
- if bss_settings is None:
- bss_settings = []
- if additional_parameters is None:
- additional_parameters = {}
- if security is None:
- security = Security()
-
- self.set_ap_defaults_profile = set_ap_defaults_profile
- self._interface = interface
- if channel is not None and frequency is not None:
- raise ValueError(
- "Specify either frequency or channel " "but not both."
- )
-
- unknown_caps = [
- cap
- for cap in n_capabilities
- if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
- ]
- if unknown_caps:
- raise ValueError(f"Unknown capabilities: {unknown_caps!r}")
-
- if channel:
- self.channel = channel
- elif frequency:
- self.frequency = frequency
- else:
- raise ValueError("Specify either frequency or channel.")
-
- self._n_capabilities = set(n_capabilities)
- if force_wmm is not None:
- self._wmm_enabled = force_wmm
- elif self._n_capabilities:
- self._wmm_enabled = True
- if self._n_capabilities and mode is None:
- mode = hostapd_constants.Mode.MODE_11N_PURE
- self._mode = mode
-
- if not self.supports_frequency(self.frequency):
- raise ValueError(
- "Configured a mode %s that does not support "
- "frequency %d" % (self._mode, self.frequency)
- )
-
- self._beacon_interval = beacon_interval
- self._dtim_period = dtim_period
- self._frag_threshold = frag_threshold
- self._rts_threshold = rts_threshold
- self._short_preamble = short_preamble
- self._ssid = ssid
- self._hidden = hidden
- self._security = security
- self._bssid = bssid
- # Default PMF Values
- if pmf_support is None:
- if (
- self.security
- and self.security.security_mode is SecurityMode.WPA3
- ):
-                # Set PMF required for WPA3
- self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
- elif self.security and self.security.security_mode.is_wpa3():
- # Default PMF to enabled for WPA3 mixed modes (can be
- # overwritten by explicitly provided value)
- self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
- else:
-                # Default PMF to disabled for all other modes (can be
- # overwritten by explicitly provided value)
- self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
- elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
- raise ValueError(f"Invalid value for pmf_support: {pmf_support!r}")
- elif (
- pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
- and self.security
- and self.security.security_mode is SecurityMode.WPA3
- ):
- raise ValueError("PMF support must be required with wpa3.")
- else:
- self._pmf_support = pmf_support
- self._obss_interval = obss_interval
- if self.is_11ac:
- if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
- self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
- elif str(vht_channel_width) == "80":
- self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
- elif str(vht_channel_width) == "160":
- self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
- elif str(vht_channel_width) == "80+80":
- self._vht_oper_chwidth = (
- hostapd_constants.VHT_CHANNEL_WIDTH_80_80
- )
- elif vht_channel_width is not None:
- raise ValueError("Invalid channel width")
- else:
- logging.warning(
- "No channel bandwidth specified. Using 80MHz for 11ac."
- )
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
- if vht_center_channel is not None:
- self._vht_oper_centr_freq_seg0_idx = vht_center_channel
- elif vht_channel_width == 20 and channel is not None:
- self._vht_oper_centr_freq_seg0_idx = channel
- elif vht_channel_width == 20 and frequency is not None:
- self._vht_oper_centr_freq_seg0_idx = get_channel_for_frequency(
- frequency
- )
- else:
- self._vht_oper_centr_freq_seg0_idx = (
- self._get_11ac_center_channel_from_channel(self.channel)
- )
- self._ac_capabilities = set(ac_capabilities)
- self._beacon_footer = beacon_footer
- self._spectrum_mgmt_required = spectrum_mgmt_required
- self._scenario_name = scenario_name
- self._min_streams = min_streams
- self._wnm_features = wnm_features
- self._additional_parameters = additional_parameters
-
- self._bss_lookup: dict[str, BssSettings] = collections.OrderedDict()
- for bss in bss_settings:
- if bss.name in self._bss_lookup:
- raise ValueError(
- "Cannot have multiple bss settings with the same name."
- )
- self._bss_lookup[bss.name] = bss
-
- self._ap_max_inactivity = ap_max_inactivity
-
- def _get_11ac_center_channel_from_channel(self, channel: int) -> int:
- """Returns the center channel of the selected channel band based
- on the channel and channel bandwidth provided.
- """
- channel = int(channel)
- center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[
- self._vht_oper_chwidth
- ]["delta"]
-
- for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[
- self._vht_oper_chwidth
- ]["channels"]:
- lower_channel_bound, upper_channel_bound = channel_map
- if lower_channel_bound <= channel <= upper_channel_bound:
- return lower_channel_bound + center_channel_delta
- raise ValueError(f"Invalid channel for {self._vht_oper_chwidth}.")
-
- @property
- def _get_default_config(self) -> dict[str, str | int | None]:
- """Returns: dict of default options for hostapd."""
- if self.set_ap_defaults_profile == "mistral":
- return collections.OrderedDict(
- [
- ("logger_syslog", "-1"),
- ("logger_syslog_level", "0"),
- # default RTS and frag threshold to ``off''
- ("rts_threshold", None),
- ("fragm_threshold", None),
- ("driver", hostapd_constants.DRIVER_NAME),
- ]
- )
- else:
- return collections.OrderedDict(
- [
- ("logger_syslog", "-1"),
- ("logger_syslog_level", "0"),
- # default RTS and frag threshold to ``off''
- ("rts_threshold", "2347"),
- ("fragm_threshold", "2346"),
- ("driver", hostapd_constants.DRIVER_NAME),
- ]
- )
-
- @property
- def _hostapd_ht_capabilities(self) -> str:
- """Returns: string suitable for the ht_capab= line in a hostapd config."""
- ret = []
- for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
- if cap in self._n_capabilities:
- ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
- return "".join(ret)
-
- @property
- def _hostapd_vht_capabilities(self) -> str:
- """Returns: string suitable for the vht_capab= line in a hostapd config."""
- ret = []
- for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
- if cap in self._ac_capabilities:
- ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
- return "".join(ret)
-
- @property
- def _require_ht(self) -> bool:
- """Returns: True iff clients should be required to support HT."""
- return self._mode == hostapd_constants.Mode.MODE_11N_PURE
-
- @property
- def _require_vht(self) -> bool:
- """Returns: True if clients should be required to support VHT."""
- return self._mode == hostapd_constants.Mode.MODE_11AC_PURE
-
- @property
- def hw_mode(self) -> hostapd_constants.Mode:
- """Returns: string hardware mode understood by hostapd."""
- if self._mode == hostapd_constants.Mode.MODE_11A:
- return hostapd_constants.Mode.MODE_11A
- if self._mode == hostapd_constants.Mode.MODE_11B:
- return hostapd_constants.Mode.MODE_11B
- if self._mode == hostapd_constants.Mode.MODE_11G:
- return hostapd_constants.Mode.MODE_11G
- if self.is_11n or self.is_11ac:
- # For their own historical reasons, hostapd wants it this way.
- if self._frequency > 5000:
- return hostapd_constants.Mode.MODE_11A
- return hostapd_constants.Mode.MODE_11G
- raise ValueError("Invalid mode.")
-
- @property
- def is_11n(self) -> bool:
- """Returns: True if we're trying to host an 802.11n network."""
- return self._mode in (
- hostapd_constants.Mode.MODE_11N_MIXED,
- hostapd_constants.Mode.MODE_11N_PURE,
- )
-
- @property
- def is_11ac(self) -> bool:
- """Returns: True if we're trying to host an 802.11ac network."""
- return self._mode in (
- hostapd_constants.Mode.MODE_11AC_MIXED,
- hostapd_constants.Mode.MODE_11AC_PURE,
- )
-
- @property
- def channel(self) -> int:
- """Returns: int channel number for self.frequency."""
- return get_channel_for_frequency(self.frequency)
-
- @channel.setter
- def channel(self, value: int) -> None:
- """Sets the channel number to configure hostapd to listen on.
-
- Args:
- value: int, channel number.
-
- """
- self.frequency = get_frequency_for_channel(value)
-
- @property
- def bssid(self) -> str | None:
- return self._bssid
-
- @bssid.setter
- def bssid(self, value: str) -> None:
- self._bssid = value
-
- @property
- def frequency(self) -> int:
- """Returns: frequency for hostapd to listen on."""
- return self._frequency
-
- @frequency.setter
- def frequency(self, value: int) -> None:
- """Sets the frequency for hostapd to listen on.
-
- Args:
- value: int, frequency in MHz.
-
- """
- if value not in hostapd_constants.CHANNEL_MAP:
- raise ValueError(f"Tried to set an invalid frequency: {value!r}.")
-
- self._frequency = value
-
- @property
- def bss_lookup(self) -> dict[str, BssSettings]:
- return self._bss_lookup
-
- @property
- def ssid(self) -> str | None:
- """Returns: SsidSettings, The root Ssid settings being used."""
- return self._ssid
-
- @ssid.setter
- def ssid(self, value: str) -> None:
- """Sets the ssid for the hostapd.
-
- Args:
-            value: str, the new SSID to use.
-
- """
- self._ssid = value
-
- @property
- def hidden(self) -> bool:
- """Returns: bool, True if the ssid is hidden, false otherwise."""
- return self._hidden
-
- @hidden.setter
- def hidden(self, value: bool) -> None:
- """Sets if this ssid is hidden.
-
- Args:
- value: If true the ssid will be hidden.
- """
-        self._hidden = value
-
- @property
- def security(self) -> Security:
- """Returns: The security type being used."""
- return self._security
-
- @security.setter
- def security(self, value: Security) -> None:
- """Sets the security options to use.
-
- Args:
- value: The type of security to use.
- """
- self._security = value
-
- @property
- def ht_packet_capture_mode(self) -> str | None:
- """Get an appropriate packet capture HT parameter.
-
- When we go to configure a raw monitor we need to configure
- the phy to listen on the correct channel. Part of doing
- so is to specify the channel width for HT channels. In the
- case that the AP is configured to be either HT40+ or HT40-,
- we could return the wrong parameter because we don't know which
-        configuration will be chosen by hostapd.
-
- Returns:
- string, HT parameter for frequency configuration.
-
- """
- if not self.is_11n:
- return None
-
- if ht40_plus_allowed(self.channel):
- return "HT40+"
-
- if ht40_minus_allowed(self.channel):
- return "HT40-"
-
- return "HT20"
-
- @property
- def beacon_footer(self) -> str:
- return self._beacon_footer
-
- @beacon_footer.setter
- def beacon_footer(self, value: str) -> None:
- """Changes the beacon footer.
-
- Args:
- value: The beacon footer value.
- """
- self._beacon_footer = value
-
- @property
- def scenario_name(self) -> str | None:
- return self._scenario_name
-
- @property
- def min_streams(self) -> int | None:
- return self._min_streams
-
- @property
- def wnm_features(self) -> FrozenSet[hostapd_constants.WnmFeature]:
- return self._wnm_features
-
- @wnm_features.setter
- def wnm_features(
- self, value: FrozenSet[hostapd_constants.WnmFeature]
- ) -> None:
- self._wnm_features = value
-
- def __repr__(self) -> str:
- return (
- "%s(mode=%r, channel=%r, frequency=%r, "
- "n_capabilities=%r, beacon_interval=%r, "
- "dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, "
- "wmm_enabled=%r, security_config=%r, "
- "spectrum_mgmt_required=%r)"
- % (
- self.__class__.__name__,
- self._mode,
- self.channel,
- self.frequency,
- self._n_capabilities,
- self._beacon_interval,
- self._dtim_period,
- self._frag_threshold,
- self._ssid,
- self._bssid,
- self._wmm_enabled,
- self._security,
- self._spectrum_mgmt_required,
- )
- )
-
- def supports_channel(self, value: int) -> bool:
- """Check whether channel is supported by the current hardware mode.
-
- @param value: channel to check.
- @return True iff the current mode supports the band of the channel.
-
- """
- for freq, channel in hostapd_constants.CHANNEL_MAP.items():
- if channel == value:
- return self.supports_frequency(freq)
-
- return False
-
- def supports_frequency(self, frequency: int) -> bool:
- """Check whether frequency is supported by the current hardware mode.
-
- @param frequency: frequency to check.
- @return True iff the current mode supports the band of the frequency.
-
- """
- if self._mode == hostapd_constants.Mode.MODE_11A and frequency < 5000:
- return False
-
- if (
- self._mode
- in (
- hostapd_constants.Mode.MODE_11B,
- hostapd_constants.Mode.MODE_11G,
- )
- and frequency > 5000
- ):
- return False
-
- if frequency not in hostapd_constants.CHANNEL_MAP:
- return False
-
- channel = hostapd_constants.CHANNEL_MAP[frequency]
- supports_plus = (
- channel
- in hostapd_constants.HT40_ALLOW_MAP[
- hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
- ]
- )
- supports_minus = (
- channel
- in hostapd_constants.HT40_ALLOW_MAP[
- hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
- ]
- )
- if (
- hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
- and not supports_plus
- ):
- return False
-
- if (
- hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
- and not supports_minus
- ):
- return False
-
- return True
-
- def add_bss(self, bss: BssSettings) -> None:
- """Adds a new bss setting.
-
- Args:
- bss: The bss settings to add.
- """
- if bss.name in self._bss_lookup:
- raise ValueError("A bss with the same name already exists.")
-
- self._bss_lookup[bss.name] = bss
-
- def remove_bss(self, bss_name: str) -> None:
- """Removes a bss setting from the config."""
- del self._bss_lookup[bss_name]
-
- def package_configs(self) -> list[dict[str, str | int | None]]:
- """Package the configs.
-
- Returns:
- A list of dictionaries, one dictionary for each section of the
- config.
- """
- # Start with the default config parameters.
- conf = self._get_default_config
-
- if self._interface:
- conf["interface"] = self._interface
- if self._bssid:
- conf["bssid"] = self._bssid
- if self._ssid:
- conf["ssid"] = self._ssid
- conf["ignore_broadcast_ssid"] = 1 if self._hidden else 0
- conf["channel"] = self.channel
- conf["hw_mode"] = self.hw_mode
- if self.is_11n or self.is_11ac:
- conf["ieee80211n"] = 1
- conf["ht_capab"] = self._hostapd_ht_capabilities
- if self.is_11ac:
- conf["ieee80211ac"] = 1
- conf["vht_oper_chwidth"] = self._vht_oper_chwidth
- conf[
- "vht_oper_centr_freq_seg0_idx"
- ] = self._vht_oper_centr_freq_seg0_idx
- conf["vht_capab"] = self._hostapd_vht_capabilities
- if self._wmm_enabled is not None:
- conf["wmm_enabled"] = 1 if self._wmm_enabled else 0
- if self._require_ht:
- conf["require_ht"] = 1
- if self._require_vht:
- conf["require_vht"] = 1
- if self._beacon_interval:
- conf["beacon_int"] = self._beacon_interval
- if self._dtim_period:
- conf["dtim_period"] = self._dtim_period
- if self._frag_threshold:
- conf["fragm_threshold"] = self._frag_threshold
- if self._rts_threshold:
- conf["rts_threshold"] = self._rts_threshold
- if self._pmf_support:
- conf["ieee80211w"] = self._pmf_support
- if self._obss_interval:
- conf["obss_interval"] = self._obss_interval
- if self._short_preamble:
- conf["preamble"] = 1
- if self._spectrum_mgmt_required:
- # To set spectrum_mgmt_required, we must first set
- # local_pwr_constraint. And to set local_pwr_constraint,
- # we must first set ieee80211d. And to set ieee80211d, ...
- # Point being: order matters here.
- conf["country_code"] = "US" # Required for local_pwr_constraint
- conf["ieee80211d"] = 1 # Required for local_pwr_constraint
- conf["local_pwr_constraint"] = 0 # No local constraint
- conf["spectrum_mgmt_required"] = 1 # Requires local_pwr_constraint
- if self._ap_max_inactivity:
- conf["ap_max_inactivity"] = self._ap_max_inactivity
-
- for k, v in self._security.generate_dict().items():
- conf[k] = v
-
- for wnm_feature in self._wnm_features:
- if wnm_feature == hostapd_constants.WnmFeature.TIME_ADVERTISEMENT:
- conf.update(hostapd_constants.ENABLE_WNM_TIME_ADVERTISEMENT)
- elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE:
- conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE)
- elif (
- wnm_feature
- == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT
- ):
- conf.update(
- hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT
- )
- elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP:
- conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP)
- elif (
- wnm_feature
- == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
- ):
- conf.update(
- hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
- )
-
- bss_conf: list[dict[str, str | int | None]] = [
- {k: v for k, v in bss.generate_dict().items()}
- for bss in self._bss_lookup.values()
- ]
- all_conf = [conf] + bss_conf
-
- if self._additional_parameters:
- all_conf.append(self._additional_parameters)
-
- return all_conf
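A minimal sketch of the channel/frequency helpers and package_configs above, assuming a hypothetical 2.4 GHz interface and the default open Security(); force_wmm is passed explicitly because the constructor only records a WMM preference when one is given:

    from antlion.controllers.ap_lib import hostapd_config, hostapd_constants

    assert hostapd_config.get_frequency_for_channel(6) == 2437
    assert hostapd_config.get_channel_for_frequency(5180) == 36

    config = hostapd_config.HostapdConfig(
        interface="wlan0",
        channel=6,
        mode=hostapd_constants.Mode.MODE_11G,
        ssid="example-ssid",
        force_wmm=True,
    )
    for section in config.package_configs():
        print(section)  # one dict for the main BSS, then one per extra BSS entry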
diff --git a/packages/antlion/controllers/ap_lib/hostapd_constants.py b/packages/antlion/controllers/ap_lib/hostapd_constants.py
deleted file mode 100755
index c3080dd..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd_constants.py
+++ /dev/null
@@ -1,969 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-from enum import Enum, StrEnum, auto, unique
-from typing import Literal, TypedDict
-
-# TODO(http://b/286584981): Replace with BandType
-BAND_2G = "2g"
-BAND_5G = "5g"
-
-
-@unique
-class BandType(StrEnum):
- BAND_2G = "2g"
- BAND_5G = "5g"
-
- def default_channel(self) -> int:
- match self:
- case BandType.BAND_2G:
- return 6
- case BandType.BAND_5G:
- return 36
-
-
-CHANNEL_BANDWIDTH_20MHZ = 20
-CHANNEL_BANDWIDTH_40MHZ = 40
-CHANNEL_BANDWIDTH_80MHZ = 80
-CHANNEL_BANDWIDTH_160MHZ = 160
-
-# TODO(http://b/286584981): Replace with SecurityModeInt
-WEP = 0
-WPA1 = 1
-WPA2 = 2
-WPA3 = 2 # same as wpa2 and wpa2/wpa3, distinguished by wpa_key_mgmt
-MIXED = (
- 3 # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinguished by wpa_key_mgmt
-)
-ENT = 4 # get the correct constant
-
-MAX_WPA_PSK_LENGTH = 64
-MIN_WPA_PSK_LENGTH = 8
-MAX_WPA_PASSWORD_LENGTH = 63
-WPA_STRICT_REKEY = 1
-WPA_DEFAULT_CIPHER = "TKIP"
-WPA2_DEFAULT_CIPER = "CCMP"
-WPA_GROUP_KEY_ROTATION_TIME = 600
-WPA_STRICT_REKEY_DEFAULT = True
-
-# TODO(http://b/286584981): Replace these with SecurityMode enum
-WEP_STRING = "wep"
-WPA_STRING = "wpa"
-WPA2_STRING = "wpa2"
-WPA_MIXED_STRING = "wpa/wpa2"
-WPA3_STRING = "wpa3"
-WPA2_WPA3_MIXED_STRING = "wpa2/wpa3"
-WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3"
-ENT_STRING = "ent"
-
-# TODO(http://b/286584981): Replace with KeyManagement
-ENT_KEY_MGMT = "WPA-EAP"
-WPA_PSK_KEY_MGMT = "WPA-PSK"
-SAE_KEY_MGMT = "SAE"
-DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE"
-
-# TODO(http://b/286584981): Replace with SecurityMode.security_mode_int
-SECURITY_STRING_TO_SECURITY_MODE_INT = {
- WPA_STRING: WPA1,
- WPA2_STRING: WPA2,
- WPA_MIXED_STRING: MIXED,
- WPA3_STRING: WPA3,
- WPA2_WPA3_MIXED_STRING: WPA3,
- WPA_WPA2_WPA3_MIXED_STRING: MIXED,
- WEP_STRING: WEP,
- ENT_STRING: ENT,
-}
-
-# TODO(http://b/286584981): Replace with SecurityMode.key_management
-SECURITY_STRING_TO_WPA_KEY_MGMT = {
- WPA_STRING: WPA_PSK_KEY_MGMT,
- WPA2_STRING: WPA_PSK_KEY_MGMT,
- WPA_MIXED_STRING: WPA_PSK_KEY_MGMT,
- WPA3_STRING: SAE_KEY_MGMT,
- WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
- WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
-}
-
-# TODO(http://b/286584981): Replace with SecurityMode.fuchsia_security_type
-SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
- WEP_STRING: WEP_STRING,
- WPA_STRING: WPA_STRING,
- WPA2_STRING: WPA2_STRING,
- WPA_MIXED_STRING: WPA2_STRING,
- WPA3_STRING: WPA3_STRING,
- WPA2_WPA3_MIXED_STRING: WPA3_STRING,
- WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING,
-}
-
-IEEE8021X = 1
-WLAN0_STRING = "wlan0"
-WLAN1_STRING = "wlan1"
-WLAN2_STRING = "wlan2"
-WLAN3_STRING = "wlan3"
-WLAN0_GALE = "wlan-2400mhz"
-WLAN1_GALE = "wlan-5000mhz"
-WEP_DEFAULT_KEY = 0
-WEP_HEX_LENGTH = [10, 26, 32, 58]
-WEP_STR_LENGTH = [5, 13, 16]
-WEP_DEFAULT_STR_LENGTH = 13
-
-# TODO(http://b/286584981): Replace with BandType.default_channel()
-AP_DEFAULT_CHANNEL_2G = 6
-AP_DEFAULT_CHANNEL_5G = 36
-
-AP_DEFAULT_MAX_SSIDS_2G = 8
-AP_DEFAULT_MAX_SSIDS_5G = 8
-AP_SSID_LENGTH_2G = 8
-AP_SSID_MIN_LENGTH_2G = 1
-AP_SSID_MAX_LENGTH_2G = 32
-AP_PASSPHRASE_LENGTH_2G = 10
-AP_SSID_LENGTH_5G = 8
-AP_SSID_MIN_LENGTH_5G = 1
-AP_SSID_MAX_LENGTH_5G = 32
-AP_PASSPHRASE_LENGTH_5G = 10
-INTERFACE_2G_LIST = [WLAN0_STRING, WLAN0_GALE]
-INTERFACE_5G_LIST = [WLAN1_STRING, WLAN1_GALE]
-HIGH_BEACON_INTERVAL = 300
-LOW_BEACON_INTERVAL = 100
-HIGH_DTIM = 3
-LOW_DTIM = 1
-
-# A mapping of frequency to channel number. This includes some
-# frequencies used outside the US.
-CHANNEL_MAP = {
- 2412: 1,
- 2417: 2,
- 2422: 3,
- 2427: 4,
- 2432: 5,
- 2437: 6,
- 2442: 7,
- 2447: 8,
- 2452: 9,
- 2457: 10,
- 2462: 11,
- # 12, 13 are only legitimate outside the US.
- 2467: 12,
- 2472: 13,
- # 14 is for Japan, DSSS and CCK only.
- 2484: 14,
- # 34 valid in Japan.
- 5170: 34,
- # 36-116 valid in the US, except 38, 42, and 46, which have
- # mixed international support.
- 5180: 36,
- 5190: 38,
- 5200: 40,
- 5210: 42,
- 5220: 44,
- 5230: 46,
- 5240: 48,
- # DFS channels.
- 5260: 52,
- 5280: 56,
- 5300: 60,
- 5320: 64,
- 5500: 100,
- 5520: 104,
- 5540: 108,
- 5560: 112,
- 5580: 116,
- # 120, 124, 128 valid in Europe/Japan.
- 5600: 120,
- 5620: 124,
- 5640: 128,
- # 132+ valid in US.
- 5660: 132,
- 5680: 136,
- 5700: 140,
- # 144 is supported by a subset of WiFi chips
- # (e.g. bcm4354, but not ath9k).
- 5720: 144,
- # End DFS channels.
- 5745: 149,
- 5755: 151,
- 5765: 153,
- 5775: 155,
- 5795: 159,
- 5785: 157,
- 5805: 161,
- 5825: 165,
-}
-FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()}
-
-US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
-US_CHANNELS_5G = [
- 36,
- 40,
- 44,
- 48,
- 52,
- 56,
- 60,
- 64,
- 100,
- 104,
- 108,
- 112,
- 116,
- 120,
- 124,
- 128,
- 132,
- 136,
- 140,
- 144,
- 149,
- 153,
- 157,
- 161,
- 165,
-]
-
-LOWEST_5G_CHANNEL = 36
-
-
-@unique
-class Mode(StrEnum):
- MODE_11A = "a"
- MODE_11B = "b"
- MODE_11G = "g"
- MODE_11N_MIXED = "n-mixed"
- MODE_11N_PURE = "n-only"
- MODE_11AC_MIXED = "ac-mixed"
- MODE_11AC_PURE = "ac-only"
-
-
-N_CAPABILITY_LDPC = object()
-N_CAPABILITY_HT20 = object()
-N_CAPABILITY_HT40_PLUS = object()
-N_CAPABILITY_HT40_MINUS = object()
-N_CAPABILITY_GREENFIELD = object()
-N_CAPABILITY_SGI20 = object()
-N_CAPABILITY_SGI40 = object()
-N_CAPABILITY_TX_STBC = object()
-N_CAPABILITY_RX_STBC1 = object()
-N_CAPABILITY_RX_STBC12 = object()
-N_CAPABILITY_RX_STBC123 = object()
-N_CAPABILITY_DSSS_CCK_40 = object()
-N_CAPABILITY_LSIG_TXOP_PROT = object()
-N_CAPABILITY_40_INTOLERANT = object()
-N_CAPABILITY_MAX_AMSDU_7935 = object()
-N_CAPABILITY_DELAY_BLOCK_ACK = object()
-N_CAPABILITY_SMPS_STATIC = object()
-N_CAPABILITY_SMPS_DYNAMIC = object()
-N_CAPABILITIES_MAPPING = {
- N_CAPABILITY_LDPC: "[LDPC]",
- N_CAPABILITY_HT20: "[HT20]",
- N_CAPABILITY_HT40_PLUS: "[HT40+]",
- N_CAPABILITY_HT40_MINUS: "[HT40-]",
- N_CAPABILITY_GREENFIELD: "[GF]",
- N_CAPABILITY_SGI20: "[SHORT-GI-20]",
- N_CAPABILITY_SGI40: "[SHORT-GI-40]",
- N_CAPABILITY_TX_STBC: "[TX-STBC]",
- N_CAPABILITY_RX_STBC1: "[RX-STBC1]",
- N_CAPABILITY_RX_STBC12: "[RX-STBC12]",
- N_CAPABILITY_RX_STBC123: "[RX-STBC123]",
- N_CAPABILITY_DSSS_CCK_40: "[DSSS_CCK-40]",
- N_CAPABILITY_LSIG_TXOP_PROT: "[LSIG-TXOP-PROT]",
- N_CAPABILITY_40_INTOLERANT: "[40-INTOLERANT]",
- N_CAPABILITY_MAX_AMSDU_7935: "[MAX-AMSDU-7935]",
- N_CAPABILITY_DELAY_BLOCK_ACK: "[DELAYED-BA]",
- N_CAPABILITY_SMPS_STATIC: "[SMPS-STATIC]",
- N_CAPABILITY_SMPS_DYNAMIC: "[SMPS-DYNAMIC]",
-}
-N_CAPABILITIES_MAPPING_INVERSE = {
- v: k for k, v in N_CAPABILITIES_MAPPING.items()
-}
-N_CAPABILITY_HT40_MINUS_CHANNELS = object()
-N_CAPABILITY_HT40_PLUS_CHANNELS = object()
-AC_CAPABILITY_VHT160 = object()
-AC_CAPABILITY_VHT160_80PLUS80 = object()
-AC_CAPABILITY_RXLDPC = object()
-AC_CAPABILITY_SHORT_GI_80 = object()
-AC_CAPABILITY_SHORT_GI_160 = object()
-AC_CAPABILITY_TX_STBC_2BY1 = object()
-AC_CAPABILITY_RX_STBC_1 = object()
-AC_CAPABILITY_RX_STBC_12 = object()
-AC_CAPABILITY_RX_STBC_123 = object()
-AC_CAPABILITY_RX_STBC_1234 = object()
-AC_CAPABILITY_SU_BEAMFORMER = object()
-AC_CAPABILITY_SU_BEAMFORMEE = object()
-AC_CAPABILITY_BF_ANTENNA_2 = object()
-AC_CAPABILITY_BF_ANTENNA_3 = object()
-AC_CAPABILITY_BF_ANTENNA_4 = object()
-AC_CAPABILITY_SOUNDING_DIMENSION_2 = object()
-AC_CAPABILITY_SOUNDING_DIMENSION_3 = object()
-AC_CAPABILITY_SOUNDING_DIMENSION_4 = object()
-AC_CAPABILITY_MU_BEAMFORMER = object()
-AC_CAPABILITY_MU_BEAMFORMEE = object()
-AC_CAPABILITY_VHT_TXOP_PS = object()
-AC_CAPABILITY_HTC_VHT = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7 = object()
-AC_CAPABILITY_VHT_LINK_ADAPT2 = object()
-AC_CAPABILITY_VHT_LINK_ADAPT3 = object()
-AC_CAPABILITY_RX_ANTENNA_PATTERN = object()
-AC_CAPABILITY_TX_ANTENNA_PATTERN = object()
-AC_CAPABILITY_MAX_MPDU_7991 = object()
-AC_CAPABILITY_MAX_MPDU_11454 = object()
-AC_CAPABILITIES_MAPPING = {
- AC_CAPABILITY_VHT160: "[VHT160]",
- AC_CAPABILITY_VHT160_80PLUS80: "[VHT160-80PLUS80]",
- AC_CAPABILITY_RXLDPC: "[RXLDPC]",
- AC_CAPABILITY_SHORT_GI_80: "[SHORT-GI-80]",
- AC_CAPABILITY_SHORT_GI_160: "[SHORT-GI-160]",
- AC_CAPABILITY_TX_STBC_2BY1: "[TX-STBC-2BY1]",
- AC_CAPABILITY_RX_STBC_1: "[RX-STBC-1]",
- AC_CAPABILITY_RX_STBC_12: "[RX-STBC-12]",
- AC_CAPABILITY_RX_STBC_123: "[RX-STBC-123]",
- AC_CAPABILITY_RX_STBC_1234: "[RX-STBC-1234]",
- AC_CAPABILITY_SU_BEAMFORMER: "[SU-BEAMFORMER]",
- AC_CAPABILITY_SU_BEAMFORMEE: "[SU-BEAMFORMEE]",
- AC_CAPABILITY_BF_ANTENNA_2: "[BF-ANTENNA-2]",
- AC_CAPABILITY_BF_ANTENNA_3: "[BF-ANTENNA-3]",
- AC_CAPABILITY_BF_ANTENNA_4: "[BF-ANTENNA-4]",
- AC_CAPABILITY_SOUNDING_DIMENSION_2: "[SOUNDING-DIMENSION-2]",
- AC_CAPABILITY_SOUNDING_DIMENSION_3: "[SOUNDING-DIMENSION-3]",
- AC_CAPABILITY_SOUNDING_DIMENSION_4: "[SOUNDING-DIMENSION-4]",
- AC_CAPABILITY_MU_BEAMFORMER: "[MU-BEAMFORMER]",
- AC_CAPABILITY_MU_BEAMFORMEE: "[MU-BEAMFORMEE]",
- AC_CAPABILITY_VHT_TXOP_PS: "[VHT-TXOP-PS]",
- AC_CAPABILITY_HTC_VHT: "[HTC-VHT]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: "[MAX-A-MPDU-LEN-EXP0]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: "[MAX-A-MPDU-LEN-EXP1]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: "[MAX-A-MPDU-LEN-EXP2]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: "[MAX-A-MPDU-LEN-EXP3]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: "[MAX-A-MPDU-LEN-EXP4]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: "[MAX-A-MPDU-LEN-EXP5]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: "[MAX-A-MPDU-LEN-EXP6]",
- AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: "[MAX-A-MPDU-LEN-EXP7]",
- AC_CAPABILITY_VHT_LINK_ADAPT2: "[VHT-LINK-ADAPT2]",
- AC_CAPABILITY_VHT_LINK_ADAPT3: "[VHT-LINK-ADAPT3]",
- AC_CAPABILITY_RX_ANTENNA_PATTERN: "[RX-ANTENNA-PATTERN]",
- AC_CAPABILITY_TX_ANTENNA_PATTERN: "[TX-ANTENNA-PATTERN]",
- AC_CAPABILITY_MAX_MPDU_11454: "[MAX-MPDU-11454]",
- AC_CAPABILITY_MAX_MPDU_7991: "[MAX-MPDU-7991]",
-}
-AC_CAPABILITIES_MAPPING_INVERSE = {
- v: k for k, v in AC_CAPABILITIES_MAPPING.items()
-}
-VHT_CHANNEL_WIDTH_40 = 0
-VHT_CHANNEL_WIDTH_80 = 1
-VHT_CHANNEL_WIDTH_160 = 2
-VHT_CHANNEL_WIDTH_80_80 = 3
-
-VHT_CHANNEL = {
- 40: VHT_CHANNEL_WIDTH_40,
- 80: VHT_CHANNEL_WIDTH_80,
- 160: VHT_CHANNEL_WIDTH_160,
-}
-
-# This is a loose merging of the rules for US and EU regulatory
-# domains as taken from IEEE Std 802.11-2012 Appendix E. For instance,
-# we tolerate HT40 in channels 149-161 (not allowed in EU), but also
-# tolerate HT40+ on channel 7 (not allowed in the US). We take the loose
-# definition so that we don't prohibit testing in either domain.
-HT40_ALLOW_MAP = {
- N_CAPABILITY_HT40_MINUS_CHANNELS: tuple(
- itertools.chain(
- range(6, 14), range(40, 65, 8), range(104, 145, 8), [153, 161]
- )
- ),
- N_CAPABILITY_HT40_PLUS_CHANNELS: tuple(
- itertools.chain(
- range(1, 8), range(36, 61, 8), range(100, 141, 8), [149, 157]
- )
- ),
-}
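
For context, HT40_ALLOW_MAP keys the bonding direction to the primary channels where it is tolerated. A minimal sketch of how a caller might consult it (reuses only names defined above; not code from this file):

def ht40_plus_allowed(channel: int) -> bool:
    # True when the loose US/EU merge above tolerates HT40+ on this primary channel.
    return channel in HT40_ALLOW_MAP[N_CAPABILITY_HT40_PLUS_CHANNELS]

assert ht40_plus_allowed(36) and not ht40_plus_allowed(165)
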
-
-PMF_SUPPORT_DISABLED = 0
-PMF_SUPPORT_ENABLED = 1
-PMF_SUPPORT_REQUIRED = 2
-PMF_SUPPORT_VALUES = (
- PMF_SUPPORT_DISABLED,
- PMF_SUPPORT_ENABLED,
- PMF_SUPPORT_REQUIRED,
-)
-
-DRIVER_NAME = "nl80211"
-
-
-class VHTChannelWidth(TypedDict):
- delta: int
- channels: list[tuple[int, int]]
-
-
-CENTER_CHANNEL_MAP = {
- VHT_CHANNEL_WIDTH_40: VHTChannelWidth(
- delta=2,
- channels=[
- (36, 40),
- (44, 48),
- (52, 56),
- (60, 64),
- (100, 104),
- (108, 112),
- (116, 120),
- (124, 128),
- (132, 136),
- (140, 144),
- (149, 153),
- (157, 161),
- ],
- ),
- VHT_CHANNEL_WIDTH_80: VHTChannelWidth(
- delta=6,
- channels=[
- (36, 48),
- (52, 64),
- (100, 112),
- (116, 128),
- (132, 144),
- (149, 161),
- ],
- ),
- VHT_CHANNEL_WIDTH_160: VHTChannelWidth(
- delta=14,
- channels=[(36, 64), (100, 128)],
- ),
-}
-
-OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"}
-
-CCK_DATA_RATES = {"supported_rates": "10 20 55 110"}
-
-CCK_AND_OFDM_DATA_RATES = {
- "supported_rates": "10 20 55 110 60 90 120 180 240 360 480 540"
-}
-
-OFDM_ONLY_BASIC_RATES = {"basic_rates": "60 120 240"}
-
-CCK_AND_OFDM_BASIC_RATES = {"basic_rates": "10 20 55 110"}
-
-WEP_AUTH = {
- "open": {"auth_algs": 1},
- "shared": {"auth_algs": 2},
- "open_and_shared": {"auth_algs": 3},
-}
-
-WMM_11B_DEFAULT_PARAMS = {
- "wmm_ac_bk_cwmin": 5,
- "wmm_ac_bk_cwmax": 10,
- "wmm_ac_bk_aifs": 7,
- "wmm_ac_bk_txop_limit": 0,
- "wmm_ac_be_aifs": 3,
- "wmm_ac_be_cwmin": 5,
- "wmm_ac_be_cwmax": 7,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 2,
- "wmm_ac_vi_cwmin": 4,
- "wmm_ac_vi_cwmax": 5,
- "wmm_ac_vi_txop_limit": 188,
- "wmm_ac_vo_aifs": 2,
- "wmm_ac_vo_cwmin": 3,
- "wmm_ac_vo_cwmax": 4,
- "wmm_ac_vo_txop_limit": 102,
-}
-
-WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = {
- "wmm_ac_bk_cwmin": 4,
- "wmm_ac_bk_cwmax": 10,
- "wmm_ac_bk_aifs": 7,
- "wmm_ac_bk_txop_limit": 0,
- "wmm_ac_be_aifs": 3,
- "wmm_ac_be_cwmin": 4,
- "wmm_ac_be_cwmax": 10,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 2,
- "wmm_ac_vi_cwmin": 3,
- "wmm_ac_vi_cwmax": 4,
- "wmm_ac_vi_txop_limit": 94,
- "wmm_ac_vo_aifs": 2,
- "wmm_ac_vo_cwmin": 2,
- "wmm_ac_vo_cwmax": 3,
- "wmm_ac_vo_txop_limit": 47,
-}
-
-WMM_NON_DEFAULT_PARAMS = {
- "wmm_ac_bk_cwmin": 5,
- "wmm_ac_bk_cwmax": 9,
- "wmm_ac_bk_aifs": 3,
- "wmm_ac_bk_txop_limit": 94,
- "wmm_ac_be_aifs": 2,
- "wmm_ac_be_cwmin": 2,
- "wmm_ac_be_cwmax": 8,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 1,
- "wmm_ac_vi_cwmin": 7,
- "wmm_ac_vi_cwmax": 10,
- "wmm_ac_vi_txop_limit": 47,
- "wmm_ac_vo_aifs": 1,
- "wmm_ac_vo_cwmin": 6,
- "wmm_ac_vo_cwmax": 10,
- "wmm_ac_vo_txop_limit": 94,
-}
-
-WMM_DEGRADED_VO_PARAMS = {
- "wmm_ac_bk_cwmin": 7,
- "wmm_ac_bk_cwmax": 15,
- "wmm_ac_bk_aifs": 2,
- "wmm_ac_bk_txop_limit": 0,
- "wmm_ac_be_aifs": 2,
- "wmm_ac_be_cwmin": 7,
- "wmm_ac_be_cwmax": 15,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 2,
- "wmm_ac_vi_cwmin": 7,
- "wmm_ac_vi_cwmax": 15,
- "wmm_ac_vi_txop_limit": 94,
- "wmm_ac_vo_aifs": 10,
- "wmm_ac_vo_cwmin": 7,
- "wmm_ac_vo_cwmax": 15,
- "wmm_ac_vo_txop_limit": 47,
-}
-
-WMM_DEGRADED_VI_PARAMS = {
- "wmm_ac_bk_cwmin": 7,
- "wmm_ac_bk_cwmax": 15,
- "wmm_ac_bk_aifs": 2,
- "wmm_ac_bk_txop_limit": 0,
- "wmm_ac_be_aifs": 2,
- "wmm_ac_be_cwmin": 7,
- "wmm_ac_be_cwmax": 15,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 10,
- "wmm_ac_vi_cwmin": 7,
- "wmm_ac_vi_cwmax": 15,
- "wmm_ac_vi_txop_limit": 94,
- "wmm_ac_vo_aifs": 2,
- "wmm_ac_vo_cwmin": 7,
- "wmm_ac_vo_cwmax": 15,
- "wmm_ac_vo_txop_limit": 47,
-}
-
-WMM_IMPROVE_BE_PARAMS = {
- "wmm_ac_bk_cwmin": 7,
- "wmm_ac_bk_cwmax": 15,
- "wmm_ac_bk_aifs": 10,
- "wmm_ac_bk_txop_limit": 0,
- "wmm_ac_be_aifs": 2,
- "wmm_ac_be_cwmin": 7,
- "wmm_ac_be_cwmax": 15,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 10,
- "wmm_ac_vi_cwmin": 7,
- "wmm_ac_vi_cwmax": 15,
- "wmm_ac_vi_txop_limit": 94,
- "wmm_ac_vo_aifs": 10,
- "wmm_ac_vo_cwmin": 7,
- "wmm_ac_vo_cwmax": 15,
- "wmm_ac_vo_txop_limit": 47,
-}
-
-WMM_IMPROVE_BK_PARAMS = {
- "wmm_ac_bk_cwmin": 7,
- "wmm_ac_bk_cwmax": 15,
- "wmm_ac_bk_aifs": 2,
- "wmm_ac_bk_txop_limit": 0,
- "wmm_ac_be_aifs": 10,
- "wmm_ac_be_cwmin": 7,
- "wmm_ac_be_cwmax": 15,
- "wmm_ac_be_txop_limit": 0,
- "wmm_ac_vi_aifs": 10,
- "wmm_ac_vi_cwmin": 7,
- "wmm_ac_vi_cwmax": 15,
- "wmm_ac_vi_txop_limit": 94,
- "wmm_ac_vo_aifs": 10,
- "wmm_ac_vo_cwmin": 7,
- "wmm_ac_vo_cwmax": 15,
- "wmm_ac_vo_txop_limit": 47,
-}
-
-WMM_ACM_BK = {"wmm_ac_bk_acm": 1}
-WMM_ACM_BE = {"wmm_ac_be_acm": 1}
-WMM_ACM_VI = {"wmm_ac_vi_acm": 1}
-WMM_ACM_VO = {"wmm_ac_vo_acm": 1}
-
-UAPSD_ENABLED = {"uapsd_advertisement_enabled": 1}
-
-UTF_8_SSID = {"utf8_ssid": 1}
-
-ENABLE_RRM_BEACON_REPORT = {"rrm_beacon_report": 1}
-ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1}
-
-# Wireless Network Management (AKA 802.11v) features.
-ENABLE_WNM_TIME_ADVERTISEMENT: dict[str, int | str] = {
- "time_advertisement": 2,
- "time_zone": "EST5",
-}
-ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1}
-ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1}
-ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1}
-ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {
- "na_mcast_to_ucast": 1
-}
-
-VENDOR_IE = {
- "correct_length_beacon": {"vendor_elements": "dd0411223301"},
- "too_short_length_beacon": {"vendor_elements": "dd0311223301"},
- "too_long_length_beacon": {"vendor_elements": "dd0511223301"},
- "zero_length_beacon_with_data": {"vendor_elements": "dd0011223301"},
- "zero_length_beacon_without_data": {"vendor_elements": "dd00"},
- "simliar_to_wpa": {"vendor_elements": "dd040050f203"},
- "correct_length_association_response": {
- "assocresp_elements": "dd0411223301"
- },
- "too_short_length_association_response": {
- "assocresp_elements": "dd0311223301"
- },
- "too_long_length_association_response": {
- "assocresp_elements": "dd0511223301"
- },
- "zero_length_association_response_with_data": {
- "assocresp_elements": "dd0011223301"
- },
- "zero_length_association_response_without_data": {
- "assocresp_elements": "dd00"
- },
-}
-
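Each VENDOR_IE blob is a raw information element in hex: one byte element ID (0xdd, vendor specific), one byte length, then the payload, which is how the "too short" and "too long" variants get their deliberately wrong length byte. A small decoding sketch (not part of this file):

ie = bytes.fromhex("dd0411223301")
element_id, length, payload = ie[0], ie[1], ie[2:]
assert element_id == 0xDD and length == len(payload) == 4
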
-ENABLE_IEEE80211D = {"ieee80211d": 1}
-
-COUNTRY_STRING = {
- "ALL": {"country3": "0x20"},
- "OUTDOOR": {"country3": "0x4f"},
- "INDOOR": {"country3": "0x49"},
- "NONCOUNTRY": {"country3": "0x58"},
- "GLOBAL": {"country3": "0x04"},
-}
-
-COUNTRY_CODE = {
- "AFGHANISTAN": {"country_code": "AF"},
- "ALAND_ISLANDS": {"country_code": "AX"},
- "ALBANIA": {"country_code": "AL"},
- "ALGERIA": {"country_code": "DZ"},
- "AMERICAN_SAMOA": {"country_code": "AS"},
- "ANDORRA": {"country_code": "AD"},
- "ANGOLA": {"country_code": "AO"},
- "ANGUILLA": {"country_code": "AI"},
- "ANTARCTICA": {"country_code": "AQ"},
- "ANTIGUA_AND_BARBUDA": {"country_code": "AG"},
- "ARGENTINA": {"country_code": "AR"},
- "ARMENIA": {"country_code": "AM"},
- "ARUBA": {"country_code": "AW"},
- "AUSTRALIA": {"country_code": "AU"},
- "AUSTRIA": {"country_code": "AT"},
- "AZERBAIJAN": {"country_code": "AZ"},
- "BAHAMAS": {"country_code": "BS"},
- "BAHRAIN": {"country_code": "BH"},
- "BANGLADESH": {"country_code": "BD"},
- "BARBADOS": {"country_code": "BB"},
- "BELARUS": {"country_code": "BY"},
- "BELGIUM": {"country_code": "BE"},
- "BELIZE": {"country_code": "BZ"},
- "BENIN": {"country_code": "BJ"},
- "BERMUDA": {"country_code": "BM"},
- "BHUTAN": {"country_code": "BT"},
- "BOLIVIA": {"country_code": "BO"},
- "BONAIRE": {"country_code": "BQ"},
- "BOSNIA_AND_HERZEGOVINA": {"country_code": "BA"},
- "BOTSWANA": {"country_code": "BW"},
- "BOUVET_ISLAND": {"country_code": "BV"},
- "BRAZIL": {"country_code": "BR"},
- "BRITISH_INDIAN_OCEAN_TERRITORY": {"country_code": "IO"},
- "BRUNEI_DARUSSALAM": {"country_code": "BN"},
- "BULGARIA": {"country_code": "BG"},
- "BURKINA_FASO": {"country_code": "BF"},
- "BURUNDI": {"country_code": "BI"},
- "CAMBODIA": {"country_code": "KH"},
- "CAMEROON": {"country_code": "CM"},
- "CANADA": {"country_code": "CA"},
- "CAPE_VERDE": {"country_code": "CV"},
- "CAYMAN_ISLANDS": {"country_code": "KY"},
- "CENTRAL_AFRICAN_REPUBLIC": {"country_code": "CF"},
- "CHAD": {"country_code": "TD"},
- "CHILE": {"country_code": "CL"},
- "CHINA": {"country_code": "CN"},
- "CHRISTMAS_ISLAND": {"country_code": "CX"},
- "COCOS_ISLANDS": {"country_code": "CC"},
- "COLOMBIA": {"country_code": "CO"},
- "COMOROS": {"country_code": "KM"},
- "CONGO": {"country_code": "CG"},
- "DEMOCRATIC_REPUBLIC_CONGO": {"country_code": "CD"},
- "COOK_ISLANDS": {"country_code": "CK"},
- "COSTA_RICA": {"country_code": "CR"},
- "COTE_D_IVOIRE": {"country_code": "CI"},
- "CROATIA": {"country_code": "HR"},
- "CUBA": {"country_code": "CU"},
- "CURACAO": {"country_code": "CW"},
- "CYPRUS": {"country_code": "CY"},
- "CZECH_REPUBLIC": {"country_code": "CZ"},
- "DENMARK": {"country_code": "DK"},
- "DJIBOUTI": {"country_code": "DJ"},
- "DOMINICA": {"country_code": "DM"},
- "DOMINICAN_REPUBLIC": {"country_code": "DO"},
- "ECUADOR": {"country_code": "EC"},
- "EGYPT": {"country_code": "EG"},
- "EL_SALVADOR": {"country_code": "SV"},
- "EQUATORIAL_GUINEA": {"country_code": "GQ"},
- "ERITREA": {"country_code": "ER"},
- "ESTONIA": {"country_code": "EE"},
- "ETHIOPIA": {"country_code": "ET"},
- "FALKLAND_ISLANDS_(MALVINAS)": {"country_code": "FK"},
- "FAROE_ISLANDS": {"country_code": "FO"},
- "FIJI": {"country_code": "FJ"},
- "FINLAND": {"country_code": "FI"},
- "FRANCE": {"country_code": "FR"},
- "FRENCH_GUIANA": {"country_code": "GF"},
- "FRENCH_POLYNESIA": {"country_code": "PF"},
- "FRENCH_SOUTHERN_TERRITORIES": {"country_code": "TF"},
- "GABON": {"country_code": "GA"},
- "GAMBIA": {"country_code": "GM"},
- "GEORGIA": {"country_code": "GE"},
- "GERMANY": {"country_code": "DE"},
- "GHANA": {"country_code": "GH"},
- "GIBRALTAR": {"country_code": "GI"},
- "GREECE": {"country_code": "GR"},
- "GREENLAND": {"country_code": "GL"},
- "GRENADA": {"country_code": "GD"},
- "GUADELOUPE": {"country_code": "GP"},
- "GUAM": {"country_code": "GU"},
- "GUATEMALA": {"country_code": "GT"},
- "GUERNSEY": {"country_code": "GG"},
- "GUINEA": {"country_code": "GN"},
- "GUINEA-BISSAU": {"country_code": "GW"},
- "GUYANA": {"country_code": "GY"},
- "HAITI": {"country_code": "HT"},
- "HEARD_ISLAND_AND_MCDONALD_ISLANDS": {"country_code": "HM"},
- "VATICAN_CITY_STATE": {"country_code": "VA"},
- "HONDURAS": {"country_code": "HN"},
- "HONG_KONG": {"country_code": "HK"},
- "HUNGARY": {"country_code": "HU"},
- "ICELAND": {"country_code": "IS"},
- "INDIA": {"country_code": "IN"},
- "INDONESIA": {"country_code": "ID"},
- "IRAN": {"country_code": "IR"},
- "IRAQ": {"country_code": "IQ"},
- "IRELAND": {"country_code": "IE"},
- "ISLE_OF_MAN": {"country_code": "IM"},
- "ISRAEL": {"country_code": "IL"},
- "ITALY": {"country_code": "IT"},
- "JAMAICA": {"country_code": "JM"},
- "JAPAN": {"country_code": "JP"},
- "JERSEY": {"country_code": "JE"},
- "JORDAN": {"country_code": "JO"},
- "KAZAKHSTAN": {"country_code": "KZ"},
- "KENYA": {"country_code": "KE"},
- "KIRIBATI": {"country_code": "KI"},
- "DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA": {"country_code": "KP"},
- "REPUBLIC_OF_KOREA": {"country_code": "KR"},
- "KUWAIT": {"country_code": "KW"},
- "KYRGYZSTAN": {"country_code": "KG"},
- "LAO": {"country_code": "LA"},
- "LATVIA": {"country_code": "LV"},
- "LEBANON": {"country_code": "LB"},
- "LESOTHO": {"country_code": "LS"},
- "LIBERIA": {"country_code": "LR"},
- "LIBYA": {"country_code": "LY"},
- "LIECHTENSTEIN": {"country_code": "LI"},
- "LITHUANIA": {"country_code": "LT"},
- "LUXEMBOURG": {"country_code": "LU"},
- "MACAO": {"country_code": "MO"},
- "MACEDONIA": {"country_code": "MK"},
- "MADAGASCAR": {"country_code": "MG"},
- "MALAWI": {"country_code": "MW"},
- "MALAYSIA": {"country_code": "MY"},
- "MALDIVES": {"country_code": "MV"},
- "MALI": {"country_code": "ML"},
- "MALTA": {"country_code": "MT"},
- "MARSHALL_ISLANDS": {"country_code": "MH"},
- "MARTINIQUE": {"country_code": "MQ"},
- "MAURITANIA": {"country_code": "MR"},
- "MAURITIUS": {"country_code": "MU"},
- "MAYOTTE": {"country_code": "YT"},
- "MEXICO": {"country_code": "MX"},
- "MICRONESIA": {"country_code": "FM"},
- "MOLDOVA": {"country_code": "MD"},
- "MONACO": {"country_code": "MC"},
- "MONGOLIA": {"country_code": "MN"},
- "MONTENEGRO": {"country_code": "ME"},
- "MONTSERRAT": {"country_code": "MS"},
- "MOROCCO": {"country_code": "MA"},
- "MOZAMBIQUE": {"country_code": "MZ"},
- "MYANMAR": {"country_code": "MM"},
- "NAMIBIA": {"country_code": "NA"},
- "NAURU": {"country_code": "NR"},
- "NEPAL": {"country_code": "NP"},
- "NETHERLANDS": {"country_code": "NL"},
- "NEW_CALEDONIA": {"country_code": "NC"},
- "NEW_ZEALAND": {"country_code": "NZ"},
- "NICARAGUA": {"country_code": "NI"},
- "NIGER": {"country_code": "NE"},
- "NIGERIA": {"country_code": "NG"},
- "NIUE": {"country_code": "NU"},
- "NORFOLK_ISLAND": {"country_code": "NF"},
- "NORTHERN_MARIANA_ISLANDS": {"country_code": "MP"},
- "NORWAY": {"country_code": "NO"},
- "OMAN": {"country_code": "OM"},
- "PAKISTAN": {"country_code": "PK"},
- "PALAU": {"country_code": "PW"},
- "PALESTINE": {"country_code": "PS"},
- "PANAMA": {"country_code": "PA"},
- "PAPUA_NEW_GUINEA": {"country_code": "PG"},
- "PARAGUAY": {"country_code": "PY"},
- "PERU": {"country_code": "PE"},
- "PHILIPPINES": {"country_code": "PH"},
- "PITCAIRN": {"country_code": "PN"},
- "POLAND": {"country_code": "PL"},
- "PORTUGAL": {"country_code": "PT"},
- "PUERTO_RICO": {"country_code": "PR"},
- "QATAR": {"country_code": "QA"},
- "RÉUNION": {"country_code": "RE"},
- "ROMANIA": {"country_code": "RO"},
- "RUSSIAN_FEDERATION": {"country_code": "RU"},
- "RWANDA": {"country_code": "RW"},
- "SAINT_BARTHELEMY": {"country_code": "BL"},
- "SAINT_KITTS_AND_NEVIS": {"country_code": "KN"},
- "SAINT_LUCIA": {"country_code": "LC"},
- "SAINT_MARTIN": {"country_code": "MF"},
- "SAINT_PIERRE_AND_MIQUELON": {"country_code": "PM"},
- "SAINT_VINCENT_AND_THE_GRENADINES": {"country_code": "VC"},
- "SAMOA": {"country_code": "WS"},
- "SAN_MARINO": {"country_code": "SM"},
- "SAO_TOME_AND_PRINCIPE": {"country_code": "ST"},
- "SAUDI_ARABIA": {"country_code": "SA"},
- "SENEGAL": {"country_code": "SN"},
- "SERBIA": {"country_code": "RS"},
- "SEYCHELLES": {"country_code": "SC"},
- "SIERRA_LEONE": {"country_code": "SL"},
- "SINGAPORE": {"country_code": "SG"},
- "SINT_MAARTEN": {"country_code": "SX"},
- "SLOVAKIA": {"country_code": "SK"},
- "SLOVENIA": {"country_code": "SI"},
- "SOLOMON_ISLANDS": {"country_code": "SB"},
- "SOMALIA": {"country_code": "SO"},
- "SOUTH_AFRICA": {"country_code": "ZA"},
- "SOUTH_GEORGIA": {"country_code": "GS"},
- "SOUTH_SUDAN": {"country_code": "SS"},
- "SPAIN": {"country_code": "ES"},
- "SRI_LANKA": {"country_code": "LK"},
- "SUDAN": {"country_code": "SD"},
- "SURINAME": {"country_code": "SR"},
- "SVALBARD_AND_JAN_MAYEN": {"country_code": "SJ"},
- "SWAZILAND": {"country_code": "SZ"},
- "SWEDEN": {"country_code": "SE"},
- "SWITZERLAND": {"country_code": "CH"},
- "SYRIAN_ARAB_REPUBLIC": {"country_code": "SY"},
- "TAIWAN": {"country_code": "TW"},
- "TAJIKISTAN": {"country_code": "TJ"},
- "TANZANIA": {"country_code": "TZ"},
- "THAILAND": {"country_code": "TH"},
- "TIMOR-LESTE": {"country_code": "TL"},
- "TOGO": {"country_code": "TG"},
- "TOKELAU": {"country_code": "TK"},
- "TONGA": {"country_code": "TO"},
- "TRINIDAD_AND_TOBAGO": {"country_code": "TT"},
- "TUNISIA": {"country_code": "TN"},
- "TURKEY": {"country_code": "TR"},
- "TURKMENISTAN": {"country_code": "TM"},
- "TURKS_AND_CAICOS_ISLANDS": {"country_code": "TC"},
- "TUVALU": {"country_code": "TV"},
- "UGANDA": {"country_code": "UG"},
- "UKRAINE": {"country_code": "UA"},
- "UNITED_ARAB_EMIRATES": {"country_code": "AE"},
- "UNITED_KINGDOM": {"country_code": "GB"},
- "UNITED_STATES": {"country_code": "US"},
- "UNITED_STATES_MINOR_OUTLYING_ISLANDS": {"country_code": "UM"},
- "URUGUAY": {"country_code": "UY"},
- "UZBEKISTAN": {"country_code": "UZ"},
- "VANUATU": {"country_code": "VU"},
- "VENEZUELA": {"country_code": "VE"},
- "VIETNAM": {"country_code": "VN"},
- "VIRGIN_ISLANDS_BRITISH": {"country_code": "VG"},
- "VIRGIN_ISLANDS_US": {"country_code": "VI"},
- "WALLIS_AND_FUTUNA": {"country_code": "WF"},
- "WESTERN_SAHARA": {"country_code": "EH"},
- "YEMEN": {"country_code": "YE"},
- "ZAMBIA": {"country_code": "ZM"},
- "ZIMBABWE": {"country_code": "ZW"},
- "NON_COUNTRY": {"country_code": "XX"},
-}
-
-ALL_CHANNELS_2G = {
- 1: {20, 40},
- 2: {20, 40},
- 3: {20, 40},
- 4: {20, 40},
- 5: {20, 40},
- 6: {20, 40},
- 7: {20, 40},
- 8: {20, 40},
- 9: {20, 40},
- 10: {20, 40},
- 11: {20, 40},
- 12: {20, 40},
- 13: {20, 40},
- 14: {20},
-}
-
-ALL_CHANNELS_5G = {
- 36: {20, 40, 80},
- 40: {20, 40, 80},
- 44: {20, 40, 80},
- 48: {20, 40, 80},
- 52: {20, 40, 80},
- 56: {20, 40, 80},
- 60: {20, 40, 80},
- 64: {20, 40, 80},
- 100: {20, 40, 80},
- 104: {20, 40, 80},
- 108: {20, 40, 80},
- 112: {20, 40, 80},
- 116: {20, 40, 80},
- 120: {20, 40, 80},
- 124: {20, 40, 80},
- 128: {20, 40, 80},
- 132: {20, 40, 80},
- 136: {20, 40, 80},
- 140: {20, 40, 80},
- 144: {20, 40, 80},
- 149: {20, 40, 80},
- 153: {20, 40, 80},
- 157: {20, 40, 80},
- 161: {20, 40, 80},
- 165: {20},
-}
-
-ALL_CHANNELS = ALL_CHANNELS_2G | ALL_CHANNELS_5G
-
-
-@unique
-class WnmFeature(Enum):
- """Wireless Network Management (AKA 802.11v) features hostapd supports."""
-
- TIME_ADVERTISEMENT = auto()
- WNM_SLEEP_MODE = auto()
- BSS_TRANSITION_MANAGEMENT = auto()
- PROXY_ARP = auto()
- IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = auto()
diff --git a/packages/antlion/controllers/ap_lib/hostapd_security.py b/packages/antlion/controllers/ap_lib/hostapd_security.py
deleted file mode 100644
index 76dd08c..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd_security.py
+++ /dev/null
@@ -1,416 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import string
-from enum import Enum, StrEnum, auto, unique
-
-from antlion.controllers.ap_lib import hostapd_constants
-
-
-class SecurityModeInt(int, Enum):
- """Possible values for hostapd's "wpa" config option.
-
- The int value is a bit field that can enable WPA and/or WPA2.
-
- bit0 = enable WPA defined by IEEE 802.11i/D3.0
- bit1 = enable RSN (WPA2) defined by IEEE 802.11i/RSN
- bit2 = enable WAPI (rejected/withdrawn)
- bit3 = enable OSEN (ENT)
- """
-
- WEP = 0
- WPA1 = 1
- WPA2 = 2
- WPA3 = 2 # same as wpa2 and wpa2/wpa3; distinguished by wpa_key_mgmt
- MIXED = 3 # applies to wpa/wpa2 and wpa/wpa2/wpa3; distinguished by wpa_key_mgmt
- ENT = 8
-
- def __str__(self) -> str:
- return str(self.value)
-
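Because the value is a bit field, mixed mode is literally the WPA and WPA2 bits OR'd together; a small sketch (not from this file):

assert SecurityModeInt.MIXED == SecurityModeInt.WPA1 | SecurityModeInt.WPA2
assert str(SecurityModeInt.WPA2) == "2"  # the literal written as wpa=2 in hostapd.conf
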
-
-@unique
-class KeyManagement(StrEnum):
- SAE = "SAE"
- WPA_PSK = "WPA-PSK"
- WPA_PSK_SAE = "WPA-PSK SAE"
- ENT = "WPA-EAP"
-
-
-# TODO(http://b/286584981): This is currently only being used for OpenWRT.
-# Investigate whether we can replace KeyManagement with OpenWRTEncryptionMode.
-@unique
-class OpenWRTEncryptionMode(StrEnum):
- """Combination of Wi-Fi encryption mode and ciphers.
-
- Only used by OpenWRT.
-
- Besides the encryption mode, the encryption option also specifies the group and peer
- ciphers to use. To override the cipher, the value of encryption must be given in the
- form "mode+cipher". This enum contains all possible combinations.
-
- See https://openwrt.org/docs/guide-user/network/wifi/basic#encryption_modes.
- """
-
- NONE = "none"
- """No authentication, no ciphers"""
- SAE = "sae"
- """WPA3 Personal (SAE) using CCMP cipher"""
- SAE_MIXED = "sae-mixed"
- """WPA2/WPA3 Personal (PSK/SAE) mixed mode using CCMP cipher"""
- PSK2_TKIP_CCMP = "psk2+tkip+ccmp"
- """WPA2 Personal (PSK) using TKIP and CCMP ciphers"""
- PSK2_TKIP_AES = "psk2+tkip+aes"
- """WPA2 Personal (PSK) using TKIP and AES ciphers"""
- PSK2_TKIP = "psk2+tkip"
- """WPA2 Personal (PSK) using TKIP cipher"""
- PSK2_CCMP = "psk2+ccmp"
- """WPA2 Personal (PSK) using CCMP cipher"""
- PSK2_AES = "psk2+aes"
- """WPA2 Personal (PSK) using AES cipher"""
- PSK2 = "psk2"
- """WPA2 Personal (PSK) using CCMP cipher"""
- PSK_TKIP_CCMP = "psk+tkip+ccmp"
- """WPA Personal (PSK) using TKIP and CCMP ciphers"""
- PSK_TKIP_AES = "psk+tkip+aes"
- """WPA Personal (PSK) using TKIP and AES ciphers"""
- PSK_TKIP = "psk+tkip"
- """WPA Personal (PSK) using TKIP cipher"""
- PSK_CCMP = "psk+ccmp"
- """WPA Personal (PSK) using CCMP cipher"""
- PSK_AES = "psk+aes"
- """WPA Personal (PSK) using AES cipher"""
- PSK = "psk"
- """WPA Personal (PSK) using CCMP cipher"""
- PSK_MIXED_TKIP_CCMP = "psk-mixed+tkip+ccmp"
- """WPA/WPA2 Personal (PSK) mixed mode using TKIP and CCMP ciphers"""
- PSK_MIXED_TKIP_AES = "psk-mixed+tkip+aes"
- """WPA/WPA2 Personal (PSK) mixed mode using TKIP and AES ciphers"""
- PSK_MIXED_TKIP = "psk-mixed+tkip"
- """WPA/WPA2 Personal (PSK) mixed mode using TKIP cipher"""
- PSK_MIXED_CCMP = "psk-mixed+ccmp"
- """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
- PSK_MIXED_AES = "psk-mixed+aes"
- """WPA/WPA2 Personal (PSK) mixed mode using AES cipher"""
- PSK_MIXED = "psk-mixed"
- """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
- WEP = "wep"
- """defaults to “open system” authentication aka wep+open using RC4 cipher"""
- WEP_OPEN = "wep+open"
- """“open system” authentication using RC4 cipher"""
- WEP_SHARED = "wep+shared"
- """“shared key” authentication using RC4 cipher"""
- WPA3 = "wpa3"
- """WPA3 Enterprise using CCMP cipher"""
- WPA3_MIXED = "wpa3-mixed"
- """WPA3/WPA2 Enterprise using CCMP cipher"""
- WPA2_TKIP_CCMP = "wpa2+tkip+ccmp"
- """WPA2 Enterprise using TKIP and CCMP ciphers"""
- WPA2_TKIP_AES = "wpa2+tkip+aes"
- """WPA2 Enterprise using TKIP and AES ciphers"""
- WPA2_CCMP = "wpa2+ccmp"
- """WPA2 Enterprise using CCMP cipher"""
- WPA2_AES = "wpa2+aes'"
- """WPA2 Enterprise using AES cipher"""
- WPA2 = "wpa2"
- """WPA2 Enterprise using CCMP cipher"""
- WPA2_TKIP = "wpa2+tkip"
- """WPA2 Enterprise using TKIP cipher"""
- WPA_TKIP_CCMP = "wpa+tkip+ccmp"
- """WPA Enterprise using TKIP and CCMP ciphers"""
- WPA_TKIP_AES = "wpa+tkip+aes"
- """WPA Enterprise using TKIP and AES ciphers"""
- WPA_CCMP = "wpa+ccmp"
- """WPA Enterprise using CCMP cipher"""
- WPA_AES = "wpa+aes"
- """WPA Enterprise using AES cipher"""
- WPA_TKIP = "wpa+tkip"
- """WPA Enterprise using TKIP cipher"""
- WPA = "wpa"
- """WPA Enterprise using CCMP cipher"""
- WPA_MIXED_TKIP_CCMP = "wpa-mixed+tkip+ccmp"
- """WPA/WPA2 Enterprise mixed mode using TKIP and CCMP ciphers"""
- WPA_MIXED_TKIP_AES = "wpa-mixed+tkip+aes"
- """WPA/WPA2 Enterprise mixed mode using TKIP and AES ciphers"""
- WPA_MIXED_TKIP = "wpa-mixed+tkip"
- """WPA/WPA2 Enterprise mixed mode using TKIP cipher"""
- WPA_MIXED_CCMP = "wpa-mixed+ccmp"
- """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
- WPA_MIXED_AES = "wpa-mixed+aes"
- """WPA/WPA2 Enterprise mixed mode using AES cipher"""
- WPA_MIXED = "wpa-mixed"
- """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
- OWE = "owe"
- """Opportunistic Wireless Encryption (OWE) using CCMP cipher"""
-
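On OpenWRT the enum value is used verbatim as the encryption option; a hedged usage sketch (the wireless.@wifi-iface[0].encryption UCI path is the usual convention, not something this file configures):

encryption = OpenWRTEncryptionMode.SAE_MIXED
uci_cmd = f"uci set wireless.@wifi-iface[0].encryption={encryption}"
# -> uci set wireless.@wifi-iface[0].encryption=sae-mixed
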
-
-@unique
-class FuchsiaSecurityType(StrEnum):
- """Fuchsia supported security types.
-
- Defined by the fuchsia.wlan.policy.SecurityType FIDL.
-
- https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/types.fidl
- """
-
- NONE = "none"
- WEP = "wep"
- WPA = "wpa"
- WPA2 = "wpa2"
- WPA3 = "wpa3"
-
-
-@unique
-class SecurityMode(StrEnum):
- OPEN = auto()
- WEP = auto()
- WPA = auto()
- WPA2 = auto()
- WPA_WPA2 = auto()
- WPA3 = auto()
- WPA2_WPA3 = auto()
- WPA_WPA2_WPA3 = auto()
- ENT = auto()
-
- def security_mode_int(self) -> SecurityModeInt:
- match self:
- case SecurityMode.OPEN:
- raise TypeError("Open security doesn't have a SecurityModeInt")
- case SecurityMode.WEP:
- return SecurityModeInt.WEP
- case SecurityMode.WPA:
- return SecurityModeInt.WPA1
- case SecurityMode.WPA2:
- return SecurityModeInt.WPA2
- case SecurityMode.WPA_WPA2:
- return SecurityModeInt.MIXED
- case SecurityMode.WPA3:
- return SecurityModeInt.WPA3
- case SecurityMode.WPA2_WPA3:
- return SecurityModeInt.WPA3
- case SecurityMode.WPA_WPA2_WPA3:
- return SecurityModeInt.MIXED
- case SecurityMode.ENT:
- return SecurityModeInt.ENT
-
- def key_management(self) -> KeyManagement | None:
- match self:
- case SecurityMode.OPEN:
- return None
- case SecurityMode.WEP:
- return None
- case SecurityMode.WPA:
- return KeyManagement.WPA_PSK
- case SecurityMode.WPA2:
- return KeyManagement.WPA_PSK
- case SecurityMode.WPA_WPA2:
- return KeyManagement.WPA_PSK
- case SecurityMode.WPA3:
- return KeyManagement.SAE
- case SecurityMode.WPA2_WPA3:
- return KeyManagement.WPA_PSK_SAE
- case SecurityMode.WPA_WPA2_WPA3:
- return KeyManagement.WPA_PSK_SAE
- case SecurityMode.ENT:
- return KeyManagement.ENT
-
- def fuchsia_security_type(self) -> FuchsiaSecurityType:
- match self:
- case SecurityMode.OPEN:
- return FuchsiaSecurityType.NONE
- case SecurityMode.WEP:
- return FuchsiaSecurityType.WEP
- case SecurityMode.WPA:
- return FuchsiaSecurityType.WPA
- case SecurityMode.WPA2:
- return FuchsiaSecurityType.WPA2
- case SecurityMode.WPA_WPA2:
- return FuchsiaSecurityType.WPA2
- case SecurityMode.WPA3:
- return FuchsiaSecurityType.WPA3
- case SecurityMode.WPA2_WPA3:
- return FuchsiaSecurityType.WPA3
- case SecurityMode.WPA_WPA2_WPA3:
- return FuchsiaSecurityType.WPA3
- case SecurityMode.ENT:
- raise NotImplementedError(
- f'Fuchsia has not implemented support for security mode "{self}"'
- )
-
- def is_wpa3(self) -> bool:
- match self:
- case SecurityMode.OPEN:
- return False
- case SecurityMode.WEP:
- return False
- case SecurityMode.WPA:
- return False
- case SecurityMode.WPA2:
- return False
- case SecurityMode.WPA_WPA2:
- return False
- case SecurityMode.WPA3:
- return True
- case SecurityMode.WPA2_WPA3:
- return True
- case SecurityMode.WPA_WPA2_WPA3:
- return True
- case SecurityMode.ENT:
- return False
- raise TypeError("Unknown security mode")
-
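A SecurityMode therefore carries three views of the same setting; a small sketch of reading them (not part of this file):

mode = SecurityMode.WPA2_WPA3
print(mode.security_mode_int())      # "2"           -> hostapd wpa=
print(mode.key_management())         # "WPA-PSK SAE" -> hostapd wpa_key_mgmt=
print(mode.fuchsia_security_type())  # "wpa3"        -> fuchsia.wlan.policy SecurityType
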
-
-class Security(object):
- """The Security class for hostapd representing some of the security
- settings that are allowed in hostapd. If needed more can be added.
- """
-
- def __init__(
- self,
- security_mode: SecurityMode = SecurityMode.OPEN,
- password: str | None = None,
- wpa_cipher: str | None = hostapd_constants.WPA_DEFAULT_CIPHER,
- wpa2_cipher: str | None = hostapd_constants.WPA2_DEFAULT_CIPER,
- wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
- wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
- wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
- radius_server_ip: str | None = None,
- radius_server_port: int | None = None,
- radius_server_secret: str | None = None,
- ) -> None:
- """Gather all of the security settings for WPA-PSK. This could be
- expanded later.
-
- Args:
- security_mode: Type of security mode.
- password: The PSK or passphrase for the security mode.
- wpa_cipher: The cipher to be used for wpa.
- Options: TKIP, CCMP, TKIP CCMP
- Default: TKIP
- wpa2_cipher: The cipher to be used for wpa2.
- Options: TKIP, CCMP, TKIP CCMP
- Default: CCMP
- wpa_group_rekey: How often to refresh the GTK regardless of network
- changes.
- Options: An integer in seconds, None
- Default: 600 seconds
- wpa_strict_rekey: Whether to do a group key update when client
- leaves the network or not.
- Options: True, False
- Default: True
- wep_default_key: The wep key number to use when transmitting.
- radius_server_ip: Radius server IP for Enterprise auth.
- radius_server_port: Radius server port for Enterprise auth.
- radius_server_secret: Radius server secret for Enterprise auth.
- """
- self.security_mode = security_mode
- self.wpa_cipher = wpa_cipher
- self.wpa2_cipher = wpa2_cipher
- self.wpa_group_rekey = wpa_group_rekey
- self.wpa_strict_rekey = wpa_strict_rekey
- self.wep_default_key = wep_default_key
- self.radius_server_ip = radius_server_ip
- self.radius_server_port = radius_server_port
- self.radius_server_secret = radius_server_secret
- if password:
- if self.security_mode is SecurityMode.WEP:
- if len(password) in hostapd_constants.WEP_STR_LENGTH:
- self.password: str | None = f'"{password}"'
- elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
- c in string.hexdigits for c in password
- ):
- self.password = password
- else:
- raise ValueError(
- "WEP key must be a hex string of %s characters"
- % hostapd_constants.WEP_HEX_LENGTH
- )
- else:
- if (
- len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
- or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
- ):
- raise ValueError(
- "Password must be a minumum of %s characters and a maximum of %s"
- % (
- hostapd_constants.MIN_WPA_PSK_LENGTH,
- hostapd_constants.MAX_WPA_PSK_LENGTH,
- )
- )
- else:
- self.password = password
- else:
- self.password = None
-
- def __str__(self) -> str:
- return self.security_mode
-
- def generate_dict(self) -> dict[str, str | int]:
- """Returns: an ordered dictionary of settings"""
- if self.security_mode is SecurityMode.OPEN:
- return {}
-
- settings: dict[str, str | int] = collections.OrderedDict()
-
- if self.security_mode is SecurityMode.WEP:
- settings["wep_default_key"] = self.wep_default_key
- if self.password is not None:
- settings[f"wep_key{self.wep_default_key}"] = self.password
- elif self.security_mode == SecurityMode.ENT:
- if self.radius_server_ip is not None:
- settings["auth_server_addr"] = self.radius_server_ip
- if self.radius_server_port is not None:
- settings["auth_server_port"] = self.radius_server_port
- if self.radius_server_secret is not None:
- settings[
- "auth_server_shared_secret"
- ] = self.radius_server_secret
- settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
- settings["ieee8021x"] = hostapd_constants.IEEE8021X
- settings["wpa"] = hostapd_constants.WPA2
- else:
- settings["wpa"] = self.security_mode.security_mode_int().value
- if self.password:
- if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
- settings["wpa_psk"] = self.password
- else:
- settings["wpa_passphrase"] = self.password
- # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
- if self.wpa_cipher and (
- self.security_mode is SecurityMode.WPA
- or self.security_mode is SecurityMode.WPA_WPA2
- or self.security_mode is SecurityMode.WPA_WPA2_WPA3
- ):
- settings["wpa_pairwise"] = self.wpa_cipher
- # For wpa/wpa2, wpa2, wpa2/wpa3, and wpa3, add rsn_pairwise
- if self.wpa2_cipher and (
- self.security_mode is SecurityMode.WPA_WPA2
- or self.security_mode is SecurityMode.WPA2
- or self.security_mode is SecurityMode.WPA2_WPA3
- or self.security_mode is SecurityMode.WPA3
- ):
- settings["rsn_pairwise"] = self.wpa2_cipher
- # Add wpa_key_mgmt based on security mode string
- wpa_key_mgmt = self.security_mode.key_management()
- if wpa_key_mgmt is not None:
- settings["wpa_key_mgmt"] = str(wpa_key_mgmt)
- if self.wpa_group_rekey:
- settings["wpa_group_rekey"] = self.wpa_group_rekey
- if self.wpa_strict_rekey:
- settings[
- "wpa_strict_rekey"
- ] = hostapd_constants.WPA_STRICT_REKEY
-
- return settings
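
A hedged end-to-end sketch of how a profile built from this class turns into hostapd.conf entries (the exact cipher and rekey values come from hostapd_constants defaults and are only indicated here):

security = Security(SecurityMode.WPA2, password="hunter2-hunter2")
settings = security.generate_dict()
# e.g. {"wpa": 2, "wpa_passphrase": "hunter2-hunter2",
#       "rsn_pairwise": <wpa2_cipher>, "wpa_key_mgmt": "WPA-PSK", ...}
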
diff --git a/packages/antlion/controllers/ap_lib/hostapd_utils.py b/packages/antlion/controllers/ap_lib/hostapd_utils.py
deleted file mode 100644
index 40d6435..0000000
--- a/packages/antlion/controllers/ap_lib/hostapd_utils.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion import utils
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def generate_random_password(
- security_mode: SecurityMode = SecurityMode.OPEN,
- length: int | None = None,
- hex: int | None = None,
-) -> str:
- """Generates a random password. Defaults to an 8 character ASCII password.
-
- Args:
- security_mode: Used to determine if length should be WEP compatible
- (useful for generated tests to simply pass in security mode)
- length: Length of password to generate. Defaults to 8, unless
- security_mode is WEP, then 13
- hex: If True, generates a hex string, else ascii
- """
- if hex:
- generator_func = utils.rand_hex_str
- else:
- generator_func = utils.rand_ascii_str
-
- if length:
- return generator_func(length)
- if security_mode is SecurityMode.WEP:
- return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH)
- else:
- return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH)
-
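Typical calls, per the defaults described in the docstring (a sketch, not from this file):

psk = generate_random_password()                         # 8-char ASCII passphrase
wep_key = generate_random_password(SecurityMode.WEP)     # 13-char WEP string key
hex_key = generate_random_password(length=26, hex=True)  # 26 hex digits
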
-
-def verify_interface(interface: str, valid_interfaces: list[str]) -> None:
- """Raises error if interface is missing or invalid
-
- Args:
- interface: interface name
- valid_interfaces: valid interface names
- """
- if interface not in valid_interfaces:
- raise ValueError(f"Invalid interface name was passed: {interface}")
-
-
-def verify_security_mode(
- security_profile: Security, valid_security_modes: list[SecurityMode]
-) -> None:
- """Raises error if security mode is not in list of valid security modes.
-
- Args:
- security_profile: Security to verify
- valid_security_modes: Valid security modes for a profile.
- """
- if security_profile.security_mode not in valid_security_modes:
- raise ValueError(
- f"Invalid Security Mode: {security_profile.security_mode}; "
- f"Valid Security Modes for this profile: {valid_security_modes}"
- )
-
-
-def verify_cipher(security_profile: Security, valid_ciphers: list[str]) -> None:
- """Raise error if cipher is not in list of valid ciphers.
-
- Args:
- security_profile: Security profile to verify
- valid_ciphers: A list of valid ciphers for security_profile.
- """
- if security_profile.security_mode is SecurityMode.OPEN:
- raise ValueError("Security mode is open.")
- elif security_profile.security_mode is SecurityMode.WPA:
- if security_profile.wpa_cipher not in valid_ciphers:
- raise ValueError(
- f"Invalid WPA Cipher: {security_profile.wpa_cipher}. "
- f"Valid WPA Ciphers for this profile: {valid_ciphers}"
- )
- elif security_profile.security_mode is SecurityMode.WPA2:
- if security_profile.wpa2_cipher not in valid_ciphers:
- raise ValueError(
- f"Invalid WPA2 Cipher: {security_profile.wpa2_cipher}. "
- f"Valid WPA2 Ciphers for this profile: {valid_ciphers}"
- )
- else:
- raise ValueError(
- f"Invalid Security Mode: {security_profile.security_mode}"
- )
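
A quick sketch of how a test might gate its security profile with these helpers (the profile and cipher list are illustrative, assuming the default WPA2 cipher is CCMP):

profile = Security(SecurityMode.WPA2, password=generate_random_password())
verify_security_mode(profile, [SecurityMode.WPA2, SecurityMode.WPA2_WPA3])
verify_cipher(profile, ["CCMP"])
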
diff --git a/packages/antlion/controllers/ap_lib/radio_measurement.py b/packages/antlion/controllers/ap_lib/radio_measurement.py
deleted file mode 100644
index 5c7f2e0..0000000
--- a/packages/antlion/controllers/ap_lib/radio_measurement.py
+++ /dev/null
@@ -1,246 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import IntEnum, unique
-
-
-@unique
-class ApReachability(IntEnum):
- """Neighbor Report AP Reachability values.
-
- See IEEE 802.11-2020 Figure 9-172.
- """
-
- NOT_REACHABLE = 1
- UNKNOWN = 2
- REACHABLE = 3
-
-
-class BssidInformationCapabilities:
- """Representation of Neighbor Report BSSID Information Capabilities.
-
- See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4.
- """
-
- def __init__(
- self,
- spectrum_management: bool = False,
- qos: bool = False,
- apsd: bool = False,
- radio_measurement: bool = False,
- ):
- """Create a capabilities object.
-
- Args:
- spectrum_management: whether spectrum management is required.
- qos: whether QoS is implemented.
- apsd: whether APSD is implemented.
- radio_measurement: whether radio measurement is activated.
- """
- self._spectrum_management = spectrum_management
- self._qos = qos
- self._apsd = apsd
- self._radio_measurement = radio_measurement
-
- def __index__(self) -> int:
- """Convert to numeric representation of the field's bits."""
- return (
- self.spectrum_management << 5
- | self.qos << 4
- | self.apsd << 3
- | self.radio_measurement << 2
- )
-
- @property
- def spectrum_management(self) -> bool:
- return self._spectrum_management
-
- @property
- def qos(self) -> bool:
- return self._qos
-
- @property
- def apsd(self) -> bool:
- return self._apsd
-
- @property
- def radio_measurement(self) -> bool:
- return self._radio_measurement
-
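The capabilities object packs into the bit positions of Figure 9-338 via __index__; a quick sketch (not part of this file):

caps = BssidInformationCapabilities(qos=True, radio_measurement=True)
assert int(caps) == (1 << 4) | (1 << 2)  # QoS bit and Radio Measurement bit
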
-
-class BssidInformation:
- """Representation of Neighbor Report BSSID Information field.
-
- BssidInformation contains info about a neighboring AP, to be included in a
- neighbor report element. See IEEE 802.11-2020 Figure 9-337.
- """
-
- def __init__(
- self,
- ap_reachability: ApReachability = ApReachability.UNKNOWN,
- security: bool = False,
- key_scope: bool = False,
- capabilities: BssidInformationCapabilities = BssidInformationCapabilities(),
- mobility_domain: bool = False,
- high_throughput: bool = False,
- very_high_throughput: bool = False,
- ftm: bool = False,
- ):
- """Create a BSSID Information object for a neighboring AP.
-
- Args:
- ap_reachability: whether this AP is reachable by the STA that
- requested the neighbor report.
- security: whether this AP is known to support the same security
- provisioning as used by the STA in its current association.
- key_scope: whether this AP is known to have the same
- authenticator as the AP sending the report.
- capabilities: selected capabilities of this AP.
- mobility_domain: whether the AP is including an MDE in its beacon
- frames and the contents of that MDE are identical to the MDE
- advertised by the AP sending the report.
- high_throughput: whether the AP is an HT AP including the HT
- Capabilities element in its Beacons, and that the contents of
- that HT capabilities element are identical to the HT
- capabilities element advertised by the AP sending the report.
- very_high_throughput: whether the AP is a VHT AP and the VHT
- capabilities element, if included as a subelement, is
- identical in content to the VHT capabilities element included
- in the AP’s beacon.
- ftm: whether the AP is known to have the Fine Timing Measurement
- Responder extended capability.
- """
- self._ap_reachability = ap_reachability
- self._security = security
- self._key_scope = key_scope
- self._capabilities = capabilities
- self._mobility_domain = mobility_domain
- self._high_throughput = high_throughput
- self._very_high_throughput = very_high_throughput
- self._ftm = ftm
-
- def __index__(self) -> int:
- """Convert to numeric representation of the field's bits."""
- return (
- self._ap_reachability << 30
- | self.security << 29
- | self.key_scope << 28
- | int(self.capabilities) << 22
- | self.mobility_domain << 21
- | self.high_throughput << 20
- | self.very_high_throughput << 19
- | self.ftm << 18
- )
-
- @property
- def security(self) -> bool:
- return self._security
-
- @property
- def key_scope(self) -> bool:
- return self._key_scope
-
- @property
- def capabilities(self) -> BssidInformationCapabilities:
- return self._capabilities
-
- @property
- def mobility_domain(self) -> bool:
- return self._mobility_domain
-
- @property
- def high_throughput(self) -> bool:
- return self._high_throughput
-
- @property
- def very_high_throughput(self) -> bool:
- return self._very_high_throughput
-
- @property
- def ftm(self) -> bool:
- return self._ftm
-
-
-@unique
-class PhyType(IntEnum):
- """PHY type values, see dot11PhyType in 802.11-2020 Annex C."""
-
- DSSS = 2
- OFDM = 4
- HRDSS = 5
- ERP = 6
- HT = 7
- DMG = 8
- VHT = 9
- TVHT = 10
- S1G = 11
- CDMG = 12
- CMMG = 13
-
-
-class NeighborReportElement:
- """Representation of Neighbor Report element.
-
- See IEEE 802.11-2020 9.4.2.36.
- """
-
- def __init__(
- self,
- bssid: str,
- bssid_information: BssidInformation,
- operating_class: int,
- channel_number: int,
- phy_type: PhyType,
- ):
- """Create a neighbor report element.
-
- Args:
- bssid: MAC address of the neighbor.
- bssid_information: BSSID Information of the neighbor.
- operating_class: operating class of the neighbor.
- channel_number: channel number of the neighbor.
- phy_type: dot11PhyType of the neighbor.
- """
- self._bssid = bssid
- self._bssid_information = bssid_information
-
- # Operating Class, IEEE 802.11-2020 Annex E.
- self._operating_class = operating_class
-
- self._channel_number = channel_number
-
- # PHY Type, IEEE 802.11-2020 Annex C.
- self._phy_type = phy_type
-
- @property
- def bssid(self) -> str:
- return self._bssid
-
- @property
- def bssid_information(self) -> BssidInformation:
- return self._bssid_information
-
- @property
- def operating_class(self) -> int:
- return self._operating_class
-
- @property
- def channel_number(self) -> int:
- return self._channel_number
-
- @property
- def phy_type(self) -> PhyType:
- return self._phy_type
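
A sketch of describing one neighboring AP with these classes (the concrete values are illustrative, not taken from this file):

neighbor = NeighborReportElement(
    bssid="01:23:45:67:89:ab",
    bssid_information=BssidInformation(
        ap_reachability=ApReachability.REACHABLE,
        capabilities=BssidInformationCapabilities(radio_measurement=True),
    ),
    operating_class=128,  # illustrative: 5 GHz, 80 MHz (IEEE 802.11-2020 Annex E)
    channel_number=36,
    phy_type=PhyType.VHT,
)
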
diff --git a/packages/antlion/controllers/ap_lib/radvd.py b/packages/antlion/controllers/ap_lib/radvd.py
deleted file mode 100644
index 748aa11..0000000
--- a/packages/antlion/controllers/ap_lib/radvd.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import shlex
-import tempfile
-import time
-
-from tenacity import retry, retry_if_exception_type, stop_after_delay
-
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-from antlion.controllers.utils_lib.commands import shell
-from antlion.libs.proc import job
-from antlion.logger import LogLevel
-from antlion.runner import Runner
-
-
-class RadvdStartError(Exception):
- """Radvd failed to start."""
-
-
-class Radvd(object):
- """Manages the radvd program.
-
- https://en.wikipedia.org/wiki/Radvd
- This manages the Router Advertisement Daemon, which advertises IPv6
- router addresses and IPv6 routing prefixes using the Neighbor Discovery
- Protocol.
-
- Attributes:
- config: The radvd configuration that is being used.
- """
-
- def __init__(
- self,
- runner: Runner,
- interface: str,
- working_dir: str | None = None,
- radvd_binary: str | None = None,
- ) -> None:
- """
- Args:
- runner: Object that has run_async and run methods for executing
- shell commands (e.g. connection.SshConnection)
- interface: Name of the interface to use (e.g. wlan0).
- working_dir: Directory to work out of.
- radvd_binary: Location of the radvd binary
- """
- if not radvd_binary:
- logging.debug(
- "No radvd binary specified. " "Assuming radvd is in the path."
- )
- radvd_binary = "radvd"
- else:
- logging.debug(f"Using radvd binary located at {radvd_binary}")
- if working_dir is None:
- # Local runners get the local temp dir; remote runners default to /tmp.
- working_dir = tempfile.gettempdir() if runner.run == job.run else "/tmp"
- self._radvd_binary = radvd_binary
- self._runner = runner
- self._interface = interface
- self._working_dir = working_dir
- self.config: RadvdConfig | None = None
- self._shell = shell.ShellCommand(runner)
- self._log_file = f"{working_dir}/radvd-{self._interface}.log"
- self._config_file = f"{working_dir}/radvd-{self._interface}.conf"
- self._pid_file = f"{working_dir}/radvd-{self._interface}.pid"
- self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}"
-
- def start(self, config: RadvdConfig) -> None:
- """Starts radvd
-
- Starts the radvd daemon and runs it in the background.
-
- Args:
- config: Configs to start the radvd with.
-
- Note:
- The daemon can start and still not work. Invalid configurations can
- take a long time to surface errors, and because the daemon runs
- indefinitely there is nothing to wait on. To check that the configs
- are ok, poll is_alive and inspect the logs.
-
- Raises:
- RadvdStartError: when a radvd error is found or process is dead
- """
- if self.is_alive():
- self.stop()
-
- self.config = config
-
- self._shell.delete_file(self._log_file)
- self._shell.delete_file(self._config_file)
- self._write_configs(self.config)
-
- try:
- self._launch()
- except RadvdStartError:
- self.stop()
- raise
-
- # TODO(http://b/372534563): Remove retries once the source of SIGINT is
- # found and a fix is implemented.
- @retry(
- stop=stop_after_delay(30),
- retry=retry_if_exception_type(RadvdStartError),
- )
- def _launch(self) -> None:
- """Launch the radvd process with retries.
-
- Raises:
- RadvdStartError: when a radvd error is found or process is dead
- """
- command = (
- f"{self._radvd_binary} -C {shlex.quote(self._config_file)} "
- f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 "
- f'-l {self._log_file} > "{self._log_file}" 2>&1'
- )
- self._runner.run_async(command)
- self._wait_for_process(timeout=10)
-
- def stop(self) -> None:
- """Kills the daemon if it is running."""
- self._shell.kill(self._ps_identifier)
-
- def is_alive(self) -> bool:
- """
- Returns:
- True if the daemon is running.
- """
- return self._shell.is_alive(self._ps_identifier)
-
- def pull_logs(self) -> str:
- """Pulls the log files from where radvd is running.
-
- Returns:
- A string of the radvd logs.
- """
- # TODO: Auto pulling of logs when stop is called.
- with LogLevel(self._runner.log, logging.INFO):
- return self._shell.read_file(self._log_file)
-
- def _wait_for_process(self, timeout: int = 60) -> None:
- """Waits for the process to come up.
-
- Waits until the radvd process is found running, or there is
- a timeout. If the program never comes up then the log file
- will be scanned for errors.
-
- Raises:
- RadvdStartError: when a radvd error is found or process is dead
- """
- start_time = time.time()
- while time.time() - start_time < timeout and not self.is_alive():
- time.sleep(0.1)
- self._scan_for_errors(False)
- self._scan_for_errors(True)
-
- def _scan_for_errors(self, should_be_up: bool) -> None:
- """Scans the radvd log for any errors.
-
- Args:
- should_be_up: If true then radvd program is expected to be alive.
- If it is found not alive while this is true an error
- is thrown.
-
- Raises:
- RadvdStartError: when a radvd error is found or process is dead
- """
- # Store this so that all other errors have priority.
- is_dead = not self.is_alive()
-
- exited_prematurely = self._shell.search_file("Exiting", self._log_file)
- if exited_prematurely:
- raise RadvdStartError("Radvd exited prematurely.", self)
- if should_be_up and is_dead:
- raise RadvdStartError("Radvd failed to start", self)
-
- def _write_configs(self, config: RadvdConfig) -> None:
- """Writes the configs to the radvd config file.
-
- Args:
- config: a RadvdConfig object.
- """
- self._shell.delete_file(self._config_file)
- conf = config.package_configs()
- lines = ["interface %s {" % self._interface]
- for interface_option_key, interface_option in conf[
- "interface_options"
- ].items():
- lines.append(
- f"\t{str(interface_option_key)} {str(interface_option)};"
- )
- lines.append(f"\tprefix {conf['prefix']}")
- lines.append("\t{")
- for prefix_option in conf["prefix_options"].items():
- lines.append(f"\t\t{' '.join(map(str, prefix_option))};")
- lines.append("\t};")
- if conf["clients"]:
- lines.append("\tclients")
- lines.append("\t{")
- for client in conf["clients"]:
- lines.append(f"\t\t{client};")
- lines.append("\t};")
- if conf["route"]:
- lines.append("\troute %s {" % conf["route"])
- for route_option in conf["route_options"].items():
- lines.append(f"\t\t{' '.join(map(str, route_option))};")
- lines.append("\t};")
- if conf["rdnss"]:
- lines.append(
- "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]])
- )
- for rdnss_option in conf["rdnss_options"].items():
- lines.append(f"\t\t{' '.join(map(str, rdnss_option))};")
- lines.append("\t};")
- lines.append("};")
- output_config = "\n".join(lines)
- logging.info(f"Writing {self._config_file}")
- logging.debug("******************Start*******************")
- logging.debug(f"\n{output_config}")
- logging.debug("*******************End********************")
-
- self._shell.write_file(self._config_file, output_config)
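
A hedged usage sketch (assumes `runner` is an already-constructed antlion Runner, e.g. an SSH connection to the access point):

radvd = Radvd(runner, "wlan0")
radvd.start(RadvdConfig())  # advertise the default prefix on wlan0
print(radvd.pull_logs())
radvd.stop()
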
diff --git a/packages/antlion/controllers/ap_lib/radvd_config.py b/packages/antlion/controllers/ap_lib/radvd_config.py
deleted file mode 100644
index 8d671ca..0000000
--- a/packages/antlion/controllers/ap_lib/radvd_config.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-from typing import Any
-
-from antlion.controllers.ap_lib import radvd_constants
-
-
-class RadvdConfig(object):
- """The root settings for the router advertisement daemon.
-
- All the settings for a router advertisement daemon.
- """
-
- def __init__(
- self,
- prefix: str = radvd_constants.DEFAULT_PREFIX,
- clients: list[str] = [],
- route: Any | None = None,
- rdnss: list[str] = [],
- ignore_if_missing: str | None = None,
- adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON,
- unicast_only: str | None = None,
- max_rtr_adv_interval: int | None = None,
- min_rtr_adv_interval: int | None = None,
- min_delay_between_ras: int | None = None,
- adv_managed_flag: str | None = None,
- adv_other_config_flag: str | None = None,
- adv_link_mtu: int | None = None,
- adv_reachable_time: int | None = None,
- adv_retrans_timer: int | None = None,
- adv_cur_hop_limit: int | None = None,
- adv_default_lifetime: int | None = None,
- adv_default_preference: str | None = None,
- adv_source_ll_address: str | None = None,
- adv_home_agent_flag: str | None = None,
- adv_home_agent_info: str | None = None,
- home_agent_lifetime: int | None = None,
- home_agent_preference: int | None = None,
- adv_mob_rtr_support_flag: str | None = None,
- adv_interval_opt: str | None = None,
- adv_on_link: str = radvd_constants.ADV_ON_LINK_ON,
- adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON,
- adv_router_addr: str | None = None,
- adv_valid_lifetime: int | None = None,
- adv_preferred_lifetime: int | None = None,
- base_6to4_interface: str | None = None,
- adv_route_lifetime: int | None = None,
- adv_route_preference: str | None = None,
- adv_rdnss_preference: int | None = None,
- adv_rdnss_open: str | None = None,
- adv_rdnss_lifetime: int | None = None,
- ) -> None:
- """Construct a RadvdConfig.
-
- Args:
- prefix: IPv6 prefix and length, e.g. fd::/64
- clients: A list of IPv6 link local addresses that will be the only
- clients served. All other IPv6 addresses will be ignored if
- this list is present.
- route: A route for the router advertisement with prefix.
- rdnss: A list of recursive DNS servers
- ignore_if_missing: A flag indicating whether or not the interface
- is ignored if it does not exist at start-up. By default,
- radvd exits.
- adv_send_advert: A flag indicating whether or not the router sends
- periodic router advertisements and responds to router
- solicitations.
- unicast_only: Indicates that the interface link type only supports
- unicast.
- max_rtr_adv_interval: The maximum time allowed between sending
- unsolicited multicast router advertisements from the interface,
- in seconds. Must be no less than 4 seconds and no greater than
- 1800 seconds.
- min_rtr_adv_interval: The minimum time allowed between sending
- unsolicited multicast router advertisements from the interface,
- in seconds. Must be no less than 3 seconds and no greater than
- 0.75 * max_rtr_adv_interval.
- min_delay_between_ras: The minimum time allowed between sending
- multicast router advertisements from the interface, in seconds.
- adv_managed_flag: When set, hosts use the administered (stateful)
- protocol for address autoconfiguration in addition to any
- addresses autoconfigured using stateless address
- autoconfiguration. The use of this flag is described in
- RFC 4862.
- adv_other_config_flag: When set, hosts use the administered
- (stateful) protocol for autoconfiguration of other (non-address)
- information. The use of this flag is described in RFC 4862.
- adv_link_mtu: The MTU option is used in router advertisement
- messages to ensure that all nodes on a link use the same MTU
- value in those cases where the link MTU is not well known.
- adv_reachable_time: The time, in milliseconds, that a node assumes
- a neighbor is reachable after having received a reachability
- confirmation. Used by the Neighbor Unreachability Detection
- algorithm (see Section 7.3 of RFC 4861). A value of zero means
- unspecified (by this router).
- adv_retrans_timer: The time, in milliseconds, between retransmitted
- Neighbor Solicitation messages. Used by address resolution and
- the Neighbor Unreachability Detection algorithm (see Sections
- 7.2 and 7.3 of RFC 4861). A value of zero means unspecified
- (by this router).
- adv_cur_hop_limit: The default value that should be placed in the
- Hop Count field of the IP header for outgoing (unicast) IP
- packets. The value should be set to the current diameter of the
- Internet. The value zero means unspecified (by this router).
- adv_default_lifetime: The lifetime associated with the default
- router in units of seconds. The maximum value corresponds to
- 18.2 hours. A lifetime of 0 indicates that the router is not a
- default router and should not appear on the default router list.
- The router lifetime applies only to the router's usefulness as
- a default router; it does not apply to information contained in
- other message fields or options. Options that need time limits
- for their information include their own lifetime fields.
- adv_default_preference: The preference associated with the default
- router, as either "low", "medium", or "high".
- adv_source_ll_address: When set, the link-layer address of the
- outgoing interface is included in the RA.
-            adv_home_agent_flag: When set, indicates that the sending router is able
- to serve as Mobile IPv6 Home Agent. When set, minimum limits
- specified by Mobile IPv6 are used for MinRtrAdvInterval and
- MaxRtrAdvInterval.
- adv_home_agent_info: When set, Home Agent Information Option
- (specified by Mobile IPv6) is included in Router Advertisements.
- adv_home_agent_flag must also be set when using this option.
- home_agent_lifetime: The length of time in seconds (relative to the
- time the packet is sent) that the router is offering Mobile IPv6
-                Home Agent services. A value of 0 must not be used. The maximum
-                lifetime is 65520 seconds (18.2 hours). This option is ignored
-                if adv_home_agent_info is not set.
- home_agent_preference: The preference for the Home Agent sending
-                this Router Advertisement. Values greater than 0 indicate a more
-                preferable Home Agent, while values less than 0 indicate a less
-                preferable Home Agent. This option is ignored if
- adv_home_agent_info is not set.
- adv_mob_rtr_support_flag: When set, the Home Agent signals it
- supports Mobile Router registrations (specified by NEMO Basic).
- adv_home_agent_info must also be set when using this option.
- adv_interval_opt: When set, Advertisement Interval Option
- (specified by Mobile IPv6) is included in Router Advertisements.
- When set, minimum limits specified by Mobile IPv6 are used for
- MinRtrAdvInterval and MaxRtrAdvInterval.
-            adv_on_link: When set, indicates that this prefix can be used for
- on-link determination. When not set the advertisement makes no
- statement about on-link or off-link properties of the prefix.
- For instance, the prefix might be used for address configuration
- with some of the addresses belonging to the prefix being
- on-link and others being off-link.
- adv_autonomous: When set, indicates that this prefix can be used for
- autonomous address configuration as specified in RFC 4862.
-            adv_router_addr: When set, indicates that the address of the interface
-                is sent instead of the network prefix, as is required by Mobile
- IPv6. When set, minimum limits specified by Mobile IPv6 are used
- for MinRtrAdvInterval and MaxRtrAdvInterval.
- adv_valid_lifetime: The length of time in seconds (relative to the
- time the packet is sent) that the prefix is valid for the
- purpose of on-link determination. The symbolic value infinity
- represents infinity (i.e. a value of all one bits (0xffffffff)).
- The valid lifetime is also used by RFC 4862.
-            adv_preferred_lifetime: The length of time in seconds (relative to the
- time the packet is sent) that addresses generated from the
- prefix via stateless address autoconfiguration remain preferred.
- The symbolic value infinity represents infinity (i.e. a value of
- all one bits (0xffffffff)). See RFC 4862.
- base_6to4_interface: If this option is specified, this prefix will
- be combined with the IPv4 address of interface name to produce
- a valid 6to4 prefix. The first 16 bits of this prefix will be
- replaced by 2002 and the next 32 bits of this prefix will be
- replaced by the IPv4 address assigned to interface name at
- configuration time. The remaining 80 bits of the prefix
- (including the SLA ID) will be advertised as specified in the
- configuration file.
- adv_route_lifetime: The lifetime associated with the route in units
- of seconds. The symbolic value infinity represents infinity
- (i.e. a value of all one bits (0xffffffff)).
-            adv_route_preference: The preference associated with the route, as
-                either "low", "medium", or "high".
- adv_rdnss_preference: The preference of the DNS server, compared to
- other DNS servers advertised and used. 0 to 7 means less
- important than manually configured nameservers in resolv.conf,
- while 12 to 15 means more important.
- adv_rdnss_open: "Service Open" flag. When set, indicates that RDNSS
- continues to be available to hosts even if they moved to a
- different subnet.
-            adv_rdnss_lifetime: The maximum duration for which the RDNSS entries
-                are used for name resolution. A value of 0 means the nameserver
-                should no longer be used. The value, if not 0, must be at least
-                max_rtr_adv_interval. To ensure stale RDNSS info gets removed in
-                a timely fashion, this should not be greater than
-                2*max_rtr_adv_interval.
- """
- self._prefix = prefix
- self._clients = clients
- self._route = route
- self._rdnss = rdnss
- self._ignore_if_missing = ignore_if_missing
- self._adv_send_advert = adv_send_advert
- self._unicast_only = unicast_only
- self._max_rtr_adv_interval = max_rtr_adv_interval
- self._min_rtr_adv_interval = min_rtr_adv_interval
- self._min_delay_between_ras = min_delay_between_ras
- self._adv_managed_flag = adv_managed_flag
- self._adv_other_config_flag = adv_other_config_flag
- self._adv_link_mtu = adv_link_mtu
- self._adv_reachable_time = adv_reachable_time
- self._adv_retrans_timer = adv_retrans_timer
- self._adv_cur_hop_limit = adv_cur_hop_limit
- self._adv_default_lifetime = adv_default_lifetime
- self._adv_default_preference = adv_default_preference
- self._adv_source_ll_address = adv_source_ll_address
- self._adv_home_agent_flag = adv_home_agent_flag
- self._adv_home_agent_info = adv_home_agent_info
- self._home_agent_lifetime = home_agent_lifetime
- self._home_agent_preference = home_agent_preference
- self._adv_mob_rtr_support_flag = adv_mob_rtr_support_flag
- self._adv_interval_opt = adv_interval_opt
- self._adv_on_link = adv_on_link
- self._adv_autonomous = adv_autonomous
- self._adv_router_addr = adv_router_addr
- self._adv_valid_lifetime = adv_valid_lifetime
- self._adv_preferred_lifetime = adv_preferred_lifetime
- self._base_6to4_interface = base_6to4_interface
- self._adv_route_lifetime = adv_route_lifetime
- self._adv_route_preference = adv_route_preference
- self._adv_rdnss_preference = adv_rdnss_preference
- self._adv_rdnss_open = adv_rdnss_open
- self._adv_rdnss_lifetime = adv_rdnss_lifetime
-
- def package_configs(self) -> dict[str, Any]:
- conf: dict[str, Any] = dict()
- conf["prefix"] = self._prefix
- conf["clients"] = self._clients
- conf["route"] = self._route
- conf["rdnss"] = self._rdnss
-
- conf["interface_options"] = collections.OrderedDict(
- filter(
- lambda pair: pair[1] is not None,
- (
- ("IgnoreIfMissing", self._ignore_if_missing),
- ("AdvSendAdvert", self._adv_send_advert),
- ("UnicastOnly", self._unicast_only),
- ("MaxRtrAdvInterval", self._max_rtr_adv_interval),
- ("MinRtrAdvInterval", self._min_rtr_adv_interval),
- ("MinDelayBetweenRAs", self._min_delay_between_ras),
- ("AdvManagedFlag", self._adv_managed_flag),
- ("AdvOtherConfigFlag", self._adv_other_config_flag),
- ("AdvLinkMTU", self._adv_link_mtu),
- ("AdvReachableTime", self._adv_reachable_time),
- ("AdvRetransTimer", self._adv_retrans_timer),
- ("AdvCurHopLimit", self._adv_cur_hop_limit),
- ("AdvDefaultLifetime", self._adv_default_lifetime),
- ("AdvDefaultPreference", self._adv_default_preference),
- ("AdvSourceLLAddress", self._adv_source_ll_address),
- ("AdvHomeAgentFlag", self._adv_home_agent_flag),
- ("AdvHomeAgentInfo", self._adv_home_agent_info),
- ("HomeAgentLifetime", self._home_agent_lifetime),
- ("HomeAgentPreference", self._home_agent_preference),
- ("AdvMobRtrSupportFlag", self._adv_mob_rtr_support_flag),
- ("AdvIntervalOpt", self._adv_interval_opt),
- ),
- )
- )
-
- conf["prefix_options"] = collections.OrderedDict(
- filter(
- lambda pair: pair[1] is not None,
- (
- ("AdvOnLink", self._adv_on_link),
- ("AdvAutonomous", self._adv_autonomous),
- ("AdvRouterAddr", self._adv_router_addr),
- ("AdvValidLifetime", self._adv_valid_lifetime),
- ("AdvPreferredLifetime", self._adv_preferred_lifetime),
- ("Base6to4Interface", self._base_6to4_interface),
- ),
- )
- )
-
- conf["route_options"] = collections.OrderedDict(
- filter(
- lambda pair: pair[1] is not None,
- (
- ("AdvRouteLifetime", self._adv_route_lifetime),
- ("AdvRoutePreference", self._adv_route_preference),
- ),
- )
- )
-
- conf["rdnss_options"] = collections.OrderedDict(
- filter(
- lambda pair: pair[1] is not None,
- (
- ("AdvRDNSSPreference", self._adv_rdnss_preference),
- ("AdvRDNSSOpen", self._adv_rdnss_open),
- ("AdvRDNSSLifetime", self._adv_rdnss_lifetime),
- ),
- )
- )
-
- return conf
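For readers without the deleted sources, here is a minimal sketch of how the RadvdConfig removed above was typically assembled and packaged. The module paths are assumed from the package layout shown in this change, and the keyword arguments mirror the attribute assignments in the constructor; the radvd controller that renders package_configs() into a radvd.conf file is not shown.

    # Illustrative sketch only; not part of this change.
    from antlion.controllers.ap_lib import radvd_constants
    from antlion.controllers.ap_lib.radvd_config import RadvdConfig

    config = RadvdConfig(
        prefix=radvd_constants.DEFAULT_PREFIX,  # "fd00::/64"
        adv_send_advert=radvd_constants.ADV_SEND_ADVERT_ON,
        adv_on_link=radvd_constants.ADV_ON_LINK_ON,
        adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
    )

    # package_configs() groups every non-None option into interface, prefix,
    # route, and RDNSS sections for rendering into a radvd.conf file.
    sections = config.package_configs()
    assert sections["interface_options"]["AdvSendAdvert"] == "on"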
diff --git a/packages/antlion/controllers/ap_lib/radvd_constants.py b/packages/antlion/controllers/ap_lib/radvd_constants.py
deleted file mode 100644
index b02a694..0000000
--- a/packages/antlion/controllers/ap_lib/radvd_constants.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-DEFAULT_PREFIX = "fd00::/64"
-
-IGNORE_IF_MISSING_ON = "on"
-IGNORE_IF_MISSING_OFF = "off"
-
-ADV_SEND_ADVERT_ON = "on"
-ADV_SEND_ADVERT_OFF = "off"
-
-UNICAST_ONLY_ON = "on"
-UNICAST_ONLY_OFF = "off"
-
-ADV_MANAGED_FLAG_ON = "on"
-ADV_MANAGED_FLAG_OFF = "off"
-
-ADV_OTHER_CONFIG_FLAG_ON = "on"
-ADV_OTHER_CONFIG_FLAG_OFF = "off"
-
-ADV_DEFAULT_PREFERENCE_ON = "on"
-ADV_DEFAULT_PREFERENCE_OFF = "off"
-
-ADV_SOURCE_LL_ADDRESS_ON = "on"
-ADV_SOURCE_LL_ADDRESS_OFF = "off"
-
-ADV_HOME_AGENT_FLAG_ON = "on"
-ADV_HOME_AGENT_FLAG_OFF = "off"
-
-ADV_HOME_AGENT_INFO_ON = "on"
-ADV_HOME_AGENT_INFO_OFF = "off"
-
-ADV_MOB_RTR_SUPPORT_FLAG_ON = "on"
-ADV_MOB_RTR_SUPPORT_FLAG_OFF = "off"
-
-ADV_INTERVAL_OPT_ON = "on"
-ADV_INTERVAL_OPT_OFF = "off"
-
-ADV_ON_LINK_ON = "on"
-ADV_ON_LINK_OFF = "off"
-
-ADV_AUTONOMOUS_ON = "on"
-ADV_AUTONOMOUS_OFF = "off"
-
-ADV_ROUTER_ADDR_ON = "on"
-ADV_ROUTER_ADDR_OFF = "off"
-
-ADV_ROUTE_PREFERENCE_LOW = "low"
-ADV_ROUTE_PREFERENCE_MED = "medium"
-ADV_ROUTE_PREFERENCE_HIGH = "high"
-
-ADV_RDNSS_OPEN_ON = "on"
-ADV_RDNSS_OPEN_OFF = "off"
diff --git a/packages/antlion/controllers/ap_lib/regulatory_channels.py b/packages/antlion/controllers/ap_lib/regulatory_channels.py
deleted file mode 100644
index 432607c..0000000
--- a/packages/antlion/controllers/ap_lib/regulatory_channels.py
+++ /dev/null
@@ -1,710 +0,0 @@
-from dataclasses import dataclass
-
-Channel = int
-Bandwidth = int
-# TODO(http://b/281728764): Add device requirements to each frequency e.g.
-# "MUST be used indoors only" or "MUST be used with DFS".
-ChannelBandwidthMap = dict[Channel, list[Bandwidth]]
-
-
-@dataclass
-class CountryChannels:
- country_code: str
- allowed_channels: ChannelBandwidthMap
-
-
-# All antlion-supported channels and frequencies for use in regulatory testing.
-TEST_CHANNELS: ChannelBandwidthMap = {
- 1: [20],
- 2: [20],
- 3: [20],
- 4: [20],
- 5: [20],
- 6: [20],
- 7: [20],
- 8: [20],
- 9: [20],
- 10: [20],
- 11: [20],
- 12: [20],
- 13: [20],
- 14: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
-}
-
-# All universally accepted 2.4GHz channels and frequencies.
-WORLD_WIDE_2G_CHANNELS: ChannelBandwidthMap = {
- 1: [20],
- 2: [20],
- 3: [20],
- 4: [20],
- 5: [20],
- 6: [20],
- 7: [20],
- 8: [20],
- 9: [20],
- 10: [20],
- 11: [20],
-}
-
-# List of supported channels and frequencies by country.
-#
-# Please keep this alphabetically ordered. Thanks!
-#
-# TODO: Add missing countries: Russia, Israel, Korea, Turkey, South Africa,
-# Brazil, Bahrain, Vietnam
-COUNTRY_CHANNELS = {
- "Australia": CountryChannels(
- country_code="AU",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "Austria": CountryChannels(
- country_code="AT",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Belgium": CountryChannels(
- country_code="BE",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Canada": CountryChannels(
- country_code="CA",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "China": CountryChannels(
-        country_code="CN",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Denmark": CountryChannels(
- country_code="DK",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "France": CountryChannels(
- country_code="FR",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Germany": CountryChannels(
- country_code="DE",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "India": CountryChannels(
- country_code="IN",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "Ireland": CountryChannels(
- country_code="IE",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Italy": CountryChannels(
- country_code="IT",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Japan": CountryChannels(
- country_code="JP",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- },
- ),
- "Mexico": CountryChannels(
- country_code="MX",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "Netherlands": CountryChannels(
- country_code="NL",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "New Zealand": CountryChannels(
- country_code="NZ",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "Norway": CountryChannels(
- country_code="NO",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Singapore": CountryChannels(
- country_code="SG",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "Spain": CountryChannels(
- country_code="ES",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Sweden": CountryChannels(
- country_code="SE",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "Taiwan": CountryChannels(
- country_code="TW",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
- "United Kingdom of Great Britain": CountryChannels(
- country_code="GB",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 11: [20],
- 12: [20],
- 13: [20],
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- },
- ),
- "United States of America": CountryChannels(
- country_code="US",
- allowed_channels=WORLD_WIDE_2G_CHANNELS
- | {
- 36: [20, 40, 80],
- 40: [20, 40, 80],
- 44: [20, 40, 80],
- 48: [20, 40, 80],
- 52: [20, 40, 80],
- 56: [20, 40, 80],
- 60: [20, 40, 80],
- 64: [20, 40, 80],
- 100: [20, 40, 80],
- 104: [20, 40, 80],
- 108: [20, 40, 80],
- 112: [20, 40, 80],
- 116: [20, 40, 80],
- 120: [20, 40, 80],
- 124: [20, 40, 80],
- 128: [20, 40, 80],
- 132: [20, 40, 80],
- 136: [20, 40, 80],
- 140: [20, 40, 80],
- 144: [20, 40, 80],
- 149: [20, 40, 80],
- 153: [20, 40, 80],
- 157: [20, 40, 80],
- 161: [20, 40, 80],
- 165: [20],
- },
- ),
-}
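A short sketch of how these tables were typically consulted from a regulatory test: look up a country's CountryChannels entry and compare its allowed_channels against TEST_CHANNELS. The filtering below is illustrative, not code from the deleted tests.

    # Illustrative sketch only; not part of this change.
    from antlion.controllers.ap_lib.regulatory_channels import (
        COUNTRY_CHANNELS,
        TEST_CHANNELS,
    )

    us = COUNTRY_CHANNELS["United States of America"]
    print(f"Country code: {us.country_code}")  # US

    # Channels antlion can exercise that the US table does not allow; a
    # regulatory test would expect the device under test to reject these.
    disallowed = {
        channel: widths
        for channel, widths in TEST_CHANNELS.items()
        if channel not in us.allowed_channels
    }
    print(f"Expected rejections in the US: {sorted(disallowed)}")  # [12, 13, 14]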
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
deleted file mode 100644
index 273256d..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def actiontec_pk5000(
- iface_wlan_2g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
-    """A simulated implementation of an Actiontec PK5000 AP.
- Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        channel: What channel to use. Only 2.4Ghz is supported for this profile.
- security: A security profile. Must be open or WPA2 as this is what is
- supported by the PK5000.
- ssid: Network name
- Returns:
- A hostapd config
-
- Differences from real pk5000:
- Supported Rates IE:
- PK5000: Supported: 1, 2, 5.5, 11
- Extended: 6, 9, 12, 18, 24, 36, 48, 54
- Simulated: Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- """
- if channel > 11:
- # Technically this should be 14 but since the PK5000 is a US only AP,
- # 11 is the highest allowable channel.
- raise ValueError(
- f"The Actiontec PK5000 does not support 5Ghz. Invalid channel ({channel})"
- )
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- interface = iface_wlan_2g
- short_preamble = False
- force_wmm = False
- beacon_interval = 100
- dtim_period = 3
- # Sets the basic rates and supported rates of the PK5000
- additional_params = (
- hostapd_constants.CCK_AND_OFDM_BASIC_RATES
- | hostapd_constants.CCK_AND_OFDM_DATA_RATES
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=hostapd_constants.Mode.MODE_11G,
- force_wmm=force_wmm,
- beacon_interval=beacon_interval,
- dtim_period=dtim_period,
- short_preamble=short_preamble,
- additional_parameters=additional_params,
- )
-
- return config
-
-
-def actiontec_mi424wr(
- iface_wlan_2g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- """A simulated implementation of an Actiontec MI424WR AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- channel: What channel to use (2.4Ghz or 5Ghz).
- security: A security profile.
- ssid: The network name.
- Returns:
- A hostapd config.
-
- Differences from real MI424WR:
- HT Capabilities:
- MI424WR:
- HT Rx STBC: Support for 1, 2, and 3
- Simulated:
- HT Rx STBC: Support for 1
- HT Information:
- MI424WR:
-                RIFS: Permitted
- Simulated:
- RIFS: Prohibited
- """
- if channel > 11:
- raise ValueError(
- f"The Actiontec MI424WR does not support 5Ghz. Invalid channel ({channel})"
- )
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- ]
- rates = (
- hostapd_constants.CCK_AND_OFDM_DATA_RATES
- | hostapd_constants.CCK_AND_OFDM_BASIC_RATES
- )
- # Proprietary Atheros Communication: Adv Capability IE
- # Proprietary Atheros Communication: Unknown IE
- # Country Info: US Only IE
- vendor_elements = {
- "vendor_elements": "dd0900037f01010000ff7f"
- "dd0a00037f04010000000000"
- "0706555320010b1b"
- }
-
- additional_params = rates | vendor_elements
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=iface_wlan_2g,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=True,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
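The profile helpers in this file, and in the asus, belkin, linksys, and netgear modules below, all follow the same calling convention: pass the test AP's interface name(s), a channel, and a Security profile, and get back a HostapdConfig. A hedged sketch follows; the interface name is a placeholder and the Security() arguments are an assumption, since only the security_mode attribute is visible in this change.

    # Illustrative sketch only; not part of this change.
    from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
    from antlion.controllers.ap_lib.third_party_ap_profiles import actiontec

    config = actiontec.actiontec_pk5000(
        iface_wlan_2g="wlan0",  # placeholder; must be a supported 2.4GHz interface
        channel=6,              # the PK5000 profile rejects channels above 11
        security=Security(security_mode=SecurityMode.OPEN),  # assumed constructor
        ssid="antlion-test",
    )
    # config is a hostapd_config.HostapdConfig that an access point controller
    # would render and launch on the test AP hardware.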
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
deleted file mode 100644
index 3ddc703..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def asus_rtac66u(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of an Asus RTAC66U AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile. Must be open or WPA2 as this is what is
- supported by the RTAC66U.
- ssid: Network name
- Returns:
- A hostapd config
- Differences from real RTAC66U:
- 2.4 GHz:
- Rates:
- RTAC66U:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- HT Capab:
- Info
- RTAC66U: Green Field supported
- Simulated: Green Field not supported on Whirlwind.
- 5GHz:
- VHT Capab:
- RTAC66U:
- SU Beamformer Supported,
- SU Beamformee Supported,
- Beamformee STS Capability: 3,
- Number of Sounding Dimensions: 3,
- VHT Link Adaptation: Both
- Simulated:
- Above are not supported on Whirlwind.
- VHT Operation Info:
- RTAC66U: Basic MCS Map (0x0000)
- Simulated: Basic MCS Map (0xfffc)
- VHT Tx Power Envelope:
- RTAC66U: Local Max Tx Pwr Constraint: 1.0 dBm
- Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
- Both:
- HT Capab:
- A-MPDU
- RTAC66U: MPDU Density 4
- Simulated: MPDU Density 8
- HT Info:
- RTAC66U: RIFS Permitted
- Simulated: RIFS Prohibited
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- vht_channel_width = 20
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- hostapd_constants.N_CAPABILITY_SGI20,
- ]
- # WPS IE
- # Broadcom IE
- vendor_elements = {
- "vendor_elements": "dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33"
- "d7103c0001031049000600372a000120"
- "dd090010180200001c0000"
- }
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- ac_capabilities = None
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- mode = hostapd_constants.Mode.MODE_11AC_MIXED
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- ]
-
- additional_params = (
- rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=3,
- short_preamble=False,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- additional_parameters=additional_params,
- )
-
- return config
-
-
-def asus_rtac86u(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- """A simulated implementation of an Asus RTAC86U AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile. Must be open or WPA2 as this is what is
- supported by the RTAC86U.
- ssid: Network name
- Returns:
- A hostapd config
- Differences from real RTAC86U:
- 2.4GHz:
- Rates:
- RTAC86U:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- 5GHz:
- Country Code:
- Simulated: Has two country code IEs, one that matches
- the actual, and another explicit IE that was required for
- hostapd's 802.11d to work.
- Both:
- RSN Capabilities (w/ WPA2):
- RTAC86U:
- RSN PTKSA Replay Counter Capab: 16
- Simulated:
- RSN PTKSA Replay Counter Capab: 1
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- mode = hostapd_constants.Mode.MODE_11G
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- spectrum_mgmt = False
- # Measurement Pilot Transmission IE
- vendor_elements = {"vendor_elements": "42020000"}
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- mode = hostapd_constants.Mode.MODE_11A
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- spectrum_mgmt = True
- # Country Information IE (w/ individual channel info)
- # TPC Report Transmit Power IE
- # Measurement Pilot Transmission IE
- vendor_elements = {
- "vendor_elements": "074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e"
- "68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e"
- "a5011e"
- "23021300"
- "42020000"
- }
-
- additional_params = rates | qbss | vendor_elements
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=False,
- beacon_interval=100,
- dtim_period=3,
- short_preamble=False,
- spectrum_mgmt_required=spectrum_mgmt,
- additional_parameters=additional_params,
- )
- return config
-
-
-def asus_rtac5300(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of an Asus RTAC5300 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile. Must be open or WPA2 as this is what is
- supported by the RTAC5300.
- ssid: Network name
- Returns:
- A hostapd config
- Differences from real RTAC5300:
- 2.4GHz:
- Rates:
-                RTAC5300:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- 5GHz:
- VHT Capab:
- RTAC5300:
- SU Beamformer Supported,
- SU Beamformee Supported,
- Beamformee STS Capability: 4,
- Number of Sounding Dimensions: 4,
- MU Beamformer Supported,
- VHT Link Adaptation: Both
- Simulated:
- Above are not supported on Whirlwind.
- VHT Operation Info:
- RTAC5300: Basic MCS Map (0x0000)
- Simulated: Basic MCS Map (0xfffc)
- VHT Tx Power Envelope:
- RTAC5300: Local Max Tx Pwr Constraint: 1.0 dBm
- Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
- Both:
- HT Capab:
- A-MPDU
- RTAC5300: MPDU Density 4
- Simulated: MPDU Density 8
- HT Info:
- RTAC5300: RIFS Permitted
- Simulated: RIFS Prohibited
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- vht_channel_width = 20
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_SGI20,
- ]
-
- # Broadcom IE
- vendor_elements = {"vendor_elements": "dd090010180200009c0000"}
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- # AsusTek IE
- # Epigram 2.4GHz IE
- vendor_elements["vendor_elements"] += (
- "dd25f832e4010101020100031411b5"
- "2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85"
- "dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002"
- )
- ac_capabilities = None
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- mode = hostapd_constants.Mode.MODE_11AC_MIXED
- # Epigram 5GHz IE
- vendor_elements["vendor_elements"] += "dd0500904c0410"
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- ]
-
- additional_params = (
- rates | qbss | vendor_elements | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=3,
- short_preamble=False,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- additional_parameters=additional_params,
- )
- return config
-
-
-def asus_rtn56u(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- """A simulated implementation of an Asus RTN56U AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile. Must be open or WPA2 as this is what is
- supported by the RTN56U.
- ssid: Network name
- Returns:
- A hostapd config
- Differences from real RTN56U:
- 2.4GHz:
- Rates:
- RTN56U:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- Both:
- Fixed Parameters:
- RTN56U: APSD Implemented
- Simulated: APSD Not Implemented
- HT Capab:
- A-MPDU
- RTN56U: MPDU Density 4
- Simulated: MPDU Density 8
- RSN Capabilities (w/ WPA2):
- RTN56U:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- ]
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- # Ralink Technology IE
- # US Country Code IE
- # AP Channel Report IEs (2)
- # WPS IE
- vendor_elements = {
- "vendor_elements": "dd07000c4307000000"
- "0706555320010b14"
- "33082001020304050607"
- "33082105060708090a0b"
- "dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c"
- "d33448103c000101"
- }
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- # Ralink Technology IE
- # US Country Code IE
- vendor_elements = {
- "vendor_elements": "dd07000c4307000000" "0706555320010b14"
- }
-
- additional_params = (
- rates | vendor_elements | qbss | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=False,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
-
-
-def asus_rtn66u(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- """A simulated implementation of an Asus RTN66U AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile. Must be open or WPA2 as this is what is
- supported by the RTN66U.
- ssid: Network name
- Returns:
- A hostapd config
- Differences from real RTN66U:
- 2.4GHz:
- Rates:
- RTN66U:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- Both:
- HT Info:
- RTN66U: RIFS Permitted
- Simulated: RIFS Prohibited
- HT Capab:
- Info:
- RTN66U: Green Field supported
- Simulated: Green Field not supported on Whirlwind.
- A-MPDU
- RTN66U: MPDU Density 4
- Simulated: MPDU Density 8
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- ]
- # Broadcom IE
- vendor_elements = {"vendor_elements": "dd090010180200001c0000"}
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-
- additional_params = (
- rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=3,
- short_preamble=False,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
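Each dual-band profile above picks the radio from the requested channel with the same "channel <= 11" split before applying band-specific rates and capabilities. A condensed sketch of that shared pattern, with names local to this sketch rather than antlion APIs:

    # Illustrative sketch only; not part of this change.
    def select_interface(iface_wlan_2g: str, iface_wlan_5g: str, channel: int) -> str:
        """Mirror the channel <= 11 band split used by the asus_* profiles."""
        if channel <= 11:
            return iface_wlan_2g  # 2.4GHz: CCK + OFDM basic rates
        return iface_wlan_5g      # 5GHz: OFDM-only basic rates

    assert select_interface("wlan0", "wlan1", 6) == "wlan0"
    assert select_interface("wlan0", "wlan1", 36) == "wlan1"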
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
deleted file mode 100644
index aa718b2..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def belkin_f9k1001v5(
- iface_wlan_2g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
-    """A simulated implementation of a Belkin F9K1001v5 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real F9K1001v5:
- Rates:
- F9K1001v5:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- HT Info:
- F9K1001v5:
- RIFS: Permitted
- Simulated:
- RIFS: Prohibited
- RSN Capabilities (w/ WPA2):
- F9K1001v5:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- if channel > 11:
- raise ValueError(
-            f"The Belkin F9K1001v5 does not support 5Ghz. Invalid channel ({channel})"
- )
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- ]
-
- rates = (
- hostapd_constants.CCK_AND_OFDM_BASIC_RATES
- | hostapd_constants.CCK_AND_OFDM_DATA_RATES
- )
-
- # Broadcom IE
- # WPS IE
- vendor_elements = {
- "vendor_elements": "dd090010180200100c0000"
- "dd180050f204104a00011010440001021049000600372a000120"
- }
-
- additional_params = rates | vendor_elements
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=iface_wlan_2g,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=3,
- short_preamble=False,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
deleted file mode 100644
index 5b36a6a..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
+++ /dev/null
@@ -1,339 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def linksys_ea4500(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of a Linksys EA4500 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real EA4500:
- CF (Contention-Free) Parameter IE:
- EA4500: has CF Parameter IE
- Simulated: does not have CF Parameter IE
- HT Capab:
- Info:
- EA4500: Green Field supported
- Simulated: Green Field not supported on Whirlwind.
- A-MPDU
-                EA4500: MPDU Density 4
- Simulated: MPDU Density 8
- RSN Capab (w/ WPA2):
- EA4500:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- ]
-
- # Epigram HT Capabilities IE
- # Epigram HT Additional Capabilities IE
- # Marvell Semiconductor, Inc. IE
- vendor_elements = {
- "vendor_elements": "dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000"
- "dd1a00904c3424000000000000000000000000000000000000000000"
- "dd06005043030000"
- }
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- obss_interval = 180
- n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- obss_interval = None
-
- additional_params = (
- rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=True,
- obss_interval=obss_interval,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
-
-
-def linksys_ea9500(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
-    """A simulated implementation of a Linksys EA9500 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real EA9500:
- 2.4GHz:
- Rates:
- EA9500:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- RSN Capab (w/ WPA2):
- EA9500:
- RSN PTKSA Replay Counter Capab: 16
- Simulated:
- RSN PTKSA Replay Counter Capab: 1
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
- # Measurement Pilot Transmission IE
- vendor_elements = {"vendor_elements": "42020000"}
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- mode = hostapd_constants.Mode.MODE_11G
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- mode = hostapd_constants.Mode.MODE_11A
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-
- additional_params = rates | qbss | vendor_elements
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=False,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=False,
- additional_parameters=additional_params,
- )
- return config
-
-
-def linksys_wrt1900acv2(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of what a Linksys WRT1900ACV2 AP
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real WRT1900ACV2:
- 5 GHz:
- Simulated: Has two country code IEs, one that matches
- the actual AP, and another explicit IE that was required for
- hostapd's 802.11d to work.
- Both:
- HT Capab:
- A-MPDU
- WRT1900ACV2: MPDU Density 4
- Simulated: MPDU Density 8
- VHT Capab:
- WRT1900ACV2:
- SU Beamformer Supported,
- SU Beamformee Supported,
- Beamformee STS Capability: 4,
- Number of Sounding Dimensions: 4,
- Simulated:
- Above are not supported on Whirlwind.
- RSN Capabilities (w/ WPA2):
- WRT1900ACV2:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- ]
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
- hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- ]
- vht_channel_width = 20
- # Epigram, Inc. HT Capabilities IE
- # Epigram, Inc. HT Additional Capabilities IE
- # Marvell Semiconductor IE
- vendor_elements = {
- "vendor_elements": "dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800"
- "dd1a00904c3424000000000000000000000000000000000000000000"
- "dd06005043030000"
- }
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- obss_interval = 180
- spectrum_mgmt = False
- local_pwr_constraint = {}
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- obss_interval = None
- spectrum_mgmt = True
- local_pwr_constraint = {"local_pwr_constraint": 3}
- # Country Information IE (w/ individual channel info)
- vendor_elements["vendor_elements"] += (
- "071e5553202401112801112c011130"
- "01119501179901179d0117a10117a50117"
- )
-
- additional_params = (
- rates
- | vendor_elements
- | hostapd_constants.UAPSD_ENABLED
- | local_pwr_constraint
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=hostapd_constants.Mode.MODE_11AC_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=True,
- obss_interval=obss_interval,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- spectrum_mgmt_required=spectrum_mgmt,
- additional_parameters=additional_params,
- )
- return config
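The Linksys profiles above, like the other vendor profiles in this directory, are plain factory functions: given the AP's radio interfaces, a channel, and a security profile they return a ready-to-use HostapdConfig. A minimal usage sketch; the interface names are hypothetical and the assumption that an open profile can be built as Security(security_mode=SecurityMode.OPEN) is not taken from this change:

    from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
    from antlion.controllers.ap_lib.third_party_ap_profiles import linksys

    # Hypothetical interface names; real values come from the AccessPoint config.
    config = linksys.linksys_ea9500(
        iface_wlan_2g="wlan0",
        iface_wlan_5g="wlan1",
        channel=36,  # channels above 11 select the 5GHz interface and OFDM rates
        security=Security(security_mode=SecurityMode.OPEN),
        ssid="test_network",
    )
    # The resulting HostapdConfig is then handed to an AccessPoint to start hostapd.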
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
deleted file mode 100644
index 0174d47..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
+++ /dev/null
@@ -1,290 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def netgear_r7000(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of what a Netgear R7000 AP
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real R7000:
- 2.4GHz:
- Rates:
- R7000:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48,
- 5GHz:
- VHT Capab:
- R7000:
- SU Beamformer Supported,
- SU Beamformee Supported,
- Beamformee STS Capability: 3,
- Number of Sounding Dimensions: 3,
- VHT Link Adaptation: Both
- Simulated:
- Above are not supported on Whirlwind.
- VHT Operation Info:
- R7000: Basic MCS Map (0x0000)
- Simulated: Basic MCS Map (0xfffc)
- VHT Tx Power Envelope:
- R7000: Local Max Tx Pwr Constraint: 1.0 dBm
- Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
- Both:
- HT Capab:
- A-MPDU
- R7000: MPDU Density 4
- Simulated: MPDU Density 8
- HT Info:
- R7000: RIFS Permitted
- Simulated: RIFS Prohibited
- RM Capabilities:
- R7000:
- Beacon Table Measurement: Not Supported
- Statistic Measurement: Enabled
- AP Channel Report Capability: Enabled
- Simulated:
- Beacon Table Measurement: Supported
- Statistic Measurement: Disabled
- AP Channel Report Capability: Disabled
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- vht_channel_width = 80
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- hostapd_constants.N_CAPABILITY_SGI20,
- ]
- # Netgear IE
- # WPS IE
- # Epigram, Inc. IE
- # Broadcom IE
- vendor_elements = {
- "vendor_elements": "dd0600146c000000"
- "dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7"
- "69e103c0001031049000600372a000120"
- "dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002"
- "dd090010180200001c0000"
- }
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- obss_interval = 300
- ac_capabilities = None
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- mode = hostapd_constants.Mode.MODE_11AC_MIXED
- n_capabilities += [
- hostapd_constants.N_CAPABILITY_SGI40,
- ]
-
- if hostapd_config.ht40_plus_allowed(channel):
- n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
- elif hostapd_config.ht40_minus_allowed(channel):
- n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)
-
- obss_interval = None
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- ]
-
- additional_params = (
- rates
- | vendor_elements
- | qbss
- | hostapd_constants.ENABLE_RRM_BEACON_REPORT
- | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
- | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=2,
- short_preamble=False,
- obss_interval=obss_interval,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- additional_parameters=additional_params,
- )
- return config
-
-
-def netgear_wndr3400(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS on 5GHz once it is supported
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of what a Netgear WNDR3400 AP
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real WNDR3400:
- 2.4GHz:
- Rates:
- WNDR3400:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48,
- 5GHz:
- HT Info:
- WNDR3400: RIFS Permitted
- Simulated: RIFS Prohibited
- Both:
- HT Capab:
- A-MPDU
- WNDR3400: MPDU Density 16
- Simulated: MPDU Density 8
- Info
- WNDR3400: Green Field supported
- Simulated: Green Field not supported on Whirlwind.
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- ]
- # WPS IE
- # Broadcom IE
- vendor_elements = {
- "vendor_elements": "dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade"
- "dc103c0001031049000600372a000120"
- "dd090010180200f0040000"
- }
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- obss_interval = 300
- n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- obss_interval = None
- n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
-
- additional_params = (
- rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=2,
- short_preamble=False,
- obss_interval=obss_interval,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
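In the 5GHz branches above, the HT40 secondary-channel side is chosen at run time from the channel number. A small standalone sketch of the same check; the channel values are illustrative:

    from antlion.controllers.ap_lib import hostapd_config, hostapd_constants

    n_capabilities = []
    channel = 36  # an HT40+ capable channel; channel 40 would take the HT40- branch
    if hostapd_config.ht40_plus_allowed(channel):
        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
    elif hostapd_config.ht40_minus_allowed(channel):
        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)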
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
deleted file mode 100644
index b1e7eb8..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def securifi_almond(
- iface_wlan_2g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- """A simulated implementation of a Securifi Almond AP
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real Almond:
- Rates:
- Almond:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- HT Capab:
- A-MPDU
- Almond: MPDU Density 4
- Simulated: MPDU Density 8
- RSN Capab (w/ WPA2):
- Almond:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- if channel > 11:
- raise ValueError(
- f"The Securifi Almond does not support 5Ghz. Invalid channel ({channel})"
- )
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_HT40_PLUS,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
- ]
-
- rates = (
- hostapd_constants.CCK_AND_OFDM_BASIC_RATES
- | hostapd_constants.CCK_AND_OFDM_DATA_RATES
- )
-
- # Ralink Technology IE
- # Country Information IE
- # AP Channel Report IEs
- vendor_elements = {
- "vendor_elements": "dd07000c4307000000"
- "0706555320010b14"
- "33082001020304050607"
- "33082105060708090a0b"
- }
-
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
-
- additional_params = rates | vendor_elements | qbss
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=iface_wlan_2g,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=True,
- obss_interval=300,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
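A pattern repeated throughout these profiles is building additional_parameters by merging plain dicts with the | union operator (Python 3.9+). A tiny illustration with made-up values; the real dicts come from hostapd_constants:

    rates = {"supported_rates": "10 20 55 110 60 90 120 180"}
    vendor_elements = {"vendor_elements": "dd07000c4307000000"}
    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}

    # Later operands win on key collisions; the merged dict is passed to
    # HostapdConfig as additional_parameters.
    additional_params = rates | vendor_elements | qbss
    assert additional_params["bss_load_update_period"] == 50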
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
deleted file mode 100644
index 6124396..0000000
--- a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
+++ /dev/null
@@ -1,508 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.ap_lib import (
- hostapd_config,
- hostapd_constants,
- hostapd_utils,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-
-
-def tplink_archerc5(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of an TPLink ArcherC5 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real ArcherC5:
- 2.4GHz:
- Rates:
- ArcherC5:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- HT Capab:
- Info:
- ArcherC5: Green Field supported
- Simulated: Green Field not supported on Whirlwind.
- 5GHz:
- VHT Capab:
- ArcherC5:
- SU Beamformer Supported,
- SU Beamformee Supported,
- Beamformee STS Capability: 3,
- Number of Sounding Dimensions: 3,
- VHT Link Adaptation: Both
- Simulated:
- Above are not supported on Whirlwind.
- VHT Operation Info:
- ArcherC5: Basic MCS Map (0x0000)
- Simulated: Basic MCS Map (0xfffc)
- VHT Tx Power Envelope:
- ArcherC5: Local Max Tx Pwr Constraint: 1.0 dBm
- Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
- Both:
- HT Capab:
- A-MPDU
- ArcherC5: MPDU Density 4
- Simulated: MPDU Density 8
- HT Info:
- ArcherC5: RIFS Permitted
- Simulated: RIFS Prohibited
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- vht_channel_width = 20
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- ]
- # WPS IE
- # Broadcom IE
- vendor_elements = {
- "vendor_elements": "dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8"
- "12103c0001031049000600372a000120"
- "dd090010180200001c0000"
- }
- qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- short_preamble = True
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
- ac_capabilities = None
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- short_preamble = False
- mode = hostapd_constants.Mode.MODE_11AC_MIXED
- n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC)
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- ]
-
- additional_params = (
- rates
- | vendor_elements
- | qbss
- | hostapd_constants.ENABLE_RRM_BEACON_REPORT
- | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
- | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=short_preamble,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- additional_parameters=additional_params,
- )
- return config
-
-
-def tplink_archerc7(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- """A simulated implementation of an TPLink ArcherC7 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real ArcherC7:
- 5GHz:
- Country Code:
- Simulated: Has two country code IEs, one that matches
- the actual AP, and another explicit IE that was required for
- hostapd's 802.11d to work.
- Both:
- HT Info:
- ArcherC7: RIFS Permitted
- Simulated: RIFS Prohibited
- RSN Capabilities (w/ WPA2):
- ArcherC7:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- vht_channel_width: int | None = 80
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- ]
- # Atheros IE
- # WPS IE
- vendor_elements = {
- "vendor_elements": "dd0900037f01010000ff7f"
- "dd180050f204104a00011010440001021049000600372a000120"
- }
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- short_preamble = True
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- spectrum_mgmt = False
- pwr_constraint = {}
- ac_capabilities = None
- vht_channel_width = None
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- short_preamble = False
- mode = hostapd_constants.Mode.MODE_11AC_MIXED
- spectrum_mgmt = True
- # Country Information IE (w/ individual channel info)
- vendor_elements["vendor_elements"] += (
- "074255532024011e28011e2c011e30"
- "011e3401173801173c01174001176401176801176c0117700117740117840117"
- "8801178c011795011e99011e9d011ea1011ea5011e"
- )
- pwr_constraint = {"local_pwr_constraint": 3}
- n_capabilities += [
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- ]
-
- if hostapd_config.ht40_plus_allowed(channel):
- n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
- elif hostapd_config.ht40_minus_allowed(channel):
- n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)
-
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
- hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
- ]
-
- additional_params = (
- rates
- | vendor_elements
- | hostapd_constants.UAPSD_ENABLED
- | pwr_constraint
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=short_preamble,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- spectrum_mgmt_required=spectrum_mgmt,
- additional_parameters=additional_params,
- )
- return config
-
-
-def tplink_c1200(
- iface_wlan_2g: str,
- iface_wlan_5g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- # TODO(b/144446076): Address non-whirlwind hardware capabilities.
- """A simulated implementation of an TPLink C1200 AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- iface_wlan_5g: The 5GHz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real C1200:
- 2.4GHz:
- Rates:
- C1200:
- Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
- Extended: 6, 9, 12, 48
- Simulated:
- Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
- Extended: 24, 36, 48, 54
- HT Capab:
- Info:
- C1200: Green Field supported
- Simulated: Green Field not supported on Whirlwind.
- 5GHz:
- VHT Operation Info:
- C1200: Basic MCS Map (0x0000)
- Simulated: Basic MCS Map (0xfffc)
- VHT Tx Power Envelope:
- C1200: Local Max Tx Pwr Constraint: 7.0 dBm
- Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
- Both:
- HT Info:
- C1200: RIFS Permitted
- Simulated: RIFS Prohibited
- """
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_interface(
- iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- # Common Parameters
- rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
- vht_channel_width = 20
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- ]
- # WPS IE
- # Broadcom IE
- vendor_elements = {
- "vendor_elements": "dd350050f204104a000110104400010210470010000000000000000000000000000000"
- "00103c0001031049000a00372a00012005022688"
- "dd090010180200000c0000"
- }
-
- # 2.4GHz
- if channel <= 11:
- interface = iface_wlan_2g
- rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
- short_preamble = True
- mode = hostapd_constants.Mode.MODE_11N_MIXED
- ac_capabilities = None
-
- # 5GHz
- else:
- interface = iface_wlan_5g
- rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
- short_preamble = False
- mode = hostapd_constants.Mode.MODE_11AC_MIXED
- n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC)
- ac_capabilities = [
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
- hostapd_constants.AC_CAPABILITY_RXLDPC,
- hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
- hostapd_constants.AC_CAPABILITY_RX_STBC_1,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- ]
-
- additional_params = (
- rates
- | vendor_elements
- | hostapd_constants.ENABLE_RRM_BEACON_REPORT
- | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
- | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=interface,
- mode=mode,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=short_preamble,
- n_capabilities=n_capabilities,
- ac_capabilities=ac_capabilities,
- vht_channel_width=vht_channel_width,
- additional_parameters=additional_params,
- )
- return config
-
-
-def tplink_tlwr940n(
- iface_wlan_2g: str,
- channel: int,
- security: Security,
- ssid: str | None = None,
-) -> hostapd_config.HostapdConfig:
- # TODO(b/143104825): Permit RIFS once it is supported
- """A simulated implementation of an TPLink TLWR940N AP.
- Args:
- iface_wlan_2g: The 2.4Ghz interface of the test AP.
- channel: What channel to use.
- security: A security profile (open or WPA2).
- ssid: The network name.
- Returns:
- A hostapd config.
- Differences from real TLWR940N:
- HT Info:
- TLWR940N: RIFS Permitted
- Simulated: RIFS Prohibited
- RSN Capabilities (w/ WPA2):
- TLWR940N:
- RSN PTKSA Replay Counter Capab: 1
- Simulated:
- RSN PTKSA Replay Counter Capab: 16
- """
- if channel > 11:
- raise ValueError(
- "The mock TP-Link TLWR940N does not support 5Ghz. "
- "Invalid channel (%s)" % channel
- )
- # Verify interface and security
- hostapd_utils.verify_interface(
- iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST
- )
- hostapd_utils.verify_security_mode(
- security, [SecurityMode.OPEN, SecurityMode.WPA2]
- )
- if security.security_mode is not SecurityMode.OPEN:
- hostapd_utils.verify_cipher(
- security, [hostapd_constants.WPA2_DEFAULT_CIPER]
- )
-
- n_capabilities = [
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- ]
-
- rates = (
- hostapd_constants.CCK_AND_OFDM_BASIC_RATES
- | hostapd_constants.CCK_AND_OFDM_DATA_RATES
- )
-
- # Atheros Communications, Inc. IE
- # WPS IE
- vendor_elements = {
- "vendor_elements": "dd0900037f01010000ff7f"
- "dd260050f204104a0001101044000102104900140024e2600200010160000002000160"
- "0100020001"
- }
-
- additional_params = (
- rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
- )
-
- config = hostapd_config.HostapdConfig(
- ssid=ssid,
- channel=channel,
- hidden=False,
- security=security,
- interface=iface_wlan_2g,
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- force_wmm=True,
- beacon_interval=100,
- dtim_period=1,
- short_preamble=True,
- n_capabilities=n_capabilities,
- additional_parameters=additional_params,
- )
-
- return config
diff --git a/packages/antlion/controllers/ap_lib/wireless_network_management.py b/packages/antlion/controllers/ap_lib/wireless_network_management.py
deleted file mode 100644
index 9c08da0..0000000
--- a/packages/antlion/controllers/ap_lib/wireless_network_management.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import NewType
-
-from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement
-
-BssTransitionCandidateList = NewType(
- "BssTransitionCandidateList", list[NeighborReportElement]
-)
-
-
-class BssTerminationDuration:
- """Representation of BSS Termination Duration subelement.
-
- See IEEE 802.11-2020 Figure 9-341.
- """
-
- def __init__(self, duration: int):
- """Create a BSS Termination Duration subelement.
-
- Args:
- duration: number of minutes the BSS will be offline.
- """
- # Note: hostapd does not currently support setting BSS Termination TSF,
- # which is the other value held in this subelement.
- self._duration = duration
-
- @property
- def duration(self) -> int:
- return self._duration
-
-
-class BssTransitionManagementRequest:
- """Representation of BSS Transition Management request.
-
- See IEEE 802.11-2020 9.6.13.9.
- """
-
- def __init__(
- self,
- preferred_candidate_list_included: bool = False,
- abridged: bool = False,
- disassociation_imminent: bool = False,
- ess_disassociation_imminent: bool = False,
- disassociation_timer: int = 0,
- validity_interval: int = 1,
- bss_termination_duration: BssTerminationDuration | None = None,
- session_information_url: str | None = None,
- candidate_list: BssTransitionCandidateList | None = None,
- ):
- """Create a BSS Transition Management request.
-
- Args:
- preferred_candidate_list_included: whether the candidate list is a
- preferred candidate list, or (if False) a list of known
- candidates.
- abridged: whether a preference value of 0 is assigned to all BSSIDs
- that do not appear in the candidate list, or (if False) AP has
- no recommendation for/against anything not in the candidate
- list.
- disassociation_imminent: whether the STA is about to be
- disassociated by the AP.
- ess_disassociation_imminent: whether the STA will be disassociated
- from the ESS.
- disassociation_timer: the number of beacon transmission times
- (TBTTs) until the AP disassociates this STA (default 0, meaning
- AP has not determined when it will disassociate this STA).
- validity_interval: number of TBTTs until the candidate list is no
- longer valid (default 1).
- bss_termination_duration: BSS Termination Duration subelement.
- session_information_url: this URL is included if ESS disassociation
- is imminent.
- candidate_list: zero or more neighbor report elements.
- """
- # Request mode field, see IEEE 802.11-2020 Figure 9-924.
- self._preferred_candidate_list_included = (
- preferred_candidate_list_included
- )
- self._abridged = abridged
- self._disassociation_imminent = disassociation_imminent
- self._ess_disassociation_imminent = ess_disassociation_imminent
-
- # Disassociation Timer, see IEEE 802.11-2020 Figure 9-925
- self._disassociation_timer = disassociation_timer
-
- # Validity Interval, see IEEE 802.11-2020 9.6.13.9
- self._validity_interval = validity_interval
-
- # BSS Termination Duration, see IEEE 802.11-2020 9.6.13.9 and Figure 9-341
- self._bss_termination_duration = bss_termination_duration
-
- # Session Information URL, see IEEE 802.11-2020 Figure 9-926
- self._session_information_url = session_information_url
-
- # BSS Transition Candidate List Entries, IEEE 802.11-2020 9.6.13.9.
- self._candidate_list = candidate_list
-
- @property
- def preferred_candidate_list_included(self) -> bool:
- return self._preferred_candidate_list_included
-
- @property
- def abridged(self) -> bool:
- return self._abridged
-
- @property
- def disassociation_imminent(self) -> bool:
- return self._disassociation_imminent
-
- @property
- def bss_termination_included(self) -> bool:
- return self._bss_termination_duration is not None
-
- @property
- def ess_disassociation_imminent(self) -> bool:
- return self._ess_disassociation_imminent
-
- @property
- def disassociation_timer(self) -> int | None:
- if self.disassociation_imminent:
- return self._disassociation_timer
- # Otherwise, field is reserved.
- return None
-
- @property
- def validity_interval(self) -> int:
- return self._validity_interval
-
- @property
- def bss_termination_duration(self) -> BssTerminationDuration | None:
- return self._bss_termination_duration
-
- @property
- def session_information_url(self) -> str | None:
- return self._session_information_url
-
- @property
- def candidate_list(self) -> BssTransitionCandidateList | None:
- return self._candidate_list
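BssTransitionManagementRequest is a passive container whose properties mirror IEEE 802.11-2020 9.6.13.9; nothing is encoded until a consumer serializes it. A minimal construction sketch grounded in the defaults above; the field values are illustrative:

    from antlion.controllers.ap_lib.wireless_network_management import (
        BssTransitionManagementRequest,
    )

    # A hypothetical request: disassociation is imminent in 100 TBTTs and no
    # candidate list is attached.
    request = BssTransitionManagementRequest(
        disassociation_imminent=True,
        disassociation_timer=100,
        validity_interval=200,
    )
    assert request.disassociation_timer == 100  # reserved (None) when not imminent
    assert request.bss_termination_included is False
    assert request.candidate_list is None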
diff --git a/packages/antlion/controllers/attenuator.py b/packages/antlion/controllers/attenuator.py
deleted file mode 100644
index c2aa9cb..0000000
--- a/packages/antlion/controllers/attenuator.py
+++ /dev/null
@@ -1,375 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from __future__ import annotations
-
-import enum
-import logging
-from typing import Protocol, runtime_checkable
-
-from antlion.libs.proc import job
-from antlion.types import ControllerConfig, Json
-from antlion.validation import MapValidator
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "Attenuator"
-ACTS_CONTROLLER_REFERENCE_NAME = "attenuators"
-_ATTENUATOR_OPEN_RETRIES = 3
-
-
-class Model(enum.StrEnum):
- AEROFLEX_TELNET = "aeroflex.telnet"
- MINICIRCUITS_HTTP = "minicircuits.http"
- MINICIRCUITS_TELNET = "minicircuits.telnet"
-
- def create(self, instrument_count: int) -> AttenuatorInstrument:
- match self:
- case Model.AEROFLEX_TELNET:
- import antlion.controllers.attenuator_lib.aeroflex.telnet
-
- return antlion.controllers.attenuator_lib.aeroflex.telnet.AttenuatorInstrument(
- instrument_count
- )
- case Model.MINICIRCUITS_HTTP:
- import antlion.controllers.attenuator_lib.minicircuits.http
-
- return antlion.controllers.attenuator_lib.minicircuits.http.AttenuatorInstrument(
- instrument_count
- )
- case Model.MINICIRCUITS_TELNET:
- import antlion.controllers.attenuator_lib.minicircuits.telnet
-
- return antlion.controllers.attenuator_lib.minicircuits.telnet.AttenuatorInstrument(
- instrument_count
- )
-
-
-def create(configs: list[ControllerConfig]) -> list[Attenuator]:
- attenuators: list[Attenuator] = []
- for config in configs:
- c = MapValidator(config)
- attn_model = c.get(str, "Model")
- protocol = c.get(str, "Protocol", "telnet")
- model = Model(f"{attn_model}.{protocol}")
-
- instrument_count = c.get(int, "InstrumentCount")
- attenuator_instrument = model.create(instrument_count)
-
- address = c.get(str, "Address")
- port = c.get(int, "Port")
-
- for attempt_number in range(1, _ATTENUATOR_OPEN_RETRIES + 1):
- try:
- attenuator_instrument.open(address, port)
- except Exception as e:
- logging.error(
- "Attempt %s to open connection to attenuator " "failed: %s",
- attempt_number,
- e,
- )
- if attempt_number == _ATTENUATOR_OPEN_RETRIES:
- ping_output = job.run(
- f"ping {address} -c 1 -w 1", ignore_status=True
- )
- if ping_output.returncode == 1:
- logging.error(
- "Unable to ping attenuator at %s", address
- )
- else:
- logging.error("Able to ping attenuator at %s", address)
- job.run(
- ["telnet", address, str(port)],
- stdin=b"q",
- ignore_status=True,
- )
- raise
- for i in range(instrument_count):
- attenuators.append(Attenuator(attenuator_instrument, idx=i))
- return attenuators
-
-
-def destroy(objects: list[Attenuator]) -> None:
- for attn in objects:
- attn.instrument.close()
-
-
-def get_info(objects: list[Attenuator]) -> list[Json]:
- """Get information on a list of Attenuator objects.
-
- Args:
- objects: A list of Attenuator objects.
-
- Returns:
- A list of dict, each representing info for Attenuator objects.
- """
- return [
- {
- "Address": attenuator.instrument.address,
- "Attenuator_Port": attenuator.idx,
- }
- for attenuator in objects
- ]
-
-
-def get_attenuators_for_device(
- device_attenuator_configs: list[ControllerConfig],
- attenuators: list[Attenuator],
- attenuator_key: str,
-) -> list[Attenuator]:
- """Gets the list of attenuators associated to a specified device and builds
- a list of the attenuator objects associated to the ip address in the
- device's section of the ACTS config and the Attenuator's IP address. In the
- example below the access point object has an attenuator dictionary with
- IP address associated to an attenuator object. The address is the only
- mandatory field and the 'attenuator_ports_wifi_2g' and
- 'attenuator_ports_wifi_5g' are the attenuator_key specified above. These
- can be anything and is sent in as a parameter to this function. The numbers
- in the list are ports that are in the attenuator object. Below is an
- standard Access_Point object and the link to a standard Attenuator object.
- Notice the link is the IP address, which is why the IP address is mandatory.
-
- "AccessPoint": [
- {
- "ssh_config": {
- "user": "root",
- "host": "192.168.42.210"
- },
- "Attenuator": [
- {
- "Address": "192.168.42.200",
- "attenuator_ports_wifi_2g": [
- 0,
- 1,
- 3
- ],
- "attenuator_ports_wifi_5g": [
- 0,
- 1
- ]
- }
- ]
- }
- ],
- "Attenuator": [
- {
- "Model": "minicircuits",
- "InstrumentCount": 4,
- "Address": "192.168.42.200",
- "Port": 23
- }
- ]
- Args:
- device_attenuator_configs: A list of attenuators config information in
- the acts config that are associated a particular device.
- attenuators: A list of all of the available attenuators objects
- in the testbed.
- attenuator_key: A string that is the key to search in the device's
- configuration.
-
- Returns:
- A list of attenuator objects for the specified device and the key in
- that device's config.
- """
- attenuator_list = []
- for device_attenuator_config in device_attenuator_configs:
- c = MapValidator(device_attenuator_config)
- ports = c.list(attenuator_key).all(int)
- for port in ports:
- for attenuator in attenuators:
- if (
- attenuator.instrument.address
- == device_attenuator_config["Address"]
- and attenuator.idx == port
- ):
- attenuator_list.append(attenuator)
- return attenuator_list
-
-
-#
-# Classes for accessing, managing, and manipulating attenuators.
-#
-# Users will instantiate a specific child class, but almost all operation should
-# be performed on the methods and data members defined here in the base classes
-# or the wrapper classes.
-#
-
-
-class AttenuatorError(Exception):
- """Base class for all errors generated by Attenuator-related modules."""
-
-
-class InvalidDataError(AttenuatorError):
- """ "Raised when an unexpected result is seen on the transport layer.
-
- When this exception is seen, closing an re-opening the link to the
- attenuator instrument is probably necessary. Something has gone wrong in
- the transport.
- """
-
-
-class InvalidOperationError(AttenuatorError):
- """Raised when the attenuator's state does not allow the given operation.
-
- Certain methods may only be accessed when the instance upon which they are
- invoked is in a certain state. This indicates that the object is not in the
- correct state for a method to be called.
- """
-
-
-INVALID_MAX_ATTEN: float = 999.9
-
-
-@runtime_checkable
-class AttenuatorInstrument(Protocol):
- """Defines the primitive behavior of all attenuator instruments.
-
- The AttenuatorInstrument class is designed to provide a simple low-level
- interface for accessing any step attenuator instrument comprised of one or
- more attenuators and a controller. All AttenuatorInstruments should override
- all the methods below and call AttenuatorInstrument.__init__ in their
- constructors. Outside of setup/teardown, devices should be accessed via
- this generic "interface".
- """
-
- @property
- def address(self) -> str | None:
- """Return the address to the attenuator."""
- ...
-
- @property
- def num_atten(self) -> int:
- """Return the index used to identify this attenuator in an instrument."""
- ...
-
- @property
- def max_atten(self) -> float:
- """Return the maximum allowed attenuation value."""
- ...
-
- def open(self, host: str, port: int, timeout_sec: int = 5) -> None:
- """Initiate a connection to the attenuator.
-
- Args:
- host: A valid hostname to an attenuator
- port: Port number to attempt connection
- timeout_sec: Seconds to wait to initiate a connection
- """
- ...
-
- def close(self) -> None:
- """Close the connection to the attenuator."""
- ...
-
- def set_atten(
- self, idx: int, value: float, strict: bool = True, retry: bool = False
- ) -> None:
- """Sets the attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- value: Value for nominal attenuation to be set
- strict: If True, raise an error when given out of bounds attenuation
- retry: If True, command will be retried if possible
- """
- ...
-
- def get_atten(self, idx: int, retry: bool = False) -> float:
- """Returns the current attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- retry: If True, command will be retried if possible
-
- Returns:
- The current attenuation value
- """
- ...
-
-
-class Attenuator(object):
- """An object representing a single attenuator in a remote instrument.
-
- A user wishing to abstract the mapping of attenuators to physical
- instruments should use this class, which provides an object that abstracts
- the physical implementation and allows the user to think only of attenuators
- regardless of their location.
- """
-
- def __init__(
- self, instrument: AttenuatorInstrument, idx: int = 0, offset: int = 0
- ) -> None:
- """This is the constructor for Attenuator
-
- Args:
- instrument: Reference to an AttenuatorInstrument on which the
- Attenuator resides
- idx: This zero-based index is the identifier for a particular
- attenuator in an instrument.
- offset: A power offset value for the attenuator to be used when
- performing future operations. This could be used for either
- calibration or to allow group operations with offsets between
- various attenuators.
-
- Raises:
- TypeError if an invalid AttenuatorInstrument is passed in.
- IndexError if the index is out of range.
- """
- if not isinstance(instrument, AttenuatorInstrument):
- raise TypeError("Must provide an Attenuator Instrument Ref")
- self.instrument = instrument
- self.idx = idx
- self.offset = offset
-
- if self.idx >= instrument.num_atten:
- raise IndexError(
- "Attenuator index out of range for attenuator instrument"
- )
-
- def set_atten(
- self, value: float, strict: bool = True, retry: bool = False
- ) -> None:
- """Sets the attenuation.
-
- Args:
- value: A floating point value for nominal attenuation to be set.
- strict: if True, the function raises an error when given an
- out-of-bounds attenuation value; if False, it sets out-of-bounds
- values to 0 or max_atten.
- retry: if True, command will be retried if possible
-
- Raises:
- ValueError if value + offset is greater than the maximum value.
- """
- if value + self.offset > self.instrument.max_atten and strict:
- raise ValueError(
- "Attenuator Value+Offset greater than Max Attenuation!"
- )
-
- self.instrument.set_atten(
- self.idx, value + self.offset, strict=strict, retry=retry
- )
-
- def get_atten(self, retry: bool = False) -> float:
- """Returns the attenuation as a float, normalized by the offset."""
- return self.instrument.get_atten(self.idx, retry) - self.offset
-
- def get_max_atten(self) -> float:
- """Returns the max attenuation as a float, normalized by the offset."""
- if self.instrument.max_atten == INVALID_MAX_ATTEN:
- raise ValueError("Invalid Max Attenuator Value")
-
- return self.instrument.max_atten - self.offset
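Putting the module together: create() parses each Mobly config entry, opens one AttenuatorInstrument, and wraps each of its ports in an Attenuator whose offset is applied transparently. A sketch with a hypothetical testbed entry; it assumes a reachable Mini-Circuits telnet instrument at that address:

    from antlion.controllers import attenuator

    configs = [
        {
            "Model": "minicircuits",
            "Protocol": "telnet",
            "InstrumentCount": 4,
            "Address": "192.168.42.200",
            "Port": 23,
        }
    ]
    attenuators = attenuator.create(configs)  # one Attenuator per instrument port
    attenuators[0].set_atten(10.0)      # offset (default 0) is added before sending
    value = attenuators[0].get_atten()  # read back, normalized by the offset
    attenuator.destroy(attenuators)     # closes the underlying instrument connection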
diff --git a/packages/antlion/controllers/attenuator_lib/__init__.py b/packages/antlion/controllers/attenuator_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/attenuator_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/attenuator_lib/_tnhelper.py b/packages/antlion/controllers/attenuator_lib/_tnhelper.py
deleted file mode 100644
index 59b9475..0000000
--- a/packages/antlion/controllers/attenuator_lib/_tnhelper.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A helper module to communicate over telnet with AttenuatorInstruments.
-
-User code shouldn't need to directly access this class.
-"""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import re
-import telnetlib
-
-from antlion.controllers import attenuator
-from antlion.libs.proc import job
-
-
-def _ascii_string(uc_string):
- return str(uc_string).encode("ASCII")
-
-
-class TelnetHelper(object):
- """An internal helper class for Telnet+SCPI command-based instruments.
-
- It should only be used by instrument control libraries and not by
- any user code directly.
- """
-
- def __init__(
- self,
- tx_cmd_separator: str = "\n",
- rx_cmd_separator: str = "\n",
- prompt: str = "",
- ) -> None:
- self._tn: telnetlib.Telnet | None = None
- self._ip_address: str | None = None
- self._port: int | None = None
-
- self.tx_cmd_separator = tx_cmd_separator
- self.rx_cmd_separator = rx_cmd_separator
- self.prompt = prompt
-
- def open(self, host: str, port: int = 23) -> None:
- self._ip_address = host
- self._port = port
- if self._tn:
- self._tn.close()
- logging.debug("Telnet Server IP = %s", host)
- self._tn = telnetlib.Telnet(host, port, timeout=10)
-
- def is_open(self) -> bool:
- return self._tn is not None
-
- def close(self) -> None:
- if self._tn:
- self._tn.close()
- self._tn = None
-
- def diagnose_telnet(self, host: str, port: int) -> bool:
- """Function that diagnoses telnet connections.
-
- This function diagnoses telnet connections and can be used in case of
- command failures. The function checks if the device is still reachable
- via ping, and whether or not it can close and reopen the telnet
- connection.
-
- Returns:
- False when telnet server is unreachable or unresponsive
- True when telnet server is reachable and telnet connection has been
- successfully reopened
- """
- logging.debug("Diagnosing telnet connection")
- try:
- job_result = job.run(f"ping {host} -c 5 -i 0.2")
- except Exception as e:
- logging.error("Unable to ping telnet server: %s", e)
- return False
- ping_output = job_result.stdout.decode("utf-8")
- if not re.search(r" 0% packet loss", ping_output):
- logging.error("Ping Packets Lost. Result: %s", ping_output)
- return False
- try:
- self.close()
- except Exception as e:
- logging.error("Cannot close telnet connection: %s", e)
- return False
- try:
- self.open(host, port)
- except Exception as e:
- logging.error("Cannot reopen telnet connection: %s", e)
- return False
- logging.debug("Telnet connection likely recovered")
- return True
-
- def cmd(self, cmd_str: str, retry: bool = False) -> str:
- if not isinstance(cmd_str, str):
- raise TypeError("Invalid command string", cmd_str)
-
- if self._tn is None or self._ip_address is None or self._port is None:
- raise attenuator.InvalidOperationError(
- "Telnet connection not open for commands"
- )
-
- cmd_str = cmd_str.strip(self.tx_cmd_separator)
- self._tn.read_until(_ascii_string(self.prompt), 2)
- self._tn.write(_ascii_string(cmd_str + self.tx_cmd_separator))
-
- match_idx, match_val, ret_text = self._tn.expect(
- [_ascii_string(f"\\S+{self.rx_cmd_separator}")], 1
- )
-
- logging.debug("Telnet Command: %s", cmd_str)
- logging.debug(
- "Telnet Reply: (%s, %s, %s)", match_idx, match_val, ret_text
- )
-
- if match_idx == -1:
- telnet_recovered = self.diagnose_telnet(
- self._ip_address, self._port
- )
- if telnet_recovered and retry:
- logging.debug("Retrying telnet command once.")
- return self.cmd(cmd_str, retry=False)
- else:
- raise attenuator.InvalidDataError(
- "Telnet command failed to return valid data"
- )
-
- ret_str = ret_text.decode()
- ret_str = ret_str.strip(
- self.tx_cmd_separator + self.rx_cmd_separator + self.prompt
- )
- return ret_str
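TelnetHelper is normally owned by an AttenuatorInstrument rather than used directly, but its lifecycle is simple: open, send separator-terminated commands, close. A minimal sketch using the Aeroflex-style prompt and a hypothetical address:

    from antlion.controllers.attenuator_lib import _tnhelper

    helper = _tnhelper.TelnetHelper(
        tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
    )
    helper.open("192.168.42.200", 23)
    channels = helper.cmd("RFCONFIG? CHAN")  # reply with separators and prompt stripped
    helper.close()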
diff --git a/packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py b/packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py
deleted file mode 100644
index 9bc527c..0000000
--- a/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
-
-This class provides a wrapper to the Aeroflex attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.aeroflex.com/ams/weinschel/PDFILES/IM-608-Models-8320-&-8321-preliminary.pdf
-"""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion.controllers import attenuator
-from antlion.controllers.attenuator_lib import _tnhelper
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
- def __init__(self, num_atten: int = 0) -> None:
- self._num_atten = num_atten
- self._max_atten = attenuator.INVALID_MAX_ATTEN
-
- self._tnhelper = _tnhelper.TelnetHelper(
- tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
- )
- self._properties: dict[str, str] | None = None
- self._address: str | None = None
-
- @property
- def address(self) -> str | None:
- return self._address
-
- @property
- def num_atten(self) -> int:
- return self._num_atten
-
- @property
- def max_atten(self) -> float:
- return self._max_atten
-
- def open(self, host: str, port: int, _timeout_sec: int = 5) -> None:
- """Initiate a connection to the attenuator.
-
- Args:
- host: A valid hostname to an attenuator
- port: Port number to attempt connection
- timeout_sec: Seconds to wait to initiate a connection
- """
- self._tnhelper.open(host, port)
-
- # work around a bug in IO, but this is a good thing to do anyway
- self._tnhelper.cmd("*CLS", False)
- self._address = host
-
- if self._num_atten == 0:
- self._num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN"))
-
- configstr = self._tnhelper.cmd("RFCONFIG? ATTN 1")
-
- self._properties = dict(
- zip(
- [
- "model",
- "max_atten",
- "min_step",
- "unknown",
- "unknown2",
- "cfg_str",
- ],
- configstr.split(", ", 5),
- )
- )
-
- self._max_atten = float(self._properties["max_atten"])
-
- def close(self) -> None:
- """Close the connection to the attenuator."""
- self._tnhelper.close()
-
- def set_atten(
- self, idx: int, value: float, _strict: bool = True, _retry: bool = False
- ) -> None:
- """Sets the attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- value: Value for nominal attenuation to be set
- strict: If True, raise an error when given out of bounds attenuation
- retry: If True, command will be retried if possible
-
- Raises:
- InvalidOperationError if the telnet connection is not open.
- IndexError if the index is not valid for this instrument.
- ValueError if the requested set value is greater than the maximum
- attenuation value.
- """
- if not self._tnhelper.is_open():
- raise attenuator.InvalidOperationError("Connection not open!")
-
- if idx >= self._num_atten:
- raise IndexError(
- "Attenuator index out of range!", self._num_atten, idx
- )
-
- if value > self._max_atten:
- raise ValueError(
- "Attenuator value out of range!", self._max_atten, value
- )
-
- self._tnhelper.cmd(f"ATTN {idx + 1} {value}", False)
-
- def get_atten(self, idx: int, _retry: bool = False) -> float:
- """Returns the current attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- retry: If True, command will be retried if possible
-
- Raises:
- InvalidOperationError if the telnet connection is not open.
-
- Returns:
- The current attenuation value
- """
- if not self._tnhelper.is_open():
- raise attenuator.InvalidOperationError("Connection not open!")
-
- # Potentially redundant safety check removed for the moment
- # if idx >= self.num_atten:
- # raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
- atten_val = self._tnhelper.cmd(f"ATTN? {idx + 1}")
-
- return float(atten_val)
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py b/packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/http.py b/packages/antlion/controllers/attenuator_lib/minicircuits/http.py
deleted file mode 100644
index 678929c..0000000
--- a/packages/antlion/controllers/attenuator_lib/minicircuits/http.py
+++ /dev/null
@@ -1,168 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for HTTP control of Mini-Circuits RCDAT series attenuators
-
-This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
-"""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import urllib.request
-
-from antlion.controllers import attenuator
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
- """A specific HTTP-controlled implementation of AttenuatorInstrument for
- Mini-Circuits RC-DAT attenuators.
-
- With the exception of HTTP-specific commands, all functionality is defined
- by the AttenuatorInstrument class.
- """
-
- def __init__(self, num_atten: int = 1) -> None:
- self._num_atten = num_atten
- self._max_atten = attenuator.INVALID_MAX_ATTEN
-
- self._ip_address: str | None = None
- self._port: int | None = None
- self._timeout: int | None = None
- self._address: str | None = None
-
- @property
- def address(self) -> str | None:
- return self._address
-
- @property
- def num_atten(self) -> int:
- return self._num_atten
-
- @property
- def max_atten(self) -> float:
- return self._max_atten
-
- def open(self, host: str, port: int = 80, timeout_sec: int = 2) -> None:
- """Initiate a connection to the attenuator.
-
- Args:
- host: A valid hostname to an attenuator
- port: Port number to attempt connection
- timeout_sec: Seconds to wait to initiate a connection
- """
- self._ip_address = host
- self._port = port
- self._timeout = timeout_sec
- self._address = host
-
- att_req = urllib.request.urlopen(
- f"http://{self._ip_address}:{self._port}/MN?"
- )
- config_str = att_req.read().decode("utf-8").strip()
- if not config_str.startswith("MN="):
- raise attenuator.InvalidDataError(
- f"Attenuator returned invalid data. Attenuator returned: {config_str}"
- )
-
- config_str = config_str[len("MN=") :]
- properties = dict(
- zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
- )
- self._max_atten = float(properties["max_atten"])
-
- def close(self) -> None:
- """Close the connection to the attenuator."""
- # Since this controller is based on HTTP requests, there is no
- # connection teardown required.
-
- def set_atten(
- self, idx: int, value: float, strict: bool = True, retry: bool = False
- ) -> None:
- """Sets the attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- value: Value for nominal attenuation to be set
- strict: If True, raise an error when given out of bounds attenuation
- retry: If True, command will be retried if possible
-
- Raises:
- InvalidDataError if the attenuator does not respond with the
- expected output.
- """
- if not (0 <= idx < self._num_atten):
- raise IndexError(
- "Attenuator index out of range!", self._num_atten, idx
- )
-
- if value > self._max_atten and strict:
- raise ValueError(
- "Attenuator value out of range!", self._max_atten, value
- )
- # The actual device uses one-based index for channel numbers.
- adjusted_value = min(max(0, value), self._max_atten)
- att_req = urllib.request.urlopen(
- "http://{}:{}/CHAN:{}:SETATT:{}".format(
- self._ip_address, self._port, idx + 1, adjusted_value
- ),
- timeout=self._timeout,
- )
- att_resp = att_req.read().decode("utf-8").strip()
- if att_resp != "1":
- if retry:
- self.set_atten(idx, value, strict, retry=False)
- else:
- raise attenuator.InvalidDataError(
- f"Attenuator returned invalid data. Attenuator returned: {att_resp}"
- )
-
- def get_atten(self, idx: int, retry: bool = False) -> float:
- """Returns the current attenuation of the attenuator at the given index.
-
- Args:
- idx: The index of the attenuator.
- retry: if True, command will be retried if possible
-
- Raises:
- InvalidDataError if the attenuator does not respond with the
- expected output
-
- Returns:
- the current attenuation value as a float
- """
- if not (0 <= idx < self._num_atten):
- raise IndexError(
- "Attenuator index out of range!", self._num_atten, idx
- )
- att_req = urllib.request.urlopen(
- f"http://{self._ip_address}:{self._port}/CHAN:{idx + 1}:ATT?",
- timeout=self._timeout,
- )
- att_resp = att_req.read().decode("utf-8").strip()
- try:
- return float(att_resp)
-        except (TypeError, ValueError) as e:
- if retry:
- return self.get_atten(idx, retry=False)
-
- raise attenuator.InvalidDataError(
- f"Attenuator returned invalid data. Attenuator returned: {att_resp}"
- ) from e
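For reference, a minimal usage sketch of the HTTP driver deleted above. The host, port, and attenuation values are placeholders; only the class and methods shown in the hunk are assumed.

from antlion.controllers.attenuator_lib.minicircuits import http

# Placeholder address for an RCDAT-style attenuator on the test network.
instrument = http.AttenuatorInstrument(num_atten=1)
instrument.open(host="192.0.2.10", port=80)

# Indices are zero-based here; the driver translates them to the device's
# one-based channel numbers.
instrument.set_atten(idx=0, value=30.0)
current = instrument.get_atten(idx=0)

instrument.close()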
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py
deleted file mode 100644
index 5738c51..0000000
--- a/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for Telnet control of Mini-Circuits RCDAT series attenuators
-
-This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
-"""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion.controllers import attenuator
-from antlion.controllers.attenuator_lib import _tnhelper
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
- """A specific telnet-controlled implementation of AttenuatorInstrument for
- Mini-Circuits RC-DAT attenuators.
-
- With the exception of telnet-specific commands, all functionality is defined
- by the AttenuatorInstrument class. Because telnet is a stateful protocol,
- the functionality of AttenuatorInstrument is contingent upon a telnet
- connection being established.
- """
-
- def __init__(self, num_atten: int = 0) -> None:
- self._num_atten = num_atten
- self._max_atten = attenuator.INVALID_MAX_ATTEN
- self.properties: dict[str, str] | None = None
- self._tnhelper = _tnhelper.TelnetHelper(
- tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
- )
- self._address: str | None = None
-
- @property
- def address(self) -> str | None:
- return self._address
-
- @property
- def num_atten(self) -> int:
- return self._num_atten
-
- @property
- def max_atten(self) -> float:
- return self._max_atten
-
- def __del__(self) -> None:
- if self._tnhelper.is_open():
- self.close()
-
- def open(self, host: str, port: int, _timeout_sec: int = 5) -> None:
- """Initiate a connection to the attenuator.
-
- Args:
- host: A valid hostname to an attenuator
- port: Port number to attempt connection
- timeout_sec: Seconds to wait to initiate a connection
- """
- self._tnhelper.open(host, port)
- self._address = host
-
- if self._num_atten == 0:
- self._num_atten = 1
-
- config_str = self._tnhelper.cmd("MN?")
-
- if config_str.startswith("MN="):
- config_str = config_str[len("MN=") :]
-
- self.properties = dict(
- zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
- )
- self._max_atten = float(self.properties["max_atten"])
-
- def close(self) -> None:
- """Close the connection to the attenuator."""
- self._tnhelper.close()
-
- def set_atten(
- self, idx: int, value: float, strict: bool = True, retry: bool = False
- ) -> None:
- """Sets the attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- value: Value for nominal attenuation to be set
- strict: If True, raise an error when given out of bounds attenuation
- retry: If True, command will be retried if possible
-
- Raises:
- InvalidOperationError if the telnet connection is not open.
- IndexError if the index is not valid for this instrument.
- ValueError if the requested set value is greater than the maximum
- attenuation value.
- """
-
- if not self._tnhelper.is_open():
- raise attenuator.InvalidOperationError("Connection not open!")
-
- if idx >= self._num_atten:
- raise IndexError(
- "Attenuator index out of range!", self._num_atten, idx
- )
-
- if value > self._max_atten and strict:
- raise ValueError(
- "Attenuator value out of range!", self._max_atten, value
- )
- # The actual device uses one-based index for channel numbers.
- adjusted_value = min(max(0, value), self._max_atten)
- self._tnhelper.cmd(
- f"CHAN:{idx + 1}:SETATT:{adjusted_value}", retry=retry
- )
-
- def get_atten(self, idx: int, retry: bool = False) -> float:
- """Returns the current attenuation given its index in the instrument.
-
- Args:
- idx: Index used to identify a particular attenuator in an instrument
- retry: If True, command will be retried if possible
-
- Returns:
- The current attenuation value
-
- Raises:
- InvalidOperationError if the telnet connection is not open.
- """
- if not self._tnhelper.is_open():
- raise attenuator.InvalidOperationError("Connection not open!")
-
- if idx >= self._num_atten or idx < 0:
- raise IndexError(
- "Attenuator index out of range!", self._num_atten, idx
- )
-
- if self._num_atten == 1:
- atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry)
- else:
- atten_val_str = self._tnhelper.cmd(
- f"CHAN:{idx + 1}:ATT?", retry=retry
- )
- atten_val = float(atten_val_str)
- return atten_val
diff --git a/packages/antlion/controllers/fastboot.py b/packages/antlion/controllers/fastboot.py
deleted file mode 100755
index 2de2321..0000000
--- a/packages/antlion/controllers/fastboot.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion import error
-from antlion.libs.proc import job
-
-
-class FastbootError(error.ActsError):
- """Raised when there is an error in fastboot operations."""
-
- def __init__(self, cmd, stdout, stderr, ret_code):
- super().__init__()
- self.cmd = cmd
- self.stdout = stdout
- self.stderr = stderr
- self.ret_code = ret_code
-
- def __str__(self):
- return (
- "Error executing fastboot cmd '%s'. ret: %d, stdout: %s,"
- " stderr: %s"
- ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
-
-
-class FastbootProxy:
- """Proxy class for fastboot.
-
-    For syntactic reasons, the '-' in fastboot commands needs to be replaced
- with '_'. Can directly execute fastboot commands on an object:
- >> fb = FastbootProxy(<serial>)
- >> fb.devices() # will return the console output of "fastboot devices".
- """
-
- def __init__(self, serial="", ssh_connection=None):
- self.serial = serial
- if serial:
- self.fastboot_str = f"fastboot -s {serial}"
- else:
- self.fastboot_str = "fastboot"
- self.ssh_connection = ssh_connection
-
- def _exec_fastboot_cmd(
- self, name, arg_str, ignore_status=False, timeout=60
- ):
- command = f"{self.fastboot_str} {name} {arg_str}"
- if self.ssh_connection:
- result = self.ssh_connection.run(
- command, ignore_status=True, timeout_sec=timeout
- )
- else:
- result = job.run(command, ignore_status=True, timeout_sec=timeout)
- ret, out, err = result.exit_status, result.stdout, result.stderr
- # TODO: This is only a temporary workaround for b/34815412.
- # fastboot getvar outputs to stderr instead of stdout
- if "getvar" in command:
- out = err
- if ret == 0 or ignore_status:
- return out
- else:
- raise FastbootError(
- cmd=command, stdout=out, stderr=err, ret_code=ret
- )
-
- def args(self, *args, **kwargs):
- return job.run(" ".join((self.fastboot_str,) + args), **kwargs).stdout
-
- def __getattr__(self, name):
- def fastboot_call(*args, **kwargs):
- clean_name = name.replace("_", "-")
- arg_str = " ".join(str(elem) for elem in args)
- return self._exec_fastboot_cmd(clean_name, arg_str, **kwargs)
-
- return fastboot_call
diff --git a/packages/antlion/controllers/fuchsia_device.py b/packages/antlion/controllers/fuchsia_device.py
deleted file mode 100644
index b2e1948..0000000
--- a/packages/antlion/controllers/fuchsia_device.py
+++ /dev/null
@@ -1,852 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import logging
-import os
-import re
-import socket
-import textwrap
-import time
-from ipaddress import ip_address
-from typing import Any
-
-import honeydew
-from honeydew.affordances.connectivity.wlan.utils.types import CountryCode
-from honeydew.auxiliary_devices.power_switch.power_switch_using_dmc import (
- PowerSwitchDmcError,
- PowerSwitchUsingDmc,
-)
-from honeydew.transports.ffx.config import FfxConfig
-from honeydew.transports.ffx.ffx import FFX
-from honeydew.typing.custom_types import DeviceInfo, IpPort
-from mobly import logger, signals
-
-from antlion import context, utils
-from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig
-from antlion.controllers import pdu
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
- WlanController,
-)
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
- WlanPolicyController,
-)
-from antlion.controllers.fuchsia_lib.package_server import PackageServer
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import (
- DEFAULT_SSH_PRIVATE_KEY,
- DEFAULT_SSH_USER,
- FuchsiaSSHProvider,
-)
-from antlion.decorators import cached_property
-from antlion.runner import CalledProcessError
-from antlion.types import ControllerConfig, Json
-from antlion.utils import (
- PingResult,
- get_fuchsia_mdns_ipv6_address,
- get_interface_ip_addresses,
-)
-from antlion.validation import FieldNotFoundError, MapValidator
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "FuchsiaDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices"
-
-FUCHSIA_RECONNECT_AFTER_REBOOT_TIME = 5
-
-FUCHSIA_REBOOT_TYPE_SOFT = "soft"
-FUCHSIA_REBOOT_TYPE_HARD = "hard"
-
-FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
-FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60
-
-FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15
-
-FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
-FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US"
-
-MDNS_LOOKUP_RETRY_MAX = 3
-
-FFX_PROXY_TIMEOUT_SEC = 3
-
-# Duration to wait for the Fuchsia device to acquire an IP address after
-# requested to join a network.
-#
-# Acquiring an IP address after connecting to a WLAN network could take up to
-# 15 seconds if we get unlucky:
-#
-# 1. An outgoing passive scan just started (~7s)
-# 2. An active scan is queued for the newly saved network (~7s)
-# 3. The initial connection attempt fails (~1s)
-IP_ADDRESS_TIMEOUT = 30
-
-
-class FuchsiaDeviceError(signals.ControllerError):
- pass
-
-
-class FuchsiaConfigError(signals.ControllerError):
- """Incorrect FuchsiaDevice configuration."""
-
-
-def create(configs: list[ControllerConfig]) -> list[FuchsiaDevice]:
- return [FuchsiaDevice(c) for c in configs]
-
-
-def destroy(objects: list[FuchsiaDevice]) -> None:
- for fd in objects:
- fd.clean_up()
- del fd
-
-
-def get_info(objects: list[FuchsiaDevice]) -> list[Json]:
- """Get information on a list of FuchsiaDevice objects."""
- return [{"ip": fd.ip} for fd in objects]
-
-
-class FuchsiaDevice:
- """Class representing a Fuchsia device.
-
- Each object of this class represents one Fuchsia device in ACTS.
-
- Attributes:
- ip: The full address or Fuchsia abstract name to contact the Fuchsia
- device at
- log: A logger object.
- ssh_port: The SSH TCP port number of the Fuchsia device.
- sl4f_port: The SL4F HTTP port number of the Fuchsia device.
- ssh_config: The ssh_config for connecting to the Fuchsia device.
- """
-
- def __init__(self, controller_config: ControllerConfig) -> None:
- config = MapValidator(controller_config)
- self.ip = config.get(str, "ip")
- if "%" in self.ip:
- addr, scope_id = self.ip.split("%", 1)
- try:
- if_name = socket.if_indextoname(int(scope_id))
- self.ip = f"{addr}%{if_name}"
- except ValueError:
- # Scope ID is likely already the interface name, no change necessary.
- pass
- self.orig_ip = self.ip
- self.sl4f_port = config.get(int, "sl4f_port", 80)
- self.ssh_username = config.get(str, "ssh_username", DEFAULT_SSH_USER)
- self.ssh_port = config.get(int, "ssh_port", DEFAULT_SSH_PORT)
- self.ssh_binary_path = config.get(str, "ssh_binary_path", "ssh")
-
- def expand(path: str) -> str:
- return os.path.expandvars(os.path.expanduser(path))
-
- def path_from_config(
- name: str, default: str | None = None
- ) -> str | None:
- path = config.get(str, name, default)
- return None if path is None else expand(path)
-
- def assert_exists(name: str, path: str | None) -> None:
- if path is None:
- raise FuchsiaDeviceError(
- f'Please specify "${name}" in your configuration file'
- )
- if not os.path.exists(path):
- raise FuchsiaDeviceError(
- f'Please specify a correct "${name}" in your configuration '
- f'file: "{path}" does not exist'
- )
-
- self.specific_image: str | None = path_from_config("specific_image")
- if self.specific_image:
- assert_exists("specific_image", self.specific_image)
-
- # Path to a tar.gz archive with pm and amber-files, as necessary for
- # starting a package server.
- self.packages_archive_path: str | None = path_from_config(
- "packages_archive_path"
- )
- if self.packages_archive_path:
- assert_exists("packages_archive_path", self.packages_archive_path)
-
- def required_path_from_config(
- name: str, default: str | None = None
- ) -> str:
- path = path_from_config(name, default)
- if path is None:
- raise FuchsiaConfigError(f"{name} is a required config field")
- assert_exists(name, path)
- return path
-
- self.ssh_priv_key: str = required_path_from_config(
- "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY
- )
- self.ffx_binary_path: str = required_path_from_config(
- "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx"
- )
- self.ffx_subtools_search_path: str | None = path_from_config(
- "ffx_subtools_search_path"
- )
-
- self.authorized_file = config.get(str, "authorized_file_loc", None)
- self.serial_number = config.get(str, "serial_number", None)
- self.device_type = config.get(str, "device_type", None)
- self.product_type = config.get(str, "product_type", None)
- self.board_type = config.get(str, "board_type", None)
- self.build_number = config.get(str, "build_number", None)
- self.build_type = config.get(str, "build_type", None)
- self.mdns_name = config.get(str, "mdns_name", None)
-
- self.hard_reboot_on_fail = config.get(
- bool, "hard_reboot_on_fail", False
- )
- self.take_bug_report_on_fail = config.get(
- bool, "take_bug_report_on_fail", False
- )
- self.device_pdu_config = config.get(dict, "PduDevice", {})
- self.config_country_code = config.get(
- str, "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US
- ).upper()
-
- output_path = context.get_current_context().get_base_output_path()
- self.ssh_config = os.path.join(output_path, f"ssh_config_{self.ip}")
- self._generate_ssh_config(self.ssh_config)
-
- # WLAN interface info is populated inside configure_wlan
- self.wlan_client_interfaces: dict[str, Any] = {}
- self.wlan_ap_interfaces: dict[str, Any] = {}
- self.wlan_client_test_interface_name = config.get(
- str, "wlan_client_test_interface", None
- )
- self.wlan_ap_test_interface_name = config.get(
- str, "wlan_ap_test_interface", None
- )
- try:
- self.wlan_features: list[str] = config.list("wlan_features").all(
- str
- )
- except FieldNotFoundError:
- self.wlan_features = []
-
- # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
- # If set to None, wlan is not configured.
- self.association_mechanism: str | None = None
- # Defaults to policy layer, unless otherwise specified in the config
- self.default_association_mechanism = config.get(
- str, "association_mechanism", "policy"
- )
-
- # Whether to clear and preserve existing saved networks and client
- # connections state, to be restored at device teardown.
- self.default_preserve_saved_networks = config.get(
- bool, "preserve_saved_networks", True
- )
-
- if not utils.is_valid_ipv4_address(
- self.ip
- ) and not utils.is_valid_ipv6_address(self.ip):
- mdns_ip = None
- for _ in range(MDNS_LOOKUP_RETRY_MAX):
- mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
- if mdns_ip:
- break
- else:
- time.sleep(1)
- if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
- # self.ip was actually an mdns name. Use it for self.mdns_name
- # unless one was explicitly provided.
- self.mdns_name = self.mdns_name or self.ip
- self.ip = mdns_ip
- else:
- raise ValueError(f"Invalid IP: {self.ip}")
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[FuchsiaDevice | {self.orig_ip}]",
- },
- )
-
- self.ping_rtt_match = re.compile(
- r"RTT Min/Max/Avg = \[ ([0-9.]+) / ([0-9.]+) / ([0-9.]+) \] ms"
- )
- self.serial = re.sub("[.:%]", "_", self.ip)
- self.package_server: PackageServer | None = None
-
- # Create honeydew fuchsia_device.
- if not self.mdns_name:
- raise FuchsiaConfigError(
- 'Must provide "mdns_name: <device mDNS name>" in the device config'
- )
-
- ffx_config = FfxConfig()
- ffx_config.setup(
- binary_path=self.ffx_binary_path,
- isolate_dir=None,
- logs_dir=f"{getattr(logging, 'log_path')}/ffx/",
- logs_level="None",
- enable_mdns=False,
- subtools_search_path=self.ffx_subtools_search_path,
- proxy_timeout_secs=FFX_PROXY_TIMEOUT_SEC,
- )
-
- self.honeydew_fd = honeydew.create_device(
- device_info=DeviceInfo(
- name=self.mdns_name,
- ip_port=IpPort(ip_address(self.ip), self.ssh_port),
- serial_socket=None,
- ),
- ffx_config_data=ffx_config.get_config(),
- config={
- "affordances": {
- "wlan": {
- "implementation": "fuchsia-controller",
- },
- },
- },
- )
-
- @cached_property
- def sl4f(self) -> SL4F:
- """Get the sl4f module configured for this device."""
- self.log.info("Started SL4F server")
- return SL4F(self.ssh, self.sl4f_port)
-
- @cached_property
- def ssh(self) -> FuchsiaSSHProvider:
- """Get the SSH provider module configured for this device."""
- if not self.ssh_port:
- raise FuchsiaConfigError(
- 'Must provide "ssh_port: <int>" in the device config'
- )
- if not self.ssh_priv_key:
- raise FuchsiaConfigError(
- 'Must provide "ssh_priv_key: <file path>" in the device config'
- )
- return FuchsiaSSHProvider(
- SSHConfig(
- self.ssh_username,
- self.ip,
- self.ssh_priv_key,
- port=self.ssh_port,
- ssh_binary=self.ssh_binary_path,
- )
- )
-
- @property
- def ffx(self) -> FFX:
- """Returns the underlying Honeydew FFX transport object.
-
- Returns:
- The underlying Honeydew FFX transport object.
-
- Raises:
- FfxCommandError: Failed to instantiate.
- """
- return self.honeydew_fd.ffx
-
- @cached_property
- def wlan_policy_controller(self) -> WlanPolicyController:
- return WlanPolicyController(self.honeydew_fd, self.ssh)
-
- @cached_property
- def wlan_controller(self) -> WlanController:
- return WlanController(self.honeydew_fd)
-
- def _generate_ssh_config(self, file_path: str) -> None:
- """Generate and write an SSH config for Fuchsia to disk.
-
- Args:
- file_path: Path to write the generated SSH config
- """
- content = textwrap.dedent(
- f"""\
- Host *
- CheckHostIP no
- StrictHostKeyChecking no
- ForwardAgent no
- ForwardX11 no
- GSSAPIDelegateCredentials no
- UserKnownHostsFile /dev/null
- User fuchsia
- IdentitiesOnly yes
- IdentityFile {self.ssh_priv_key}
- ControlPersist yes
- ControlMaster auto
- ControlPath /tmp/fuchsia--%r@%h:%p
- ServerAliveInterval 1
- ServerAliveCountMax 1
- LogLevel ERROR
- """
- )
-
- with open(file_path, "w", encoding="utf-8") as file:
- file.write(content)
-
- def start_package_server(self) -> None:
- if not self.packages_archive_path:
- self.log.warn(
- "packages_archive_path is not specified. "
- "Assuming a package server is already running and configured on "
- "the DUT. If this is not the case, either run your own package "
- "server, or configure these fields appropriately. "
- "This is usually required for the Fuchsia iPerf3 client or "
-                "other testing utilities not already in the device's package cache."
- )
- return
- if self.package_server:
- self.log.warn(
-                "Skipping package server start since it is already running"
- )
- return
-
- self.package_server = PackageServer(self.packages_archive_path)
- self.package_server.start()
- self.package_server.configure_device(self.ssh)
-
- def update_wlan_interfaces(self) -> None:
- """Retrieves WLAN interfaces from device and sets the FuchsiaDevice
- attributes.
- """
- self.wlan_client_interfaces = {}
- self.wlan_ap_interfaces = {}
-
- # TODO(http://fxb/75909): This tedium is necessary to get the interface name
- # because only netstack has that information. The bug linked here is
- # to reconcile some of the information between the two perspectives, at
- # which point we can eliminate this step.
- netstack_interfaces = self.honeydew_fd.netstack.list_interfaces()
- wlan_interfaces_by_mac = self.honeydew_fd.wlan_core.query_interfaces()
-
- for netstack_iface in netstack_interfaces:
- if netstack_iface.mac is None:
- self.log.debug(
- f"No MAC address for iface {netstack_iface.name}"
- )
- continue
-
- if netstack_iface.mac in wlan_interfaces_by_mac.client:
- self.wlan_client_interfaces[
- netstack_iface.name
- ] = wlan_interfaces_by_mac.client[netstack_iface.mac]
- elif netstack_iface.mac in wlan_interfaces_by_mac.ap:
- self.wlan_ap_interfaces[
- netstack_iface.name
- ] = wlan_interfaces_by_mac.ap[netstack_iface.mac]
-
- # Set test interfaces to value from config, else the first found
- # interface, else None
- if self.wlan_client_test_interface_name is None:
- self.wlan_client_test_interface_name = next(
- iter(self.wlan_client_interfaces), None
- )
-
- if self.wlan_ap_test_interface_name is None:
- self.wlan_ap_test_interface_name = next(
- iter(self.wlan_ap_interfaces), None
- )
-
- def configure_wlan(
- self,
- association_mechanism: str | None = None,
- preserve_saved_networks: bool | None = None,
- ) -> None:
- """
- Readies device for WLAN functionality. If applicable, connects to the
- policy layer and clears/saves preexisting saved networks.
-
- Args:
- association_mechanism: either 'policy' or 'drivers'. If None, uses
- the default value from init (can be set by ACTS config)
- preserve_saved_networks: whether to clear existing saved
- networks, and preserve them for restoration later. If None, uses
- the default value from init (can be set by ACTS config)
-
- Raises:
- FuchsiaDeviceError, if configuration fails
- """
- self.wlan_controller.set_country_code(
- CountryCode(self.config_country_code)
- )
-
- # If args aren't provided, use the defaults, which can be set in the
- # config.
- if association_mechanism is None:
- association_mechanism = self.default_association_mechanism
- if preserve_saved_networks is None:
- preserve_saved_networks = self.default_preserve_saved_networks
-
- if association_mechanism not in {None, "policy", "drivers"}:
- raise FuchsiaDeviceError(
- f"Invalid FuchsiaDevice association_mechanism: {association_mechanism}"
- )
-
- # Allows for wlan to be set up differently in different tests
- if self.association_mechanism:
- self.log.info("Deconfiguring WLAN")
- self.deconfigure_wlan()
-
- self.association_mechanism = association_mechanism
-
- self.log.info(
- f"Configuring WLAN w/ association mechanism: {association_mechanism}"
- )
- if association_mechanism == "drivers":
- self.log.warn(
- "You may encounter unusual device behavior when using the "
- "drivers directly for WLAN. This should be reserved for "
- "debugging specific issues. Normal test runs should use the "
- "policy layer."
- )
- if preserve_saved_networks:
- self.log.warn(
- "Unable to preserve saved networks when using drivers "
- "association mechanism (requires policy layer control)."
- )
- else:
- # This requires SL4F calls, so it can only happen with actual
- # devices, not with unit tests.
- self.wlan_policy_controller.configure_wlan(preserve_saved_networks)
-
- # Retrieve WLAN client and AP interfaces
- self.update_wlan_interfaces()
-
- def deconfigure_wlan(self) -> None:
- """
- Stops WLAN functionality (if it has been started). Used to allow
- different tests to use WLAN differently (e.g. some tests require using
-        wlan policy, while the abstract wlan_device can be set up to use policy
- or drivers)
-
- Raises:
- FuchsiaDeviceError, if deconfigure fails.
- """
- if not self.association_mechanism:
- self.log.warning(
- "WLAN not configured before deconfigure was called."
- )
- return
- # If using policy, stop client connections. Otherwise, just clear
- # variables.
- if self.association_mechanism != "drivers":
- self.wlan_policy_controller._deconfigure_wlan()
- self.association_mechanism = None
-
- def reboot(
- self,
- unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
- reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT,
- testbed_pdus: list[pdu.PduDevice] | None = None,
- ) -> None:
- """Reboot a FuchsiaDevice.
-
- Soft reboots the device, verifies it becomes unreachable, then verifies
- it comes back online. Re-initializes services so the tests can continue.
-
- Args:
- unreachable_timeout: time to wait for device to become unreachable.
- reboot_type: 'soft' or 'hard'.
- testbed_pdus: all testbed PDUs.
-
- Raises:
- ConnectionError, if device fails to become unreachable or fails to
- come back up.
- """
- if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
- self.log.info("Soft rebooting")
- self.honeydew_fd.reboot()
-
- elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
- self.log.info("Hard rebooting via PDU")
-
- # Use dmc (client of DMS, device management server) if available
- # for rebooting the device. This tool is only available when
- # running in Fuchsia infrastructure.
- dmc: PowerSwitchUsingDmc | None = None
- if self.mdns_name:
- try:
- dmc = PowerSwitchUsingDmc(device_name=self.mdns_name)
- except PowerSwitchDmcError:
- self.log.info("dmc not found, falling back to using PDU")
-
- if dmc:
- self.log.info("Killing power to FuchsiaDevice with dmc")
- dmc.power_off()
- self.honeydew_fd.wait_for_offline()
-
- self.log.info("Restoring power to FuchsiaDevice with dmc")
- dmc.power_on()
- self.honeydew_fd.wait_for_online()
- self.honeydew_fd.on_device_boot()
- else:
- # Find the matching PDU in the Mobly config.
- if not testbed_pdus:
- raise AttributeError(
- "Testbed PDUs must be supplied to hard reboot a fuchsia_device."
- )
- device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
- self.device_pdu_config, testbed_pdus
- )
-
- self.log.info("Killing power to FuchsiaDevice")
- device_pdu.off(device_pdu_port)
- self.honeydew_fd.wait_for_offline()
-
- self.log.info("Restoring power to FuchsiaDevice")
- device_pdu.on(device_pdu_port)
- self.honeydew_fd.wait_for_online()
- self.honeydew_fd.on_device_boot()
-
- else:
- raise ValueError(f"Invalid reboot type: {reboot_type}")
-
- # Cleanup services
- self.stop_services()
-
- # TODO(http://b/246852449): Move configure_wlan to other controllers.
- # If wlan was configured before reboot, it must be configured again
- # after rebooting, as it was before reboot. No preserving should occur.
- if self.association_mechanism:
- pre_reboot_association_mechanism = self.association_mechanism
- # Prevent configure_wlan from thinking it needs to deconfigure first
- self.association_mechanism = None
- self.configure_wlan(
- association_mechanism=pre_reboot_association_mechanism,
- preserve_saved_networks=False,
- )
-
- self.log.info("Device has rebooted")
-
- def ping(
- self,
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 25,
- additional_ping_params: str | None = None,
- ) -> PingResult:
- """Pings from a Fuchsia device to an IPv4 address or hostname
-
- Args:
- dest_ip: (str) The ip or hostname to ping.
- count: (int) How many icmp packets to send.
- interval: (int) How long to wait between pings (ms)
- timeout: (int) How long to wait before having the icmp packet
- timeout (ms).
- size: (int) Size of the icmp packet.
- additional_ping_params: (str) command option flags to
- append to the command string
-
- Returns:
-            A PingResult with the exit status, standard output, and standard
-            error of the ping command, plus the minimum, average, and maximum
-            round-trip times when the ping succeeds.
- """
- self.log.debug(f"Pinging {dest_ip}...")
- if not additional_ping_params:
- additional_ping_params = ""
-
- try:
- ping_result = self.ssh.run(
- f"ping -c {count} -i {interval} -t {timeout} -s {size} "
- f"{additional_ping_params} {dest_ip}"
- )
- except CalledProcessError as e:
- self.log.debug(f"Failed to ping from host: {e}")
- return PingResult(
- exit_status=e.returncode,
- stdout=e.stdout.decode("utf-8"),
- stderr=e.stderr.decode("utf-8"),
- transmitted=None,
- received=None,
- time_ms=None,
- rtt_min_ms=None,
- rtt_avg_ms=None,
- rtt_max_ms=None,
- rtt_mdev_ms=None,
- )
-
- rtt_stats: re.Match[str] | None = None
-
- if not ping_result.stderr:
- rtt_lines = ping_result.stdout.decode("utf-8").split("\n")[:-1]
- rtt_line = rtt_lines[-1]
- rtt_stats = re.search(self.ping_rtt_match, rtt_line)
- if rtt_stats is None:
- raise FuchsiaDeviceError(
- f'Unable to parse ping output: "{rtt_line}"'
- )
-
- return PingResult(
- exit_status=ping_result.returncode,
- stdout=ping_result.stdout.decode("utf-8"),
- stderr=ping_result.stderr.decode("utf-8"),
- transmitted=None,
- received=None,
- time_ms=None,
- rtt_min_ms=float(rtt_stats.group(1)) if rtt_stats else None,
- rtt_avg_ms=float(rtt_stats.group(3)) if rtt_stats else None,
- rtt_max_ms=float(rtt_stats.group(2)) if rtt_stats else None,
- rtt_mdev_ms=None,
- )
-
- def clean_up(self) -> None:
- """Cleans up the FuchsiaDevice object, releases any resources it
- claimed, and restores saved networks if applicable. For reboots, use
- clean_up_services only.
-
- Note: Any exceptions thrown in this method must be caught and handled,
- ensuring that clean_up_services is run. Otherwise, the syslog listening
- thread will never join and will leave tests hanging.
- """
- # If and only if wlan is configured, and using the policy layer
- if self.association_mechanism == "policy":
- try:
- self.wlan_policy_controller.clean_up()
- except Exception as err:
- self.log.warning(f"Unable to clean up WLAN Policy layer: {err}")
-
- self.stop_services()
-
- if self.package_server:
- self.package_server.clean_up()
-
- def get_interface_ip_addresses(
- self, interface: str
- ) -> dict[str, list[str]]:
- return get_interface_ip_addresses(self, interface)
-
- def wait_for_ipv4_addr(self, interface: str) -> None:
- """Checks if device has an ipv4 private address. Sleeps 1 second between
- retries.
-
- Args:
- interface: name of interface from which to get ipv4 address.
-
- Raises:
-            ConnectionError, if device does not have an ipv4 address after the
-            timeout expires.
- """
- self.log.info(
-            f"Waiting up to {IP_ADDRESS_TIMEOUT} seconds for a valid ipv4 addr."
- )
- timeout = time.time() + IP_ADDRESS_TIMEOUT
- while time.time() < timeout:
- ip_addrs = self.get_interface_ip_addresses(interface)
-
- if len(ip_addrs["ipv4_private"]) > 0:
- self.log.info(
- f"Device has an ipv4 address: {ip_addrs['ipv4_private'][0]}"
- )
- break
- else:
- self.log.debug(
- "Device does not yet have an ipv4 address...retrying in 1 second."
- )
- time.sleep(1)
- else:
- raise ConnectionError("Device failed to get an ipv4 address.")
-
- def wait_for_ipv6_addr(self, interface: str) -> None:
- """Checks if device has an ipv6 private local address. Sleeps 1 second
- between retries.
-
- Args:
- interface: name of interface from which to get ipv6 address.
-
- Raises:
-            ConnectionError, if device does not have an ipv6 address after the
-            timeout expires.
- """
- self.log.info(
-            f"Waiting up to {IP_ADDRESS_TIMEOUT} seconds for a valid ipv6 addr."
- )
- timeout = time.time() + IP_ADDRESS_TIMEOUT
- while time.time() < timeout:
- ip_addrs = self.get_interface_ip_addresses(interface)
- if len(ip_addrs["ipv6_private_local"]) > 0:
- self.log.info(
- "Device has an ipv6 private local address: "
- f"{ip_addrs['ipv6_private_local'][0]}"
- )
- break
- else:
- self.log.debug(
- "Device does not yet have an ipv6 address...retrying in 1 second."
- )
- time.sleep(1)
- else:
- raise ConnectionError("Device failed to get an ipv6 address.")
-
- def stop_services(self) -> None:
- """Stops all host-side clients to the Fuchsia device.
-
- This is necessary whenever the device's state is unknown. These cases can be
- found after device reboots, for example.
- """
- self.log.info("Stopping host device services.")
- del self.wlan_policy_controller
- del self.wlan_controller
- del self.sl4f
- del self.ssh
-
- def take_bug_report(self) -> None:
- """Takes a bug report on the device and stores it in a file."""
- self.log.info(f"Taking snapshot of {self.mdns_name}")
-
- time_stamp = logger.sanitize_filename(
- logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
- )
- out_dir = context.get_current_context().get_full_output_path()
- out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip")
-
- try:
- with open(out_path, "wb") as file:
- snapshot_bytes = self.ssh.run(
- "snapshot", log_output=False
- ).stdout
- file.write(snapshot_bytes)
- self.log.info(f"Snapshot saved to {out_path}")
- except Exception as err:
- self.log.error(f"Failed to take snapshot: {err}")
-
- def take_bt_snoop_log(self, custom_name: str | None = None) -> None:
-        """Takes the bt-snoop log from the device and stores it in a file
-        in pcap format.
- """
- bt_snoop_path = context.get_current_context().get_full_output_path()
- time_stamp = logger.sanitize_filename(
- logger.epoch_to_log_line_timestamp(time.time())
- )
- out_name = "FuchsiaDevice%s_%s" % (
- self.serial,
- time_stamp.replace(" ", "_").replace(":", "-"),
- )
- if custom_name:
- out_name = f"{self.serial}_{custom_name}.pcap"
- else:
- out_name = f"{out_name}.pcap"
- full_out_path = os.path.join(bt_snoop_path, out_name)
- with open(full_out_path, "wb") as file:
- pcap_bytes = self.ssh.run("bt-snoop-cli -d -f pcap").stdout
- file.write(pcap_bytes)
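For context, a sketch of the Mobly controller config this class consumed. All values below are placeholders; the keys mirror the fields read in the constructor above, and mdns_name is required for Honeydew device creation.

from antlion.controllers import fuchsia_device

# Placeholder testbed entry for a single Fuchsia device.
config = {
    "ip": "fuchsia-emulator",         # IP address or mDNS name of the DUT
    "mdns_name": "fuchsia-emulator",  # required to create the Honeydew device
    "ssh_port": 22,
    "ssh_priv_key": "~/.ssh/fuchsia_ed25519",
    "ffx_binary_path": "${FUCHSIA_DIR}/.jiri_root/bin/ffx",
    "country_code": "US",
}

devices = fuchsia_device.create([config])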
diff --git a/packages/antlion/controllers/fuchsia_lib/OWNERS b/packages/antlion/controllers/fuchsia_lib/OWNERS
deleted file mode 100644
index bc76ac3..0000000
--- a/packages/antlion/controllers/fuchsia_lib/OWNERS
+++ /dev/null
@@ -1,8 +0,0 @@
-chcl@google.com
-haydennix@google.com
-jmbrenna@google.com
-mnck@google.com
-nickchee@google.com
-sbalana@google.com
-silberst@google.com
-tturney@google.com
diff --git a/packages/antlion/controllers/fuchsia_lib/__init__.py b/packages/antlion/controllers/fuchsia_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/fuchsia_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/fuchsia_lib/base_lib.py b/packages/antlion/controllers/fuchsia_lib/base_lib.py
deleted file mode 100644
index 1171d98..0000000
--- a/packages/antlion/controllers/fuchsia_lib/base_lib.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-from typing import Any, Mapping
-from urllib.request import Request, urlopen
-
-from mobly.logger import PrefixLoggerAdapter
-
-DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30
-
-
-class DeviceOffline(Exception):
- """Exception if the device is no longer reachable via the network."""
-
-
-class SL4FCommandFailed(Exception):
- """A SL4F command to the server failed."""
-
-
-class BaseLib:
- def __init__(self, addr: str, logger_tag: str) -> None:
- self.address = addr
- self.log = PrefixLoggerAdapter(
- logging.getLogger(),
- {
- PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"SL4F | {self.address} | {logger_tag}"
- },
- )
-
- def send_command(
- self,
- cmd: str,
- args: Mapping[str, object] | None = None,
- response_timeout: float = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC,
- ) -> dict[str, Any]:
- """Builds and sends a JSON command to SL4F server.
-
- Args:
- cmd: SL4F method name of command.
- args: Arguments required to execute cmd.
- response_timeout: Seconds to wait for a response before
- throwing an exception.
-
- Returns:
- Response from SL4F server.
-
-        Raises:
- TimeoutError: The HTTP request timed out waiting for a response
- """
- data = {
- "jsonrpc": "2.0",
- # id is required by the SL4F server to parse test_data but is not
- # currently used.
- "id": "",
- "method": cmd,
- "params": args,
- }
- data_json = json.dumps(data).encode("utf-8")
- req = Request(
- self.address,
- data=data_json,
- headers={
- "Content-Type": "application/json; charset=utf-8",
- "Content-Length": str(len(data_json)),
- },
- )
-
- self.log.debug(
- f'Sending request "{cmd}" with args: {args} with timeout {response_timeout}'
- )
- response = urlopen(req, timeout=response_timeout)
-
- response_body = response.read().decode("utf-8")
- try:
- response_json = json.loads(response_body)
- self.log.debug(f'Received response for "{cmd}": {response_json}')
- except json.JSONDecodeError as e:
- raise SL4FCommandFailed(response_body) from e
-
- # If the SL4F command fails it returns a str, without an 'error' field
- # to get.
- if not isinstance(response_json, dict):
- raise SL4FCommandFailed(response_json)
-
- return response_json
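To make the wire format concrete, a sketch of the request send_command assembles. The server address and method name are placeholders, not real SL4F facets.

import json
from urllib.request import Request, urlopen

address = "http://192.0.2.10:80"  # placeholder SL4F server address
payload = {
    "jsonrpc": "2.0",
    "id": "",  # required by the SL4F server but otherwise unused
    "method": "example_facet.ExampleMethod",  # placeholder method name
    "params": {"arg": "value"},
}
data = json.dumps(payload).encode("utf-8")
request = Request(
    address,
    data=data,
    headers={
        "Content-Type": "application/json; charset=utf-8",
        "Content-Length": str(len(data)),
    },
)
response = json.loads(urlopen(request, timeout=30).read().decode("utf-8"))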
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
deleted file mode 100644
index 2bcb832..0000000
--- a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from dataclasses import dataclass
-
-from honeydew.affordances.connectivity.wlan.utils.types import CountryCode
-from honeydew.fuchsia_device.fuchsia_device import (
- FuchsiaDevice as HdFuchsiaDevice,
-)
-from mobly import logger, signals
-
-from antlion import utils
-
-TIME_TO_SLEEP_BETWEEN_RETRIES = 1
-TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
-
-
-class WlanControllerError(signals.ControllerError):
- pass
-
-
-class WlanController:
- """Contains methods related to wlan core, to be used in FuchsiaDevice object"""
-
- def __init__(self, honeydew: HdFuchsiaDevice) -> None:
- self.honeydew = honeydew
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[WlanController | {self.honeydew.device_name}]",
- },
- )
-
- def set_country_code(self, country_code: CountryCode) -> None:
- """Sets country code through the regulatory region service and waits
- for the code to be applied to WLAN PHY.
-
- Args:
- country_code: the 2 character country code to set
-
- Raises:
- EnvironmentError - failure to get/set regulatory region
- ConnectionError - failure to query PHYs
- """
- self.log.info(f"Setting DUT country code to {country_code}")
- self.honeydew.wlan_core.set_region(country_code)
-
- self.log.info(
- f"Verifying DUT country code was correctly set to {country_code}."
- )
- phy_ids_response = self.honeydew.wlan_core.get_phy_id_list()
-
- end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
- while time.time() < end_time:
-            for phy_id in phy_ids_response:
-                resp = self.honeydew.wlan_core.get_country(phy_id)
- if resp == country_code:
- return
- time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
- else:
- raise EnvironmentError(
- f"Failed to set DUT country code to {country_code}."
- )
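A short sketch of how FuchsiaDevice.configure_wlan drove this controller; device is assumed to be an already-constructed FuchsiaDevice.

from honeydew.affordances.connectivity.wlan.utils.types import CountryCode

from antlion.controllers.fuchsia_device import FuchsiaDevice


def set_us_region(device: FuchsiaDevice) -> None:
    # Sets the regulatory region and blocks until every PHY reports it.
    device.wlan_controller.set_country_code(CountryCode("US"))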
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
deleted file mode 100644
index 0960a54..0000000
--- a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
+++ /dev/null
@@ -1,376 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from dataclasses import dataclass
-
-from honeydew.affordances.connectivity.wlan.utils.errors import (
- HoneydewWlanError,
-)
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectionState,
- DisconnectStatus,
- NetworkConfig,
- NetworkState,
- WlanClientState,
-)
-from honeydew.fuchsia_device.fuchsia_device import (
- FuchsiaDevice as HdFuchsiaDevice,
-)
-from mobly import logger, signals
-
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
-
-SESSION_MANAGER_TIMEOUT_SEC = 10
-FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
-DEFAULT_GET_UPDATE_TIMEOUT = 60
-
-
-class WlanPolicyControllerError(signals.ControllerError):
- pass
-
-
-@dataclass
-class PreservedState:
- saved_networks: list[NetworkConfig] | None
- client_connections_state: WlanClientState | None
-
-
-@dataclass
-class ClientState:
- state: str
- networks: list[dict[str, object]]
-
-
-# TODO(http://b/309854439): Add a ClientStateWatcher and refactor tests to allow test
-# developers more control when update listeners are set and the client update state is
-# reset.
-class WlanPolicyController:
- """Contains methods related to the wlan policy layer, to be used in the
- FuchsiaDevice object."""
-
- def __init__(
- self, honeydew: HdFuchsiaDevice, ssh: FuchsiaSSHProvider
- ) -> None:
- self.preserved_networks_and_client_state: PreservedState | None = None
- self.policy_configured = False
- self.honeydew = honeydew
- self.ssh = ssh
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[WlanPolicyController | {self.ssh.config.host_name}]",
- },
- )
-
- def configure_wlan(
- self,
- preserve_saved_networks: bool,
- timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT,
- ) -> None:
- """Sets up wlan policy layer.
-
- Args:
- preserve_saved_networks: whether to clear existing saved
- networks and client state, to be restored at test close.
- timeout_sec: time to wait for device to configure WLAN.
- """
-
- # We need to stop session manager to free control of
- # fuchsia.wlan.policy.ClientController, which can only be used by a
- # single caller at a time. Fuchsia Controller needs the ClientController
- # to trigger WLAN policy state changes. On eng builds the
- # session_manager can be restarted after being stopped during reboot so
- # we attempt killing the session manager process for 10 seconds.
- # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl
- if b"cast_agent.cm" in self.ssh.run("ps").stdout:
- session_manager_expiration = (
- time.time() + SESSION_MANAGER_TIMEOUT_SEC
- )
- while time.time() < session_manager_expiration:
- self.ssh.stop_component(
- "session_manager", is_cfv2_component=True
- )
-
- # Acquire control of policy layer
- self.honeydew.wlan_policy.create_client_controller()
- self.log.info("ACTS tests now have control of the WLAN policy layer.")
-
- if (
- preserve_saved_networks
- and not self.preserved_networks_and_client_state
- ):
- self.preserved_networks_and_client_state = (
- self.remove_and_preserve_networks_and_client_state()
- )
-
- self.honeydew.wlan_policy.start_client_connections()
- self.policy_configured = True
-
- def _deconfigure_wlan(self) -> None:
- self.honeydew.wlan_policy.stop_client_connections()
- self.policy_configured = False
-
- def clean_up(self) -> None:
- if self.preserved_networks_and_client_state is not None:
- # It is possible for policy to have been configured before, but
-            # deconfigured before test end. In this case, it must be set up
-            # before restoring networks.
- if not self.policy_configured:
- self.configure_wlan(False)
-
- self.restore_preserved_networks_and_client_state()
-
- def _find_network(
- self, ssid: str, networks: list[NetworkState]
- ) -> NetworkState | None:
- """Helper method to find network in list of network states.
-
- Args:
- ssid: The network name to look for.
- networks: The list of network states to look in.
-
- Returns:
- Network state of target ssid or None if not found in networks.
- """
- for network in networks:
- if network.network_identifier.ssid == ssid:
- return network
- return None
-
- def wait_for_network_state(
- self,
- ssid: str,
- expected_states: ConnectionState | set[ConnectionState],
- expected_status: DisconnectStatus | None = None,
- timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT,
- ) -> ConnectionState:
- """Waits until the device returns with expected network state.
-
- Args:
- ssid: The network name to check the state of.
- expected_states: The network state or states we are expecting to see.
- expected_status: The disconnect status of the network. Only relevant when
- expected_state is FAILED or DISCONNECTED.
-            timeout_sec: The number of seconds to wait for an update showing connection.
-
- Returns:
- Current network state if network converges on one of the expected states.
-
- Raises:
- TypeError: If DisconnectStatus provided with a CONNECTING or CONNECTED
- state.
- WlanPolicyControllerError: If no network is found before timeout or fails to
- converge to one of the expected states.
- """
-
- if not isinstance(expected_states, set):
- expected_states = {expected_states}
-
-        if (
-            expected_states.issubset(
-                {ConnectionState.CONNECTING, ConnectionState.CONNECTED}
-            )
-            and expected_status is not None
-        ):
- raise TypeError(
- "Disconnect status not valid for CONNECTING or CONNECTED states."
- )
-
- self.honeydew.wlan_policy.set_new_update_listener()
- network: NetworkState | None = None
-
- end_time = time.time() + timeout_sec
- while time.time() < end_time:
- time_left = max(1.0, end_time - time.time())
- try:
- client = self.honeydew.wlan_policy.get_update(timeout=time_left)
- except TimeoutError as e:
- self.log.debug("Timeout waiting for WLAN state updates: %s", e)
- continue
-
- # If we don't find the network initially, wait and retry.
- network = self._find_network(ssid, client.networks)
- if network is None:
- self.log.debug(
- f"{ssid} not found in client networks: {client.networks}"
- )
- continue
-
- if network.connection_state in expected_states:
- # Check optional disconnect status matches.
- if expected_status:
- if network.disconnect_status is not expected_status:
- raise WlanPolicyControllerError(
- f"Disconnect status is not {expected_status}"
- )
- elif network.connection_state is ConnectionState.CONNECTING:
- self.log.debug(f"Network {ssid} still attempting to connect.")
- continue
- else:
- raise WlanPolicyControllerError(
- f'Expected network "{ssid}" to be in state {expected_states}, '
- f"got {network.connection_state}"
- )
-
- # Successfully converged on expected state and status
- return network.connection_state
-
- if network is None:
- raise WlanPolicyControllerError(
- f"Timed out trying to find ssid: {ssid}"
- )
- raise WlanPolicyControllerError(
- f'Timed out waiting for "{ssid}" to reach state {expected_states} and '
- f"status {expected_status}"
- )
-
- def wait_for_client_state(
- self,
- expected_state: WlanClientState,
- timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT,
- ) -> None:
- """Waits until the client converges to expected state.
-
- Args:
- expected_state: The client state we are waiting to see.
- timeout_sec: Duration to wait for the desired_state.
-
- Raises:
- WlanPolicyControllerError: If client still has not converged to expected
- state at end of timeout.
- """
- self.honeydew.wlan_policy.set_new_update_listener()
-
- last_err: TimeoutError | None = None
- end_time = time.time() + timeout_sec
- while time.time() < end_time:
- time_left = max(1, int(end_time - time.time()))
- try:
- client = self.honeydew.wlan_policy.get_update(timeout=time_left)
- except TimeoutError as e:
- last_err = e
- continue
- if client.state is not expected_state:
- # Continue getting updates.
- continue
- else:
- return
- else:
- self.log.error(
- f"Client state did not converge to the expected state: {expected_state}"
- f" Waited:{timeout_sec}s"
- )
- raise WlanPolicyControllerError from last_err
-
- def wait_for_no_connections(
- self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
- ) -> None:
- """Waits to see that there are no connections to the device.
-
- Args:
- timeout_sec: The time to wait to see no connections.
-
- Raises:
- WlanPolicyControllerError: If client update has no networks or if client
- still has connections at end of timeout.
- """
- self.honeydew.wlan_policy.set_new_update_listener()
-
- last_err: TimeoutError | None = None
- end_time = time.time() + timeout_sec
- while time.time() < end_time:
- curr_connected_networks: list[NetworkState] = []
- time_left = max(1, int(end_time - time.time()))
- try:
- client = self.honeydew.wlan_policy.get_update(timeout=time_left)
- except TimeoutError as e:
- # Retry to handle the cases in negative testing where we expect
- # to receive an 'error'.
- last_err = e
- continue
-
- # Iterate through networks checking to see if any are still connected.
- for network in client.networks:
- if network.connection_state in {
- ConnectionState.CONNECTING,
- ConnectionState.CONNECTED,
- }:
- curr_connected_networks.append(network)
-
- if len(curr_connected_networks) != 0:
- # Continue getting updates.
- continue
- else:
- return
-
- self.log.error(f"Networks still connected. Waited: {timeout_sec}s")
- raise WlanPolicyControllerError from last_err
-
- def remove_and_preserve_networks_and_client_state(self) -> PreservedState:
- """Preserves networks already saved on devices before removing them.
-
- This method is used to set up a clean test environment. Records the state of
- client connections before tests.
-
- Returns:
- PreservedState: State of the client containing NetworkConfigs and client
- connection state.
- """
- client = self.honeydew.wlan_policy.get_update()
- networks = self.honeydew.wlan_policy.get_saved_networks()
- self.honeydew.wlan_policy.remove_all_networks()
- self.log.info("Saved networks cleared and preserved.")
- return PreservedState(
- saved_networks=networks, client_connections_state=client.state
- )
-
- def restore_preserved_networks_and_client_state(self) -> None:
- """Restore preserved networks and client state onto device."""
- if self.preserved_networks_and_client_state is None:
- self.log.info("No preserved networks or client state to restore")
- return
-
- self.honeydew.wlan_policy.remove_all_networks()
-
- saved_networks = self.preserved_networks_and_client_state.saved_networks
- if saved_networks is not None:
- for network in saved_networks:
- try:
- self.honeydew.wlan_policy.save_network(
- network.ssid,
- network.security_type,
- network.credential_value,
- )
- except HoneydewWlanError as e:
- self.log.warning(
- 'Failed to restore network "%s": %s', network.ssid, e
- )
-
- client_state = (
- self.preserved_networks_and_client_state.client_connections_state
- )
- if client_state is not None:
- if client_state is WlanClientState.CONNECTIONS_ENABLED:
- self.honeydew.wlan_policy.start_client_connections()
- else:
- self.honeydew.wlan_policy.stop_client_connections()
-
- self.log.info("Preserved networks and client state restored.")
- self.preserved_networks_and_client_state = None
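For context, a minimal usage sketch of the preserve/restore pair above, assuming `controller` is an already-initialized policy controller and `test_fn` is the test body (both names are placeholders):

    # Hypothetical helper; `controller` exposes the methods deleted above.
    def run_with_clean_wlan_state(controller, test_fn) -> None:
        # Record and clear saved networks so the test starts from a known state.
        controller.preserved_networks_and_client_state = (
            controller.remove_and_preserve_networks_and_client_state()
        )
        try:
            test_fn()
        finally:
            # Restore whatever was saved on the device before the test.
            controller.restore_preserved_networks_and_client_state()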
diff --git a/packages/antlion/controllers/fuchsia_lib/package_server.py b/packages/antlion/controllers/fuchsia_lib/package_server.py
deleted file mode 100644
index 32d5726..0000000
--- a/packages/antlion/controllers/fuchsia_lib/package_server.py
+++ /dev/null
@@ -1,261 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import os
-import shutil
-import socket
-import subprocess
-import tarfile
-import tempfile
-from dataclasses import dataclass
-from datetime import datetime
-from typing import TextIO
-
-from mobly import logger, signals
-
-from antlion import context, utils
-from antlion.capabilities.ssh import SSHProvider
-from antlion.net import wait_for_port
-from antlion.runner import CalledProcessError
-
-DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com"
-PM_SERVE_STOP_TIMEOUT_SEC = 5
-
-
-class PackageServerError(signals.TestAbortClass):
- pass
-
-
-def random_port() -> int:
-    with socket.socket() as s:
-        s.bind(("", 0))
-        return int(s.getsockname()[1])
-
-
-@dataclass
-class Route:
- """Represent a route in the routing table."""
-
- preferred_source: str | None
-
-
-def find_routes_to(dest_ip: str) -> list[Route]:
- """Find the routes used to reach a destination.
-
- Look through the routing table for the routes that would be used without
- sending any packets. This is especially helpful for when the device is
- currently unreachable.
-
- Only natively supported on Linux. MacOS has iproute2mac, but it doesn't
- support JSON formatted output.
-
- TODO(http://b/238924195): Add support for MacOS.
-
- Args:
- dest_ip: IP address of the destination
-
-    Raises:
- CalledProcessError: if the ip command returns a non-zero exit code
- JSONDecodeError: if the ip command doesn't return JSON
-
- Returns:
- Routes with destination to dest_ip.
- """
- resp = subprocess.run(
- f"ip -json route get {dest_ip}".split(), capture_output=True, check=True
- )
- routes = json.loads(resp.stdout)
- return [Route(r.get("prefsrc")) for r in routes]
-
-
-def find_host_ip(device_ip: str) -> str:
- """Find the host's source IP used to reach a device.
-
- Not all host interfaces can talk to a given device. This limitation can
- either be physical through hardware or virtual through routing tables.
- Look through the routing table without sending any packets then return the
- preferred source IP address.
-
- Args:
- device_ip: IP address of the device
-
- Raises:
- PackageServerError: if there are multiple or no routes to device_ip, or
- if the route doesn't contain "prefsrc"
-
- Returns:
- The host IP used to reach device_ip.
- """
- routes = find_routes_to(device_ip)
- if len(routes) != 1:
- raise PackageServerError(
- f"Expected only one route to {device_ip}, got {routes}"
- )
-
- route = routes[0]
- if not route.preferred_source:
- raise PackageServerError(f'Route does not contain "prefsrc": {route}')
- return route.preferred_source
-
-
-class PackageServer:
- """Package manager for Fuchsia; an interface to the "pm" CLI tool."""
-
- def __init__(self, packages_archive_path: str) -> None:
- """
- Args:
- packages_archive_path: Path to an archive containing the pm binary
- and amber-files.
- """
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: "[pm]",
- },
- )
-
- self._server_log: TextIO | None = None
- self._server_proc: subprocess.Popen[bytes] | None = None
- self._log_path: str | None = None
-
- self._tmp_dir = tempfile.mkdtemp(prefix="packages-")
-        with tarfile.open(packages_archive_path, "r:gz") as tar:
-            tar.extractall(self._tmp_dir)
-
- self._binary_path = os.path.join(self._tmp_dir, "pm")
- self._packages_path = os.path.join(self._tmp_dir, "amber-files")
- self._port = random_port()
-
- self._assert_repo_has_not_expired()
-
- def clean_up(self) -> None:
- if self._server_proc:
- self.stop_server()
- if self._tmp_dir:
- shutil.rmtree(self._tmp_dir)
-
- def _assert_repo_has_not_expired(self) -> None:
- """Abort if the repository metadata has expired.
-
- Raises:
- TestAbortClass: when the timestamp.json file has expired
- """
- with open(f"{self._packages_path}/repository/timestamp.json", "r") as f:
- data = json.load(f)
- expiresAtRaw = data["signed"]["expires"]
- expiresAt = datetime.strptime(expiresAtRaw, "%Y-%m-%dT%H:%M:%SZ")
- if expiresAt <= datetime.now():
- raise signals.TestAbortClass(
- f"{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}"
- )
-
- def start(self) -> None:
- """Start the package server.
-
- Does not check for errors; view the log file for any errors.
- """
- if self._server_proc:
- self.log.warn(
- "Skipping to start the server since it has already been started"
- )
- return
-
- pm_command = f"{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}"
-
- root_dir = context.get_current_context().get_full_output_path()
- epoch = utils.get_current_epoch_time()
- time_stamp = logger.normalize_log_line_timestamp(
- logger.epoch_to_log_line_timestamp(epoch)
- )
- self._log_path = os.path.join(root_dir, f"pm_server.{time_stamp}.log")
-
- self._server_log = open(self._log_path, "a+")
- self._server_proc = subprocess.Popen(
- pm_command.split(),
- preexec_fn=os.setpgrp,
- stdout=self._server_log,
- stderr=subprocess.STDOUT,
- )
- try:
- wait_for_port("127.0.0.1", self._port)
- except TimeoutError as e:
- if self._server_log:
- self._server_log.close()
- if self._log_path:
- with open(self._log_path, "r") as f:
- logs = f.read()
- else:
- logs = "Log path not configured"
- raise TimeoutError(
- f"pm serve failed to expose port {self._port}. Logs:\n{logs}"
- ) from e
-
- self.log.info(f"Serving packages on port {self._port}")
-
- def configure_device(
- self,
- ssh: SSHProvider,
- repo_name: str = DEFAULT_FUCHSIA_REPO_NAME,
- ) -> None:
- """Configure the device to use this package server.
-
- Args:
- ssh: Device SSH transport channel
- repo_name: Name of the repo to alias this package server
- """
- # Remove any existing repositories that may be stale.
- try:
- ssh.run(["pkgctl", "repo", "rm", f"fuchsia-pkg://{repo_name}"])
- except CalledProcessError as e:
- if b"NOT_FOUND" not in e.stderr:
- raise e
-
- # Configure the device with the new repository.
- host_ip = find_host_ip(ssh.config.host_name)
- repo_url = f"http://{host_ip}:{self._port}"
- ssh.run(
- f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json"
- )
- self.log.info(
- f'Added repo "{repo_name}" as {repo_url} on device {ssh.config.host_name}'
- )
-
- def stop_server(self) -> None:
- """Stop the package server."""
- if not self._server_proc:
- self.log.warn(
- "Skipping to stop the server since it hasn't been started yet"
- )
- return
-
- self._server_proc.terminate()
- try:
- self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC)
- except subprocess.TimeoutExpired:
- self.log.warn(
- f"Taking over {PM_SERVE_STOP_TIMEOUT_SEC}s to stop. Killing the server"
- )
- self._server_proc.kill()
- self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC)
- finally:
- if self._server_log:
- self._server_log.close()
-
- self._server_proc = None
- self._log_path = None
- self._server_log = None
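A minimal sketch of how the deleted PackageServer was driven, assuming a valid packages archive path and an SSHProvider `ssh` for the target device (both values are placeholders):

    from antlion.controllers.fuchsia_lib.package_server import PackageServer

    server = PackageServer("/path/to/packages.tar.gz")  # hypothetical archive
    try:
        server.start()                # serve amber-files on a random local port
        server.configure_device(ssh)  # point the device's pkgctl at this host
    finally:
        server.clean_up()             # stop the server and remove the temp dir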
diff --git a/packages/antlion/controllers/fuchsia_lib/sl4f.py b/packages/antlion/controllers/fuchsia_lib/sl4f.py
deleted file mode 100644
index 1c79b8d..0000000
--- a/packages/antlion/controllers/fuchsia_lib/sl4f.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import logging
-
-from mobly import logger
-
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
-from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
- FuchsiaWlanDeprecatedConfigurationLib,
-)
-from antlion.net import wait_for_port
-from antlion.runner import CalledProcessError
-
-DEFAULT_SL4F_PORT = 80
-START_SL4F_V2_CMD = "start_sl4f"
-
-
-class SL4F:
- """Module for Fuchsia devices to interact with the SL4F tool.
-
- Attributes:
- ssh: Transport to start and stop SL4F.
- address: http address for SL4F server including SL4F port.
- log: Logger for the device-specific instance of SL4F.
- """
-
- def __init__(
- self,
- ssh: FuchsiaSSHProvider,
- port: int = DEFAULT_SL4F_PORT,
- ) -> None:
- """
- Args:
- ssh: Transport to start and stop SL4F.
- port: Port for the SL4F server to listen on.
- """
- ip = ipaddress.ip_address(ssh.config.host_name)
- if ip.version == 4:
- self.address = f"http://{ip}:{port}"
- elif ip.version == 6:
- self.address = f"http://[{ip}]:{port}"
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4F | {self.address}]",
- },
- )
-
- try:
- ssh.stop_component("sl4f")
-            ssh.run(START_SL4F_V2_CMD)
- except CalledProcessError:
-            # TODO(fxbug.dev/42181764) Remove support for running SL4F in CFv1
-            # mode once ACTS no longer uses images that come with only CFv1 SL4F.
- self.log.warn(
- "Running SL4F in CFv1 mode, "
- "this is deprecated for images built after 5/9/2022, "
- "see https://fxbug.dev/42157029 for more info."
- )
- ssh.stop_component("sl4f")
- ssh.start_v1_component("sl4f")
-
- try:
- wait_for_port(ssh.config.host_name, port)
- self.log.info("SL4F server is reachable")
- except TimeoutError as e:
- raise TimeoutError("SL4F server is unreachable") from e
-
- self._init_libraries()
-
- def _init_libraries(self) -> None:
- # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
- self.wlan_deprecated_configuration_lib = (
- FuchsiaWlanDeprecatedConfigurationLib(self.address)
- )
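Constructing the class above starts SL4F on the device and waits for its port; a short sketch, assuming `ssh` is a FuchsiaSSHProvider for the target and the printed address is illustrative:

    from antlion.controllers.fuchsia_lib.sl4f import SL4F

    sl4f = SL4F(ssh)     # stops any stale instance, starts SL4F, waits for port 80
    print(sl4f.address)  # e.g. http://192.0.2.1:80 or http://[2001:db8::1]:80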
diff --git a/packages/antlion/controllers/fuchsia_lib/ssh.py b/packages/antlion/controllers/fuchsia_lib/ssh.py
deleted file mode 100644
index ea6af52..0000000
--- a/packages/antlion/controllers/fuchsia_lib/ssh.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion.capabilities.ssh import SSHProvider
-from antlion.runner import CalledProcessError
-
-DEFAULT_SSH_USER: str = "fuchsia"
-DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519"
-# The default package repository for all components.
-FUCHSIA_PACKAGE_REPO_NAME = "fuchsia.com"
-
-
-class FuchsiaSSHProvider(SSHProvider):
- """Device-specific provider for SSH clients."""
-
- def start_v1_component(
- self,
- component: str,
- timeout_sec: int = 5,
- repo: str = FUCHSIA_PACKAGE_REPO_NAME,
- ) -> None:
- """Start a CFv1 component in the background.
-
- Args:
- component: Name of the component without ".cmx".
- timeout_sec: Seconds to wait for the process to show up in 'ps'.
- repo: Default package repository for all components.
-
- Raises:
- TimeoutError: when the component doesn't launch within timeout_sec
- """
- # The "run -d" command will hang when executed without a pseudo-tty
- # allocated.
- self.config.force_tty = True
- self.run(
- f"run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx",
- )
- self.config.force_tty = False
-
- timeout = time.perf_counter() + timeout_sec
- while True:
- ps_cmd = self.run("ps")
- if f"{component}.cmx" in ps_cmd.stdout.decode("utf-8"):
- return
- if time.perf_counter() > timeout:
- raise TimeoutError(
- f'Failed to start "{component}.cmx" after {timeout_sec}s'
- )
-
- def stop_component(
- self, component: str, is_cfv2_component: bool = False
- ) -> None:
- """Stop all instances of a CFv1 or CFv2 component.
-
- Args:
-            component: Name of the component without the suffix ("cm" or "cmx").
- is_cfv2_component: Determines the component suffix to use.
- """
- suffix = "cm" if is_cfv2_component else "cmx"
-
- try:
- self.run(["killall", f"{component}.{suffix}"])
- self.log.info(f"Stopped component: {component}.{suffix}")
- except CalledProcessError as e:
- if b"no tasks found" in e.stderr:
- self.log.debug(
- f"Could not find component: {component}.{suffix}"
- )
- return
- raise e
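A sketch of the CFv1 start/stop flow above; the component name is illustrative and `ssh` is assumed to be a FuchsiaSSHProvider instance:

    # start_v1_component runs fuchsia-pkg://fuchsia.com/sl4f#meta/sl4f.cmx and
    # polls `ps` until it appears; stop_component killalls sl4f.cmx and ignores
    # "no tasks found".
    ssh.start_v1_component("sl4f")
    ssh.stop_component("sl4f")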
diff --git a/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
deleted file mode 100644
index dd0b481..0000000
--- a/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaWlanDeprecatedConfigurationLib(BaseLib):
- def __init__(self, addr: str) -> None:
- super().__init__(addr, "wlan_deprecated")
-
- def wlanSuggestAccessPointMacAddress(self, addr: str) -> dict[str, str]:
- """Suggests a mac address to soft AP interface, to support
- cast legacy behavior.
-
- Args:
- addr: string of mac address to suggest (e.g. '12:34:56:78:9a:bc')
- """
- test_cmd = "wlan_deprecated.suggest_ap_mac"
- test_args = {"mac": addr}
-
- return self.send_command(test_cmd, test_args)
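For reference, a sketch of calling the library above directly; the SL4F address and MAC are placeholders:

    from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
        FuchsiaWlanDeprecatedConfigurationLib,
    )

    lib = FuchsiaWlanDeprecatedConfigurationLib("http://192.0.2.1:80")
    # Sends the SL4F command "wlan_deprecated.suggest_ap_mac" with {"mac": ...}.
    resp = lib.wlanSuggestAccessPointMacAddress("12:34:56:78:9a:bc")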
diff --git a/packages/antlion/controllers/iperf_client.py b/packages/antlion/controllers/iperf_client.py
deleted file mode 100644
index 96778b8..0000000
--- a/packages/antlion/controllers/iperf_client.py
+++ /dev/null
@@ -1,348 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, attr-defined"
-from __future__ import annotations
-
-import logging
-import os
-import socket
-import subprocess
-import threading
-from abc import ABC, abstractmethod
-
-from antlion import context
-from antlion.capabilities.ssh import SSHConfig
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider
-from antlion.controllers.utils_lib.commands.date import LinuxDateCommand
-from antlion.types import ControllerConfig, Json
-from antlion.validation import MapValidator
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "IPerfClient"
-
-
-class IPerfError(Exception):
- """Raised on execution errors of iPerf."""
-
-
-def create(configs: list[ControllerConfig]) -> list[IPerfClientBase]:
- """Factory method for iperf clients.
-
- The function creates iperf clients based on at least one config.
-    If configs contain ssh settings or an AndroidDevice, remote iperf clients
-    will be started on those devices; otherwise, the client will run on the
- local machine.
-
- Args:
- configs: config parameters for the iperf server
- """
- results: list[IPerfClientBase] = []
- for config in configs:
- c = MapValidator(config)
- if "ssh_config" in config:
- results.append(
- IPerfClientOverSsh(
- SSHProvider(
- SSHConfig.from_config(c.get(dict, "ssh_config"))
- ),
- test_interface=c.get(str, "test_interface"),
- sync_date=True,
- )
- )
- else:
- results.append(IPerfClient())
- return results
-
-
-def destroy(objects: list[IPerfClientBase]) -> None:
- # No cleanup needed.
- pass
-
-
-def get_info(objects: list[IPerfClientBase]) -> list[Json]:
- return []
-
-
-class RouteNotFound(ConnectionError):
- """Failed to find a route to the iperf server."""
-
-
-class IPerfClientBase(ABC):
- """The Base class for all IPerfClients.
-
- This base class is responsible for synchronizing the logging to prevent
- multiple IPerfClients from writing results to the same file, as well
- as providing the interface for IPerfClient objects.
- """
-
- # Keeps track of the number of IPerfClient logs to prevent file name
- # collisions.
- __log_file_counter = 0
-
- __log_file_lock = threading.Lock()
-
- @property
- @abstractmethod
- def test_interface(self) -> str | None:
- """Find the test interface.
-
- Returns:
- Name of the interface used to communicate with server_ap, or None if
- not set.
- """
- ...
-
- @staticmethod
- def _get_full_file_path(tag: str = "") -> str:
- """Returns the full file path for the IPerfClient log file.
-
- Note: If the directory for the file path does not exist, it will be
- created.
-
- Args:
- tag: The tag passed in to the server run.
- """
- current_context = context.get_current_context()
- full_out_dir = os.path.join(
- current_context.get_full_output_path(), "iperf_client_files"
- )
-
- with IPerfClientBase.__log_file_lock:
- os.makedirs(full_out_dir, exist_ok=True)
- tags = ["IPerfClient", tag, IPerfClientBase.__log_file_counter]
- out_file_name = "%s.log" % (
- ",".join([str(x) for x in tags if x != "" and x is not None])
- )
- IPerfClientBase.__log_file_counter += 1
-
- return os.path.join(full_out_dir, out_file_name)
-
- def start(
- self,
- ip: str,
- iperf_args: str,
- tag: str,
- timeout: int = 3600,
- iperf_binary: str | None = None,
- ) -> str:
- """Starts iperf client, and waits for completion.
-
- Args:
- ip: iperf server ip address.
- iperf_args: A string representing arguments to start iperf
- client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
- tag: A string to further identify iperf results file
- timeout: the maximum amount of time the iperf client can run.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed
-                the binary is in the path.
-
- Returns:
- full_out_path: iperf result path.
- """
- raise NotImplementedError("start() must be implemented.")
-
-
-class IPerfClient(IPerfClientBase):
- """Class that handles iperf3 client operations."""
-
- @property
- def test_interface(self) -> str | None:
- return None
-
- def start(
- self,
- ip: str,
- iperf_args: str,
- tag: str,
- timeout: int = 3600,
- iperf_binary: str | None = None,
- ) -> str:
- """Starts iperf client, and waits for completion.
-
- Args:
- ip: iperf server ip address.
- iperf_args: A string representing arguments to start iperf
- client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
- tag: tag to further identify iperf results file
- timeout: unused.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed
-                the binary is in the path.
-
- Returns:
- full_out_path: iperf result path.
- """
- if not iperf_binary:
- logging.debug(
- "No iperf3 binary specified. "
- "Assuming iperf3 is in the path."
- )
- iperf_binary = "iperf3"
- else:
- logging.debug(f"Using iperf3 binary located at {iperf_binary}")
- iperf_cmd = [str(iperf_binary), "-c", ip] + iperf_args.split(" ")
- full_out_path = self._get_full_file_path(tag)
-
- with open(full_out_path, "w") as out_file:
- subprocess.call(iperf_cmd, stdout=out_file)
-
- return full_out_path
-
-
-class IPerfClientOverSsh(IPerfClientBase):
- """Class that handles iperf3 client operations on remote machines."""
-
- def __init__(
- self,
- ssh_provider: SSHProvider,
- test_interface: str | None = None,
- sync_date: bool = True,
- ):
- self._ssh_provider = ssh_provider
- self._test_interface = test_interface
-
- if sync_date:
- # iperf clients are not given internet access, so their system time
- # needs to be manually set to be accurate.
- LinuxDateCommand(self._ssh_provider).sync()
-
- @property
- def test_interface(self) -> str | None:
- return self._test_interface
-
- def start(
- self,
- ip: str,
- iperf_args: str,
- tag: str,
- timeout: int = 3600,
- iperf_binary: str | None = None,
- ) -> str:
- """Starts iperf client, and waits for completion.
-
- Args:
- ip: iperf server ip address.
- iperf_args: A string representing arguments to start iperf
- client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
- tag: tag to further identify iperf results file
- timeout: the maximum amount of time to allow the iperf client to run
-            iperf_binary: Location of iperf3 binary. If none, it is assumed
-                the binary is in the path.
-
- Returns:
- full_out_path: iperf result path.
- """
- if not iperf_binary:
- logging.debug(
- "No iperf3 binary specified. "
- "Assuming iperf3 is in the path."
- )
- iperf_binary = "iperf3"
- else:
- logging.debug(f"Using iperf3 binary located at {iperf_binary}")
- iperf_cmd = f"{iperf_binary} -c {ip} {iperf_args}"
- full_out_path = self._get_full_file_path(tag)
-
- try:
- iperf_process = self._ssh_provider.run(
- iperf_cmd, timeout_sec=timeout
- )
- iperf_output = iperf_process.stdout
- with open(full_out_path, "wb") as out_file:
- out_file.write(iperf_output)
- except socket.timeout:
- raise TimeoutError(
- "Socket timeout. Timed out waiting for iperf "
- "client to finish."
- )
- except Exception as err:
- logging.exception(f"iperf run failed: {err}")
-
- return full_out_path
-
-
-class IPerfClientOverAdb(IPerfClientBase):
- """Class that handles iperf3 operations over ADB devices."""
-
- def __init__(
- self, android_device: AndroidDevice, test_interface: str | None = None
- ):
- """Creates a new IPerfClientOverAdb object.
-
- Args:
-            android_device: The AndroidDevice to run the iperf client on. It
-                must correspond to an AndroidDevice entry in the config.
- test_interface: The network interface that will be used to send
- traffic to the iperf server.
- """
- self._android_device = android_device
- self._test_interface = test_interface
-
- @property
- def test_interface(self) -> str | None:
- return self._test_interface
-
- def start(
- self,
- ip: str,
- iperf_args: str,
- tag: str,
- timeout: int = 3600,
- iperf_binary: str | None = None,
- ) -> str:
- """Starts iperf client, and waits for completion.
-
- Args:
- ip: iperf server ip address.
- iperf_args: A string representing arguments to start iperf
- client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
- tag: tag to further identify iperf results file
- timeout: the maximum amount of time to allow the iperf client to run
-            iperf_binary: Location of iperf3 binary. If none, it is assumed
-                the binary is in the path.
-
- Returns:
- The iperf result file path.
- """
- clean_out = ""
- try:
- if not iperf_binary:
- logging.debug(
- "No iperf3 binary specified. "
- "Assuming iperf3 is in the path."
- )
- iperf_binary = "iperf3"
- else:
- logging.debug(f"Using iperf3 binary located at {iperf_binary}")
- iperf_cmd = f"{iperf_binary} -c {ip} {iperf_args}"
- out = self._android_device.adb.shell(
- str(iperf_cmd), timeout=timeout
- )
- clean_out = out.split("\n")
- if "error" in clean_out[0].lower():
- raise IPerfError(clean_out)
- except (subprocess.TimeoutExpired, AdbCommandError):
- logging.warning("TimeoutError: Iperf measurement failed.")
-
- full_out_path = self._get_full_file_path(tag)
- with open(full_out_path, "w") as out_file:
- out_file.write("\n".join(clean_out))
-
- return full_out_path
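A sketch of the client factory and a run against a server, with illustrative config values (a config without "ssh_config" yields a local IPerfClient):

    from antlion.controllers.iperf_client import create

    clients = create([{}])           # local client; no ssh_config given
    result_path = clients[0].start(
        ip="192.0.2.10",             # hypothetical iperf3 server address
        iperf_args="-t 10 -J",
        tag="downlink",
    )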
diff --git a/packages/antlion/controllers/iperf_server.py b/packages/antlion/controllers/iperf_server.py
deleted file mode 100755
index 9244433..0000000
--- a/packages/antlion/controllers/iperf_server.py
+++ /dev/null
@@ -1,661 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from __future__ import annotations
-
-import json
-import logging
-import math
-import os
-import shlex
-import subprocess
-import threading
-import time
-from typing import IO
-
-from mobly import logger, signals
-
-from antlion import context, utils
-from antlion.controllers.utils_lib.commands import nmcli
-from antlion.controllers.utils_lib.commands.command import optional, require
-from antlion.controllers.utils_lib.commands.journalctl import (
- LinuxJournalctlCommand,
-)
-from antlion.controllers.utils_lib.ssh import connection, settings
-from antlion.libs.proc import job
-from antlion.types import ControllerConfig, Json
-from antlion.validation import MapValidator
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "IPerfServer"
-KILOBITS = 1024
-MEGABITS = KILOBITS * 1024
-GIGABITS = MEGABITS * 1024
-BITS_IN_BYTE = 8
-
-
-def create(
- configs: list[ControllerConfig],
-) -> list[IPerfServer | IPerfServerOverSsh]:
- """Factory method for iperf servers.
-
- The function creates iperf servers based on at least one config.
- If configs only specify a port number, a regular local IPerfServer object
-    will be created. If configs contain ssh settings or an AndroidDevice,
-    remote iperf servers will be started on those devices.
-
- Args:
- configs: config parameters for the iperf server
- """
- results: list[IPerfServer | IPerfServerOverSsh] = []
- for c in configs:
- if isinstance(c, (str, int)) and str(c).isdigit():
- results.append(IPerfServer(int(c)))
- elif isinstance(c, dict) and "ssh_config" in c and "port" in c:
- config = MapValidator(c)
- results.append(
- IPerfServerOverSsh(
- settings.from_config(config.get(dict, "ssh_config")),
- config.get(int, "port"),
- test_interface=config.get(str, "test_interface"),
- use_killall=config.get(bool, "use_killall", False),
- )
- )
- else:
- raise ValueError(
- f"Config entry {c} in {configs} is not a valid IPerfServer config."
- )
- return results
-
-
-def destroy(
- objects: list[IPerfServer | IPerfServerOverSsh],
-) -> None:
- for iperf_server in objects:
- try:
- iperf_server.stop()
- except Exception:
- logging.exception(f"Unable to properly clean up {iperf_server}.")
-
-
-def get_info(
- objects: list[IPerfServer | IPerfServerOverSsh],
-) -> list[Json]:
- return []
-
-
-class IPerfResult(object):
- def __init__(self, result_path, reporting_speed_units="Mbytes"):
- """Loads iperf result from file.
-
- Loads iperf result from JSON formatted server log. File can be accessed
- before or after server is stopped. Note that only the first JSON object
-        will be loaded and this function is not intended to be used with files
- containing multiple iperf client runs.
- """
- # if result_path isn't a path, treat it as JSON
- self.reporting_speed_units = reporting_speed_units
- if not os.path.exists(result_path):
- self.result = json.loads(result_path)
- else:
- try:
- with open(result_path, "r") as f:
- iperf_output = f.readlines()
- if "}\n" in iperf_output:
- iperf_output = iperf_output[
- : iperf_output.index("}\n") + 1
- ]
- iperf_string = "".join(iperf_output)
- iperf_string = iperf_string.replace("nan", "0")
- self.result = json.loads(iperf_string)
- except ValueError:
- with open(result_path, "r") as f:
- # Possibly a result from interrupted iperf run,
- # skip first line and try again.
- lines = f.readlines()[1:]
- self.result = json.loads("".join(lines))
-
- def _has_data(self):
- """Checks if the iperf result has valid throughput data.
-
- Returns:
- True if the result contains throughput data. False otherwise.
- """
- return ("end" in self.result) and (
- "sum_received" in self.result["end"] or "sum" in self.result["end"]
- )
-
- def _get_reporting_speed(
- self, network_speed_in_bits_per_second: int | float
- ) -> float:
- """Sets the units for the network speed reporting based on how the
- object was initiated. Defaults to Megabytes per second. Currently
- supported, bits per second (bits), kilobits per second (kbits), megabits
- per second (mbits), gigabits per second (gbits), bytes per second
- (bytes), kilobits per second (kbytes), megabits per second (mbytes),
- gigabytes per second (gbytes).
-
- Args:
- network_speed_in_bits_per_second: The network speed from iperf in
- bits per second.
-
- Returns:
- The value of the throughput in the appropriate units.
- """
- speed_divisor = 1
- if self.reporting_speed_units[1:].lower() == "bytes":
- speed_divisor = speed_divisor * BITS_IN_BYTE
- if self.reporting_speed_units[0:1].lower() == "k":
- speed_divisor = speed_divisor * KILOBITS
- if self.reporting_speed_units[0:1].lower() == "m":
- speed_divisor = speed_divisor * MEGABITS
- if self.reporting_speed_units[0:1].lower() == "g":
- speed_divisor = speed_divisor * GIGABITS
- return network_speed_in_bits_per_second / speed_divisor
-
- def get_json(self):
- """Returns the raw json output from iPerf."""
- return self.result
-
- @property
- def error(self):
- return self.result.get("error", None)
-
- @property
- def avg_rate(self):
- """Average UDP rate in MB/s over the entire run.
-
- This is the average UDP rate observed at the terminal the iperf result
- is pulled from. According to iperf3 documentation this is calculated
- based on bytes sent and thus is not a good representation of the
-        quality of the link. If the result is not from a successful run, this
- property is None.
- """
- if not self._has_data() or "sum" not in self.result["end"]:
- return None
- bps = self.result["end"]["sum"]["bits_per_second"]
- return self._get_reporting_speed(bps)
-
- @property
- def avg_receive_rate(self):
- """Average receiving rate in MB/s over the entire run.
-
- This data may not exist if iperf was interrupted. If the result is not
-        from a successful run, this property is None.
- """
- if not self._has_data() or "sum_received" not in self.result["end"]:
- return None
- bps = self.result["end"]["sum_received"]["bits_per_second"]
- return self._get_reporting_speed(bps)
-
- @property
- def avg_send_rate(self):
- """Average sending rate in MB/s over the entire run.
-
- This data may not exist if iperf was interrupted. If the result is not
-        from a successful run, this property is None.
- """
- if not self._has_data() or "sum_sent" not in self.result["end"]:
- return None
- bps = self.result["end"]["sum_sent"]["bits_per_second"]
- return self._get_reporting_speed(bps)
-
- @property
- def instantaneous_rates(self):
- """Instantaneous received rate in MB/s over entire run.
-
- This data may not exist if iperf was interrupted. If the result is not
- from a success run, this property is None.
- """
- if not self._has_data():
- return None
- intervals = [
- self._get_reporting_speed(interval["sum"]["bits_per_second"])
- for interval in self.result["intervals"]
- ]
- return intervals
-
- @property
- def std_deviation(self):
- """Standard deviation of rates in MB/s over entire run.
-
- This data may not exist if iperf was interrupted. If the result is not
- from a success run, this property is None.
- """
- return self.get_std_deviation(0)
-
- def get_std_deviation(self, iperf_ignored_interval):
- """Standard deviation of rates in MB/s over entire run.
-
- This data may not exist if iperf was interrupted. If the result is not
-        from a successful run, this property is None. A configurable number of
-        beginning (and the single last) intervals are ignored in the
-        calculation as they are inaccurate (e.g. the last is from a very small
-        interval).
-
- Args:
-            iperf_ignored_interval: number of iperf intervals to ignore when
-                calculating the standard deviation
-
- Returns:
- The standard deviation.
- """
- if not self._has_data():
- return None
- instantaneous_rates = self.instantaneous_rates[
- iperf_ignored_interval:-1
- ]
- avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
- sqd_deviations = [
- (rate - avg_rate) ** 2 for rate in instantaneous_rates
- ]
- std_dev = math.sqrt(
- math.fsum(sqd_deviations) / (len(sqd_deviations) - 1)
- )
- return std_dev
-
-
-class IPerfServerBase(object):
- # Keeps track of the number of IPerfServer logs to prevent file name
- # collisions.
- __log_file_counter = 0
-
- __log_file_lock = threading.Lock()
-
- def __init__(self, port: int):
- self._port = port
- # TODO(markdr): We shouldn't be storing the log files in an array like
- # this. Nobody should be reading this property either. Instead, the
- # IPerfResult should be returned in stop() with all the necessary info.
- # See aosp/1012824 for a WIP implementation.
- self.log_files: list[str] = []
-
- @property
- def port(self) -> int:
- raise NotImplementedError("port must be specified.")
-
- @property
- def started(self) -> bool:
- raise NotImplementedError("started must be specified.")
-
- def start(self, extra_args: str = "", tag: str = "") -> None:
- """Starts an iperf3 server.
-
- Args:
- extra_args: Extra arguments to start iperf server with.
- tag: Appended to log file name to identify logs from different
- iperf runs.
- """
- raise NotImplementedError("start() must be specified.")
-
- def stop(self) -> str | None:
- """Stops the iperf server.
-
- Returns:
- The name of the log file generated from the terminated session, or
- None if iperf wasn't started or ran successfully.
- """
- raise NotImplementedError("stop() must be specified.")
-
- def _get_full_file_path(self, tag: str | None = None) -> str:
- """Returns the full file path for the IPerfServer log file.
-
- Note: If the directory for the file path does not exist, it will be
- created.
-
- Args:
- tag: The tag passed in to the server run.
- """
- out_dir = self.log_path
-
- with IPerfServerBase.__log_file_lock:
- tags = [tag, IPerfServerBase.__log_file_counter]
- out_file_name = "IPerfServer,%s.log" % (
- ",".join([str(x) for x in tags if x != "" and x is not None])
- )
- IPerfServerBase.__log_file_counter += 1
-
- file_path = os.path.join(out_dir, out_file_name)
- self.log_files.append(file_path)
- return file_path
-
- @property
- def log_path(self) -> str:
- current_context = context.get_current_context()
- full_out_dir = os.path.join(
- current_context.get_full_output_path(), f"IPerfServer{self.port}"
- )
-
- # Ensure the directory exists.
- os.makedirs(full_out_dir, exist_ok=True)
-
- return full_out_dir
-
-
-def _get_port_from_ss_output(ss_output, pid):
- pid = str(pid)
- lines = ss_output.split("\n")
- for line in lines:
- if pid in line:
- # Expected format:
- # tcp LISTEN 0 5 *:<PORT> *:* users:(("cmd",pid=<PID>,fd=3))
- return line.split()[4].split(":")[-1]
- else:
- raise ProcessLookupError("Could not find started iperf3 process.")
-
-
-class IPerfServer(IPerfServerBase):
- """Class that handles iperf server commands on localhost."""
-
- def __init__(self, port: int = 5201) -> None:
- super().__init__(port)
- self._hinted_port = port
- self._current_log_file: str | None = None
- self._iperf_process: subprocess.Popen[bytes] | None = None
- self._last_opened_file: IO[bytes] | None = None
-
- @property
- def port(self) -> int:
- return self._port
-
- @property
- def started(self) -> bool:
- return self._iperf_process is not None
-
- def start(self, extra_args: str = "", tag: str = "") -> None:
- """Starts iperf server on local machine.
-
- Args:
- extra_args: A string representing extra arguments to start iperf
- server with.
- tag: Appended to log file name to identify logs from different
- iperf runs.
- """
- if self._iperf_process is not None:
- return
-
- self._current_log_file = self._get_full_file_path(tag)
-
- # Run an iperf3 server on the hinted port with JSON output.
- command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"]
-
- command.extend(shlex.split(extra_args))
-
- if self._last_opened_file:
- self._last_opened_file.close()
- self._last_opened_file = open(self._current_log_file, "wb")
- self._iperf_process = subprocess.Popen(
- command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL
- )
- for attempts_left in reversed(range(3)):
- try:
- self._port = int(
- _get_port_from_ss_output(
- job.run("ss -l -p -n | grep iperf").stdout,
- self._iperf_process.pid,
- )
- )
- break
- except ProcessLookupError:
- if attempts_left == 0:
- raise
- logging.debug("iperf3 process not started yet.")
- time.sleep(0.01)
-
- def stop(self) -> str | None:
- """Stops the iperf server.
-
- Returns:
- The name of the log file generated from the terminated session, or
- None if iperf wasn't started or ran successfully.
- """
- if self._iperf_process is None:
- return None
-
- if self._last_opened_file:
- self._last_opened_file.close()
- self._last_opened_file = None
-
- self._iperf_process.terminate()
- self._iperf_process = None
-
- return self._current_log_file
-
- def __del__(self) -> None:
- self.stop()
-
-
-class IPerfServerOverSsh(IPerfServerBase):
- """Class that handles iperf3 operations on remote machines."""
-
- def __init__(
- self,
- ssh_settings: settings.SshSettings,
- port: int,
- test_interface: str,
- use_killall: bool = False,
- ):
- super().__init__(port)
- self.test_interface = test_interface
- self.hostname = ssh_settings.hostname
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[IPerfServer | {self.hostname}]",
- },
- )
- self._ssh_settings = ssh_settings
- self._ssh_session: connection.SshConnection | None = (
- connection.SshConnection(ssh_settings)
- )
- self._journalctl = require(LinuxJournalctlCommand(self._ssh_session))
-
- self._iperf_pid: str | None = None
- self._current_tag: str | None = None
- self._use_killall = str(use_killall).lower() == "true"
-
- # The control and test interfaces have to be different, otherwise
- # performing a DHCP release+renewal risks severing the SSH connection
- # and bricking the device.
- control_interface = utils.get_interface_based_on_ip(
- self._ssh_session, self.hostname
- )
- if control_interface == test_interface:
- raise signals.TestAbortAll(
- f"iperf server control interface ({control_interface}) cannot be the "
- f"same as the test interface ({test_interface})."
- )
-
- # Disable NetworkManager on the test interface
- self._nmcli = optional(nmcli.LinuxNmcliCommand(self._ssh_session))
- if self._nmcli:
- self._nmcli.setup_device(self.test_interface)
-
- @property
- def port(self) -> int:
- return self._port
-
- @property
- def started(self) -> bool:
- return self._iperf_pid is not None
-
- def _get_remote_log_path(self) -> str:
- return f"/tmp/iperf_server_port{self.port}.log"
-
- def get_interface_ip_addresses(
- self, interface: str
- ) -> dict[str, list[str]]:
- """Gets all of the ip addresses, ipv4 and ipv6, associated with a
- particular interface name.
-
- Args:
- interface: The interface name on the device, ie eth0
-
- Returns:
- A list of dictionaries of the various IP addresses. See
- utils.get_interface_ip_addresses.
- """
- return utils.get_interface_ip_addresses(self._get_ssh(), interface)
-
- def renew_test_interface_ip_address(self) -> None:
- """Renews the test interface's IPv4 address.
-
- Necessary for changing DHCP scopes during a test.
- """
- utils.renew_linux_ip_address(self._get_ssh(), self.test_interface)
-
- def get_addr(
- self, addr_type: str = "ipv4_private", timeout_sec: int | None = None
- ) -> str:
- """Wait until a type of IP address on the test interface is available
- then return it.
- """
- return utils.get_addr(
- self._get_ssh(), self.test_interface, addr_type, timeout_sec
- )
-
- def _cleanup_iperf_port(self) -> None:
- """Checks and kills zombie iperf servers occupying intended port."""
- assert self._ssh_session is not None
-
- netstat = self._ssh_session.run(["netstat", "-tupln"]).stdout.decode(
- "utf-8"
- )
- for line in netstat.splitlines():
- if (
- "LISTEN" in line
- and "iperf3" in line
- and f":{self.port}" in line
- ):
- pid = int(line.split()[-1].split("/")[0])
- logging.debug(
- "Killing zombie server on port %i: %i", self.port, pid
- )
- self._ssh_session.run(["kill", "-9", str(pid)])
-
- def start(
- self,
- extra_args: str = "",
- tag: str = "",
- iperf_binary: str | None = None,
- ) -> None:
- """Starts iperf server on specified machine and port.
-
- Args:
- extra_args: Extra arguments to start iperf server with.
- tag: Appended to log file name to identify logs from different
- iperf runs.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed
-                the binary is in the path.
- """
- if self.started:
- return
-
- self._cleanup_iperf_port()
- if not iperf_binary:
- logging.debug(
- "No iperf3 binary specified. "
- "Assuming iperf3 is in the path."
- )
- iperf_binary = "iperf3"
- else:
- logging.debug(f"Using iperf3 binary located at {iperf_binary}")
- iperf_command = f"{iperf_binary} -s -J -p {self.port}"
-
- cmd = f"{iperf_command} {extra_args} > {self._get_remote_log_path()}"
-
- job_result = self._get_ssh().run_async(cmd)
- self._iperf_pid = job_result.stdout.decode("utf-8")
- self._current_tag = tag
-
- def stop(self) -> str | None:
- """Stops the iperf server.
-
- Returns:
- The name of the log file generated from the terminated session, or
- None if iperf wasn't started or ran successfully.
- """
- if not self.started:
- return None
-
- ssh = self._get_ssh()
-
- if self._use_killall:
- ssh.run(["killall", "iperf3"], ignore_status=True)
- elif self._iperf_pid:
- ssh.run(["kill", "-9", self._iperf_pid])
-
- iperf_result = ssh.run(f"cat {self._get_remote_log_path()}")
-
- log_file = self._get_full_file_path(self._current_tag)
- with open(log_file, "wb") as f:
- f.write(iperf_result.stdout)
-
- ssh.run(["rm", self._get_remote_log_path()])
- self._iperf_pid = None
- return log_file
-
- def _get_ssh(self) -> connection.SshConnection:
- if self._ssh_session is None:
- self._ssh_session = connection.SshConnection(self._ssh_settings)
-
- # Disable NetworkManager on the test interface
- self._nmcli = optional(nmcli.LinuxNmcliCommand(self._ssh_session))
- if self._nmcli:
- self._nmcli.setup_device(self.test_interface)
-
- return self._ssh_session
-
- def close_ssh(self) -> None:
- """Closes the ssh session to the iperf server, if one exists, preventing
- connection reset errors when rebooting server device.
- """
- if self.started:
- self.stop()
- if self._ssh_session:
- self._ssh_session.close()
- self._ssh_session = None
-
- def get_systemd_journal(self) -> str:
-        had_ssh = self._ssh_session is not None
-
- self._journalctl.set_runner(self._get_ssh())
- logs = self._journalctl.logs()
-
- if not had_ssh:
- # Return to closed state
- self.close_ssh()
-
- return logs
-
- def download_logs(self, path: str) -> None:
- """Download all available logs to path.
-
- Args:
- path: Path to write logs to.
- """
- timestamp = logger.normalize_log_line_timestamp(
- logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
- )
-
- systemd_journal = self.get_systemd_journal()
- systemd_journal_path = os.path.join(
- path, f"iperf_systemd_{timestamp}.log"
- )
- with open(systemd_journal_path, "a") as f:
- f.write(systemd_journal)
- self.log.info(f"Wrote systemd journal to {systemd_journal_path}")
diff --git a/packages/antlion/controllers/openwrt_ap.py b/packages/antlion/controllers/openwrt_ap.py
deleted file mode 100644
index 9c59ab1..0000000
--- a/packages/antlion/controllers/openwrt_ap.py
+++ /dev/null
@@ -1,540 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Controller for Open WRT access point."""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from __future__ import annotations
-
-import logging
-import random
-import re
-import time
-from typing import Literal
-
-import yaml
-from mobly import logger, signals
-
-from antlion.controllers.openwrt_lib import (
- network_settings,
- wireless_config,
- wireless_settings_applier,
-)
-from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
-from antlion.controllers.openwrt_lib.openwrt_constants import (
- OpenWrtModelMap as modelmap,
-)
-from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
-from antlion.controllers.utils_lib.ssh import connection, settings
-from antlion.types import ControllerConfig, Json
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "OpenWrtAP"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-OWE_SECURITY = "owe"
-SAE_SECURITY = "sae"
-SAEMIXED_SECURITY = "sae-mixed"
-ENABLE_RADIO = "0"
-PMF_ENABLED = 2
-WAIT_TIME = 20
-DEFAULT_RADIOS = ("radio0", "radio1")
-
-
-def create(configs: list[ControllerConfig]) -> list[OpenWrtAP]:
- """Creates ap controllers from a json config.
-
- Creates an ap controller from either a list, or a single element. The element
- can either be just the hostname or a dictionary containing the hostname and
- username of the AP to connect to over SSH.
-
- Args:
- configs: The json configs that represent this controller.
-
- Returns:
- OpenWrtAP objects
-
- Example:
- Below is the config file entry for OpenWrtAP as a list. A testbed can have
- 1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
- login information. OpenWrtAP#__init__() uses this to create SSH object.
-
- "OpenWrtAP": [
- {
- "ssh_config": {
- "user" : "root",
- "host" : "192.168.1.1"
- }
- },
- {
- "ssh_config": {
- "user" : "root",
- "host" : "192.168.1.2"
- }
- }
- ]
- """
- return [OpenWrtAP(c) for c in configs]
-
-
-def destroy(objects: list[OpenWrtAP]) -> None:
- """Destroys a list of OpenWrtAP.
-
- Args:
-        objects: The list of OpenWrtAP to destroy.
- """
- for ap in objects:
- ap.close()
- ap.close_ssh()
-
-
-def get_info(objects: list[OpenWrtAP]) -> list[Json]:
- """Get information on a list of access points.
-
- Args:
-        objects: A list of OpenWrtAP.
-
-    Returns:
-        A list of each AP's hostname.
- """
- return [ap.ssh_settings.hostname for ap in objects]
-
-
-BSSIDMap = dict[Literal["2g", "5g"], dict[str, str]]
-
-
-class OpenWrtAP(object):
- """An OpenWrtAP controller.
-
- Attributes:
- ssh: The ssh connection to the AP.
- ssh_settings: The ssh settings being used by the ssh connection.
- log: Logging object for OpenWrtAP.
- wireless_setting: object holding wireless configuration.
- network_setting: Object for network configuration.
- model: OpenWrt HW model.
-        radios: Radio interfaces used for the test.
- """
-
- def __init__(self, config):
- """Initialize AP."""
- self.ssh_settings = settings.from_config(config["ssh_config"])
- self.ssh = connection.SshConnection(self.ssh_settings)
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[OpenWrtAP|{self.ssh_settings.hostname}]",
- },
- )
- self.wireless_setting: (
- wireless_settings_applier.WirelessSettingsApplier | None
- ) = None
- self.network_setting = network_settings.NetworkSettings(
- self.ssh, self.ssh_settings, self.log
- )
- self.model = self.get_model_name()
- if self.model in modelmap.__dict__:
- self.radios = modelmap.__dict__[self.model]
- else:
- self.radios = DEFAULT_RADIOS
-
- def configure_ap(
- self,
- wireless_configs: list[wireless_config.WirelessConfig],
- channel_2g: int,
- channel_5g: int,
- ):
- """Configure AP with the required settings.
-
- Each test class inherits WifiBaseTest. Based on the test, we may need to
- configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
- combination. We call WifiBaseTest methods get_psk_network(),
- get_open_network(), get_wep_network() and get_ent_network() to create
- dictionaries which contains this information. 'wifi_configs' is a list of
- such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
- 1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
- configure the APs.
-
- wifi_configs = [
- {
- '2g': {
- 'SSID': '2g_AkqXWPK4',
- 'security': 'psk2',
- 'password': 'YgYuXqDO9H',
- 'hiddenSSID': False
- },
- },
- {
- '5g': {
- 'SSID': '5g_8IcMR1Sg',
- 'security': 'none',
- 'hiddenSSID': False
- },
- }
- ]
-
- Args:
-            wireless_configs: list of network settings for 2G and 5G bands.
- channel_2g: channel for 2G band.
- channel_5g: channel for 5G band.
- """
- self.wireless_setting = (
- wireless_settings_applier.WirelessSettingsApplier(
- self.ssh,
- wireless_configs,
- channel_2g,
- channel_5g,
- self.radios[1],
- self.radios[0],
- )
- )
- self.wireless_setting.apply_wireless_settings()
-
- def start_ap(self):
- """Starts the AP with the settings in /etc/config/wireless."""
- self.ssh.run("wifi up")
- curr_time = time.time()
- while time.time() < curr_time + WAIT_TIME:
- if self.get_wifi_status():
- return
- time.sleep(3)
- if not self.get_wifi_status():
- raise ValueError("Failed to turn on WiFi on the AP.")
-
- def stop_ap(self):
- """Stops the AP."""
- self.ssh.run("wifi down")
- curr_time = time.time()
- while time.time() < curr_time + WAIT_TIME:
- if not self.get_wifi_status():
- return
- time.sleep(3)
- if self.get_wifi_status():
- raise ValueError("Failed to turn off WiFi on the AP.")
-
- def get_bssids_for_wifi_networks(self) -> BSSIDMap:
- """Get BSSIDs for wifi networks configured.
-
- Returns:
- Dictionary of SSID - BSSID map for both bands.
- """
- bssid_map: BSSIDMap = {"2g": {}, "5g": {}}
- for radio in self.radios:
- ssid_ifname_map = self.get_ifnames_for_ssids(radio)
- if radio == self.radios[0]:
- for ssid, ifname in ssid_ifname_map.items():
- bssid_map["5g"][ssid] = self.get_bssid(ifname)
- elif radio == self.radios[1]:
- for ssid, ifname in ssid_ifname_map.items():
- bssid_map["2g"][ssid] = self.get_bssid(ifname)
- return bssid_map
-
- def get_ifnames_for_ssids(self, radio) -> dict[str, str]:
- """Get interfaces for wifi networks.
-
- Args:
- radio: 2g or 5g radio get the bssids from.
-
- Returns:
- dictionary of ssid - ifname mappings.
- """
- ssid_ifname_map: dict[str, str] = {}
- str_output = self.ssh.run(f"wifi status {radio}").stdout.decode("utf-8")
- wifi_status = yaml.load(
- str_output.replace("\t", "").replace("\n", ""),
- Loader=yaml.SafeLoader,
- )
- wifi_status = wifi_status[radio]
- if wifi_status["up"]:
- interfaces = wifi_status["interfaces"]
- for config in interfaces:
- ssid = config["config"]["ssid"]
- ifname = config["ifname"]
- ssid_ifname_map[ssid] = ifname
- return ssid_ifname_map
-
- def get_bssid(self, ifname):
- """Get MAC address from an interface.
-
- Args:
- ifname: interface name of the corresponding MAC.
-
- Returns:
- BSSID of the interface.
- """
- ifconfig = self.ssh.run(f"ifconfig {ifname}").stdout.decode("utf-8")
- mac_addr = ifconfig.split("\n")[0].split()[-1]
- return mac_addr
-
- def set_wpa_encryption(self, encryption):
- """Set different encryptions to wpa or wpa2.
-
- Args:
- encryption: ccmp, tkip, or ccmp+tkip.
- """
- str_output = self.ssh.run("wifi status").stdout.decode("utf-8")
- wifi_status = yaml.load(
- str_output.replace("\t", "").replace("\n", ""),
- Loader=yaml.SafeLoader,
- )
-
- # Counting how many interface are enabled.
- total_interface = 0
- for radio in self.radios:
- num_interface = len(wifi_status[radio]["interfaces"])
- total_interface += num_interface
-
- # Iterates every interface to get and set wpa encryption.
- default_extra_interface = 2
- for i in range(total_interface + default_extra_interface):
- origin_encryption = self.ssh.run(
- f"uci get wireless.@wifi-iface[{i}].encryption"
- ).stdout.decode("utf-8")
- origin_psk_pattern = re.match(r"psk\b", origin_encryption)
- target_psk_pattern = re.match(r"psk\b", encryption)
- origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
- target_psk2_pattern = re.match(r"psk2\b", encryption)
-
- if origin_psk_pattern == target_psk_pattern:
- self.ssh.run(
- f"uci set wireless.@wifi-iface[{i}].encryption={encryption}"
- )
-
- if origin_psk2_pattern == target_psk2_pattern:
- self.ssh.run(
- f"uci set wireless.@wifi-iface[{i}].encryption={encryption}"
- )
-
- self.ssh.run("uci commit wireless")
- self.ssh.run("wifi")
-
- def set_password(self, pwd_5g=None, pwd_2g=None):
- """Set password for individual interface.
-
- Args:
-            pwd_5g: password of 8-63 ASCII letters and digits for the 5g network.
-            pwd_2g: password of 8-63 ASCII letters and digits for the 2g network.
- """
- if pwd_5g:
- if len(pwd_5g) < 8 or len(pwd_5g) > 63:
- self.log.error("Password must be 8~63 characters long")
- # Only accept ascii letters and digits
- elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
- self.log.error(
- "Password must only contains ascii letters and digits"
- )
- else:
- self.ssh.run(f"uci set wireless.@wifi-iface[{3}].key={pwd_5g}")
- self.log.info(f"Set 5G password to :{pwd_5g}")
-
- if pwd_2g:
- if len(pwd_2g) < 8 or len(pwd_2g) > 63:
- self.log.error("Password must be 8~63 characters long")
- # Only accept ascii letters and digits
- elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
- self.log.error(
- "Password must only contains ascii letters and digits"
- )
- else:
- self.ssh.run(f"uci set wireless.@wifi-iface[{2}].key={pwd_2g}")
- self.log.info(f"Set 2G password to :{pwd_2g}")
-
- self.ssh.run("uci commit wireless")
- self.ssh.run("wifi")
-
- def set_ssid(self, ssid_5g=None, ssid_2g=None):
- """Set SSID for individual interface.
-
- Args:
- ssid_5g: 8 ~ 63 chars for 5g network.
- ssid_2g: 8 ~ 63 chars for 2g network.
- """
- if ssid_5g:
- if len(ssid_5g) < 8 or len(ssid_5g) > 63:
- self.log.error("SSID must be 8~63 characters long")
- # Only accept ascii letters and digits
- else:
- self.ssh.run(
- f"uci set wireless.@wifi-iface[{3}].ssid={ssid_5g}"
- )
- self.log.info(f"Set 5G SSID to :{ssid_5g}")
-
- if ssid_2g:
- if len(ssid_2g) < 8 or len(ssid_2g) > 63:
- self.log.error("SSID must be 8~63 characters long")
- # Only accept ascii letters and digits
- else:
- self.ssh.run(
- f"uci set wireless.@wifi-iface[{2}].ssid={ssid_2g}"
- )
- self.log.info(f"Set 2G SSID to :{ssid_2g}")
-
- self.ssh.run("uci commit wireless")
- self.ssh.run("wifi")
-
- def generate_mobility_domain(self):
- """Generate 4-character hexadecimal ID.
-
- Returns:
- String; a 4-character hexadecimal ID.
- """
- md = f"{random.getrandbits(16):04x}"
- self.log.info(f"Mobility Domain ID: {md}")
- return md
-
- def enable_80211r(self, iface, md):
- """Enable 802.11r for one single radio.
-
- Args:
- iface: index number of wifi-iface.
- 2: radio1
- 3: radio0
- md: mobility domain. a 4-character hexadecimal ID.
- Raises:
- TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
- """
- str_output = self.ssh.run("wifi status").stdout.decode("utf-8")
- wifi_status = yaml.load(
- str_output.replace("\t", "").replace("\n", ""),
- Loader=yaml.SafeLoader,
- )
- # Check if the radio is up.
- if iface == OpenWrtWifiSetting.IFACE_2G:
- if wifi_status[self.radios[1]]["up"]:
- self.log.info("2g network is ENABLED")
- else:
- raise signals.TestSkip("2g network is NOT ENABLED")
- elif iface == OpenWrtWifiSetting.IFACE_5G:
- if wifi_status[self.radios[0]]["up"]:
- self.log.info("5g network is ENABLED")
- else:
- raise signals.TestSkip("5g network is NOT ENABLED")
-
- # Setup 802.11r.
- self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].ieee80211r='1'")
- self.ssh.run(
- f"uci set wireless.@wifi-iface[{iface}].ft_psk_generate_local='1'"
- )
- self.ssh.run(
- f"uci set wireless.@wifi-iface[{iface}].mobility_domain='{md}'"
- )
- self.ssh.run("uci commit wireless")
- self.ssh.run("wifi")
-
- # Check if 802.11r is enabled.
- result = self.ssh.run(
- f"uci get wireless.@wifi-iface[{iface}].ieee80211r"
- ).stdout.decode("utf-8")
- if result == "1":
- self.log.info("802.11r is ENABLED")
- else:
- raise signals.TestSkip("802.11r is NOT ENABLED")
-
- def get_wifi_network(self, security=None, band=None):
- """Return first match wifi interface's config.
-
- Args:
- security: psk2 or none
- band: '2g' or '5g'
-
- Returns:
-            A dict containing the matching wifi interface's config.
- """
- if not self.wireless_setting:
- raise RuntimeError(
- "The AP has not been configured yet; run configure_ap()"
- )
-
- for wifi_iface in self.wireless_setting.wireless_configs:
- match_list = []
- wifi_network = wifi_iface.__dict__
- if security:
- match_list.append(security == wifi_network["security"])
- if band:
- match_list.append(band == wifi_network["band"])
-
- if all(match_list):
- wifi_network["SSID"] = wifi_network["ssid"]
- if not wifi_network["password"]:
- del wifi_network["password"]
- return wifi_network
- return None
-
- def get_wifi_status(self):
- """Check if radios are up. Default are 2G and 5G bands.
-
- Returns:
- True if both radios are up. False if not.
- """
- status = True
- for radio in self.radios:
- try:
- str_output = self.ssh.run(f"wifi status {radio}").stdout.decode(
- "utf-8"
- )
- wifi_status = yaml.load(
- str_output.replace("\t", "").replace("\n", ""),
- Loader=yaml.SafeLoader,
- )
- status = wifi_status[radio]["up"] and status
-            except Exception:
-                self.log.info("Failed to make an ssh connection to the OpenWrt AP")
- return False
- return status
-
- def verify_wifi_status(self, timeout=20):
- """Ensure wifi interfaces are ready.
-
- Args:
-            timeout: Number of seconds to wait for the interfaces to be ready.
- Returns:
- True if both radios are up. False if not.
- """
- start_time = time.time()
- end_time = start_time + timeout
- while time.time() < end_time:
- if self.get_wifi_status():
- return True
- time.sleep(1)
- return False
-
- def get_model_name(self):
- """Get Openwrt model name.
-
- Returns:
-            A string including the device brand and model, e.g. NETGEAR_R8000.
- """
- out = self.ssh.run(SYSTEM_INFO_CMD).stdout.decode("utf-8").split("\n")
- for line in out:
- if "board_name" in line:
- model = line.split()[1].strip('",').split(",")
- return "_".join(map(lambda i: i.upper(), model))
- self.log.info("Failed to retrieve OpenWrt model information.")
- return None
-
- def close(self):
- """Reset wireless and network settings to default and stop AP."""
- if self.network_setting.config:
- self.network_setting.cleanup_network_settings()
- if self.wireless_setting:
- self.wireless_setting.cleanup_wireless_settings()
-
- def close_ssh(self):
- """Close SSH connection to AP."""
- self.ssh.close()
-
- def reboot(self):
- """Reboot Openwrt."""
- self.ssh.run("reboot")
diff --git a/packages/antlion/controllers/openwrt_lib/OWNERS b/packages/antlion/controllers/openwrt_lib/OWNERS
deleted file mode 100644
index 6ddb5ea..0000000
--- a/packages/antlion/controllers/openwrt_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-jerrypcchen@google.com
-gmoturu@google.com
-martschneider@google.com
-sishichen@google.com
diff --git a/packages/antlion/controllers/openwrt_lib/__init__.py b/packages/antlion/controllers/openwrt_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/openwrt_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/openwrt_lib/network_const.py b/packages/antlion/controllers/openwrt_lib/network_const.py
deleted file mode 100644
index 7d551cf..0000000
--- a/packages/antlion/controllers/openwrt_lib/network_const.py
+++ /dev/null
@@ -1,290 +0,0 @@
-# mypy: disable-error-code="no-untyped-def"
-LOCALHOST = "192.168.1.1"
-
-# params for ipsec.conf
-IPSEC_CONF = {
- "config setup": {
- "charondebug": "chd 2,ike 2,knl 2,net 2,esp 2,dmn 2,"
- "mgr 2,lib 1,cfg 2,enc 1".__repr__(),
- "uniqueids": "never",
- },
- "conn %default": {"ike": "aes128-sha-modp1024", "esp": "aes128-sha1"},
-}
-
-IPSEC_L2TP_PSK = {
- "conn L2TP_PSK": {
- "keyexchange": "ikev1",
- "type": "transport",
- "left": LOCALHOST,
- "leftprotoport": "17/1701",
- "leftauth": "psk",
- "right": "%any",
- "rightprotoport": "17/%any",
- "rightsubnet": "0.0.0.0/0",
- "rightauth": "psk",
- "auto": "add",
- }
-}
-
-IPSEC_L2TP_RSA = {
- "conn L2TP_RSA": {
- "keyexchange": "ikev1",
- "type": "transport",
- "left": LOCALHOST,
- "leftprotoport": "17/1701",
- "leftauth": "pubkey",
- "leftcert": "serverCert.der",
- "right": "%any",
- "rightprotoport": "17/%any",
- "rightsubnet": "0.0.0.0/0",
- "rightauth": "pubkey",
- "auto": "add",
- }
-}
-
-IPSEC_HYBRID_RSA = {
- "conn HYBRID_RSA": {
- "keyexchange": "ikev1",
- "left": LOCALHOST,
- "leftsubnet": "0.0.0.0/0",
- "leftauth": "pubkey",
- "leftcert": "serverCert.der",
- "leftsendcert": "always",
- "right": "%any",
- "rightsubnet": "0.0.0.0/0",
- "rightauth": "pubkey",
- "rightauth2": "xauth",
- "xauth": "server",
- "auto": "add",
- }
-}
-
-IPSEC_XAUTH_PSK = {
- "conn XAUTH_PSK": {
- "keyexchange": "ikev1",
- "left": LOCALHOST,
- "leftsubnet": "0.0.0.0/0",
- "leftauth": "psk",
- "right": "%any",
- "rightsubnet": "0.0.0.0/0",
- "rightauth": "psk",
- "rightauth2": "xauth",
- "auto": "add",
- }
-}
-
-IPSEC_XAUTH_RSA = {
- "conn XAUTH_RSA": {
- "keyexchange": "ikev1",
- "left": LOCALHOST,
- "leftsubnet": "0.0.0.0/0",
- "leftcert": "serverCert.der",
- "leftsendcert": "always",
- "right": "%any",
- "rightsubnet": "0.0.0.0/0",
- "rightauth": "xauth",
- "xauth": "server",
- "auto": "add",
- }
-}
-
-IPSEC_IKEV2_MSCHAPV2 = {
- "conn IKEV2_MSCHAPV2": {
- "keyexchange": "ikev2",
- "left": LOCALHOST,
- "leftid": LOCALHOST,
- "leftcert": "serverCert.der",
- "leftsubnet": "0.0.0.0/0",
- "leftauth": "pubkey",
- "leftsendcert": "always",
- "right": "%any",
- "rightid": "vpntest",
- "rightauth": "eap-mschapv2",
- "auto": "add",
- }
-}
-
-IPSEC_IKEV2_PSK = {
- "conn IKEV2_PSK": {
- "keyexchange": "ikev2",
- "left": LOCALHOST,
- "leftid": LOCALHOST,
- "leftauth": "psk",
- "leftsubnet": "0.0.0.0/0",
- "right": "%any",
- "rightid": "vpntest",
- "rightauth": "psk",
- "auto": "add",
- }
-}
-
-IPSEC_IKEV2_RSA = {
- "conn IKEV2_RSA": {
- "keyexchange": "ikev2",
- "left": LOCALHOST,
- "leftid": LOCALHOST,
- "leftcert": "serverCert.der",
- "leftsubnet": "0.0.0.0/0",
- "leftauth": "pubkey",
- "leftsendcert": "always",
- "right": "%any",
- "rightid": f"vpntest@{LOCALHOST}",
- "rightauth": "pubkey",
- "rightcert": "clientCert.pem",
- "auto": "add",
- }
-}
-
-IPSEC_IKEV2_MSCHAPV2_HOSTNAME = {
- "conn IKEV2_MSCHAPV2_HOSTNAME": {
- "keyexchange": "ikev2",
- "left": LOCALHOST,
- "leftid": "strongswan-vpn-server.android-iperf.com",
- "leftcert": "serverCert.der",
- "leftsubnet": "0.0.0.0/0",
- "leftauth": "pubkey",
- "leftsendcert": "always",
- "right": "%any",
- "rightid": "vpntest",
- "rightauth": "eap-mschapv2",
- "auto": "add",
- }
-}
-
-IPSEC_IKEV2_PSK_HOSTNAME = {
- "conn IKEV2_PSK_HOSTNAME": {
- "keyexchange": "ikev2",
- "left": LOCALHOST,
- "leftid": "strongswan-vpn-server.android-iperf.com",
- "leftauth": "psk",
- "leftsubnet": "0.0.0.0/0",
- "right": "%any",
- "rightid": "vpntest",
- "rightauth": "psk",
- "auto": "add",
- }
-}
-
-IPSEC_IKEV2_RSA_HOSTNAME = {
- "conn IKEV2_RSA_HOSTNAME": {
- "keyexchange": "ikev2",
- "left": LOCALHOST,
- "leftid": "strongswan-vpn-server.android-iperf.com",
- "leftcert": "serverCert.der",
- "leftsubnet": "0.0.0.0/0",
- "leftauth": "pubkey",
- "leftsendcert": "always",
- "right": "%any",
- "rightid": "vpntest@strongswan-vpn-server.android-iperf.com",
- "rightauth": "pubkey",
- "rightcert": "clientCert.pem",
- "auto": "add",
- }
-}
-
-# params for xl2tpd
-
-XL2TPD_CONF_GLOBAL = (
- "[global]",
- "ipsec saref = no",
- "debug tunnel = no",
- "debug avp = no",
- "debug network = no",
- "debug state = no",
- "access control = no",
- "rand source = dev",
- "port = 1701",
-)
-
-XL2TPD_CONF_INS = (
- "[lns default]",
- "require authentication = yes",
- "pass peer = yes",
- "ppp debug = no",
- "length bit = yes",
- "refuse pap = yes",
- "refuse chap = yes",
-)
-
-XL2TPD_OPTION = (
- "require-mschap-v2",
- "refuse-mschap",
- "ms-dns 8.8.8.8",
- "ms-dns 8.8.4.4",
- "asyncmap 0",
- "auth",
- "crtscts",
- "idle 1800",
- "mtu 1410",
- "mru 1410",
- "connect-delay 5000",
- "lock",
- "hide-password",
- "local",
- "debug",
- "modem",
- "proxyarp",
- "lcp-echo-interval 30",
- "lcp-echo-failure 4",
- "nomppe",
-)
-
-# iptable rules for vpn_pptp
-FIREWALL_RULES_FOR_PPTP = (
- "iptables -A input_rule -i ppp+ -j ACCEPT",
- "iptables -A output_rule -o ppp+ -j ACCEPT",
- "iptables -A forwarding_rule -i ppp+ -j ACCEPT",
-)
-
-# iptable rules for vpn_l2tp
-FIREWALL_RULES_FOR_L2TP = (
- "iptables -I INPUT -m policy --dir in --pol ipsec --proto esp -j ACCEPT",
- "iptables -I FORWARD -m policy --dir in --pol ipsec --proto esp -j ACCEPT",
- "iptables -I FORWARD -m policy --dir out --pol ipsec --proto esp -j ACCEPT",
- "iptables -I OUTPUT -m policy --dir out --pol ipsec --proto esp -j ACCEPT",
- "iptables -t nat -I POSTROUTING -m policy --pol ipsec --dir out -j ACCEPT",
- "iptables -A INPUT -p esp -j ACCEPT",
- "iptables -A INPUT -i eth0.2 -p udp --dport 500 -j ACCEPT",
- "iptables -A INPUT -i eth0.2 -p tcp --dport 500 -j ACCEPT",
- "iptables -A INPUT -i eth0.2 -p udp --dport 4500 -j ACCEPT",
- "iptables -A INPUT -p udp --dport 500 -j ACCEPT",
- "iptables -A INPUT -p udp --dport 4500 -j ACCEPT",
- "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT",
-)
-
-FIREWALL_RULES_DISABLE_DNS_RESPONSE = (
- "iptables -I OUTPUT -p udp --sport 53 -j DROP",
- "iptables -I OUTPUT -p tcp --sport 53 -j DROP",
- "ip6tables -I OUTPUT -p udp --sport 53 -j DROP",
- "ip6tables -I OUTPUT -p tcp --sport 53 -j DROP",
-)
-
-
-# Object for vpn profile
-class VpnL2tp(object):
- """Profile for vpn l2tp type.
-
- Attributes:
- hostname: vpn server domain name
- address: vpn server address
- username: vpn user account
- password: vpn user password
- psk_secret: psk for ipsec
-        name: vpn server name registered in OpenWrt
- """
-
- def __init__(
- self,
- vpn_server_hostname,
- vpn_server_address,
- vpn_username,
- vpn_password,
- psk_secret,
- server_name,
- ):
- self.name = server_name
- self.hostname = vpn_server_hostname
- self.address = vpn_server_address
- self.username = vpn_username
- self.password = vpn_password
- self.psk_secret = psk_secret
diff --git a/packages/antlion/controllers/openwrt_lib/network_settings.py b/packages/antlion/controllers/openwrt_lib/network_settings.py
deleted file mode 100644
index 432d2d9..0000000
--- a/packages/antlion/controllers/openwrt_lib/network_settings.py
+++ /dev/null
@@ -1,1185 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import re
-import time
-
-from mobly import signals
-
-from antlion import utils
-from antlion.controllers.openwrt_lib import network_const
-
-SERVICE_DNSMASQ = "dnsmasq"
-SERVICE_STUNNEL = "stunnel"
-SERVICE_NETWORK = "network"
-SERVICE_PPTPD = "pptpd"
-SERVICE_FIREWALL = "firewall"
-SERVICE_IPSEC = "ipsec"
-SERVICE_XL2TPD = "xl2tpd"
-SERVICE_ODHCPD = "odhcpd"
-SERVICE_OPENNDS = "opennds"
-SERVICE_UHTTPD = "uhttpd"
-PPTP_PACKAGE = "pptpd kmod-nf-nathelper-extra"
-L2TP_PACKAGE = "strongswan-full openssl-util xl2tpd"
-NAT6_PACKAGE = "ip6tables kmod-ipt-nat6"
-CAPTIVE_PORTAL_PACKAGE = "opennds php7-cli php7-mod-openssl php7-cgi php7"
-MDNS_PACKAGE = "avahi-utils avahi-daemon-service-http avahi-daemon-service-ssh libavahi-client avahi-dbus-daemon"
-STUNNEL_CONFIG_PATH = "/etc/stunnel/DoTServer.conf"
-HISTORY_CONFIG_PATH = "/etc/dirty_configs"
-PPTPD_OPTION_PATH = "/etc/ppp/options.pptpd"
-XL2TPD_CONFIG_PATH = "/etc/xl2tpd/xl2tpd.conf"
-XL2TPD_OPTION_CONFIG_PATH = "/etc/ppp/options.xl2tpd"
-FIREWALL_CUSTOM_OPTION_PATH = "/etc/firewall.user"
-PPP_CHAP_SECRET_PATH = "/etc/ppp/chap-secrets"
-IKEV2_VPN_CERT_KEYS_PATH = "/var/ikev2_cert.sh"
-TCPDUMP_DIR = "/tmp/tcpdump/"
-LOCALHOST = "192.168.1.1"
-DEFAULT_PACKAGE_INSTALL_TIMEOUT = 200
-
-
-class NetworkSettings(object):
- """Class for network settings.
-
- Attributes:
- ssh: ssh connection object.
- ssh_settings: ssh settings for AccessPoint.
-        service_manager: Object managing service configuration.
- user: username for ssh.
- ip: ip address for AccessPoint.
- log: Logging object for AccessPoint.
- config: A list to store changes on network settings.
- firewall_rules_list: A list of firewall rule name list.
- l2tp: profile for vpn l2tp server.
- """
-
- def __init__(self, ssh, ssh_settings, logger):
- """Initialize wireless settings.
-
- Args:
- ssh: ssh connection object.
- ssh_settings: ssh settings for AccessPoint.
- logger: Logging object for AccessPoint.
- """
- self.ssh = ssh
- self.service_manager = ServiceManager(ssh)
- self.ssh_settings = ssh_settings
- self.user = self.ssh_settings.username
- self.ip = self.ssh_settings.hostname
- self.log = logger
- self.config = set()
- self.firewall_rules_list = []
-        # self.config records each configuration change made on the AP. These
-        # keys are written to HISTORY_CONFIG_PATH before a subsystem is changed,
-        # which makes it easier to recover after an aborted test.
- self.update_firewall_rules_list()
- self.cleanup_network_settings()
- self.clear_tcpdump()
-
- def cleanup_network_settings(self):
- """Reset all changes on Access point."""
-
-        # Detect any changes that were not cleaned up.
- if self.file_exists(HISTORY_CONFIG_PATH):
- out = self.ssh.run(f"cat {HISTORY_CONFIG_PATH}").stdout
- if out:
- self.config = set(out.split("\n"))
-
- if self.config:
- temp = self.config.copy()
- for change in temp:
- change_list = change.split()
-
- command = change_list[0]
- args = change_list[1:]
- if command == "setup_dns_server":
- self.remove_dns_server()
- elif command == "setup_vpn_pptp_server":
- self.remove_vpn_pptp_server()
- elif command == "setup_vpn_l2tp_server":
- self.remove_vpn_l2tp_server()
- elif command == "disable_ipv6":
- self.enable_ipv6()
- elif command == "setup_ipv6_bridge":
- self.remove_ipv6_bridge()
- elif command == "default_dns":
- addr_list = str(change_list[1])
- self.del_default_dns(addr_list)
- elif command == "default_v6_dns":
- addr_list = str(change_list[1])
- self.del_default_v6_dns(addr_list)
- elif command == "ipv6_prefer_option":
- self.remove_ipv6_prefer_option()
- elif command == "block_dns_response":
- self.unblock_dns_response()
- elif command == "setup_mdns":
- self.remove_mdns()
- elif command == "add_dhcp_rapid_commit":
- self.remove_dhcp_rapid_commit()
- elif command == "setup_captive_portal":
- try:
- fas_port = int(change_list[1])
- except IndexError:
- fas_port = 1000
- self.remove_cpative_portal(fas_port)
- else:
- raise TypeError(f'Unknown command "{change}"')
-
- self.config = set()
-
- if self.file_exists(HISTORY_CONFIG_PATH):
- out = self.ssh.run(f"cat {HISTORY_CONFIG_PATH}").stdout
- if not out:
- self.ssh.run(f"rm {HISTORY_CONFIG_PATH}")
-
- def commit_changes(self):
- """Apply changes on Access point."""
- self.ssh.run("uci commit")
- self.service_manager.restart_services()
- self.create_config_file("\n".join(self.config), HISTORY_CONFIG_PATH)
-
- def package_install(self, package_list):
- """Install packages on OpenWrtAP via opkg If not installed.
-
- Args:
- package_list: package list to install.
- e.g. "pptpd kmod-mppe kmod-nf-nathelper-extra"
- """
- self.ssh.run("opkg update")
- for package_name in package_list.split(" "):
- if not self._package_installed(package_name):
- self.ssh.run(
- f"opkg install {package_name}",
- timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT,
- )
- self.log.info(f"Package: {package_name} installed.")
- else:
- self.log.info(
- f"Package: {package_name} skipped (already installed)."
- )
-
- def package_remove(self, package_list):
- """Remove packages on OpenWrtAP via opkg If existed.
-
- Args:
- package_list: package list to remove.
- """
- for package_name in package_list.split(" "):
- if self._package_installed(package_name):
- self.ssh.run(f"opkg remove {package_name}")
- self.log.info(f"Package: {package_name} removed.")
- else:
- self.log.info(f"No exist package {package_name} found.")
-
- def _package_installed(self, package_name):
- """Check if target package installed on OpenWrtAP.
-
- Args:
-            package_name: name of the package to check.
-
- Returns:
- True if installed.
- """
- if self.ssh.run(f"opkg list-installed {package_name}").stdout:
- return True
- return False
-
- def file_exists(self, abs_file_path):
- """Check if target file exist on specific path on OpenWrt.
-
- Args:
- abs_file_path: Absolute path for the file.
-
- Returns:
-            True if the file exists.
- """
- path, file_name = abs_file_path.rsplit("/", 1)
- if self.ssh.run(
- f"ls {path} | grep {file_name}", ignore_status=True
- ).stdout:
- return True
- return False
-
- def path_exists(self, abs_path):
- """Check if dir exist on OpenWrt.
-
- Args:
-            abs_path: absolute path of the directory.
- """
- try:
- self.ssh.run(f"ls {abs_path}")
- except:
- return False
- return True
-
- def create_folder(self, abs_path):
- """If dir not exist, create it.
-
- Args:
-            abs_path: absolute path of the folder to create.
- """
- if not self.path_exists(abs_path):
- self.ssh.run(f"mkdir {abs_path}")
- else:
- self.log.info(f"{abs_path} already existed.")
-
- def count(self, config, key):
- """Count in uci config.
-
- Args:
- config: config or section to research
- key: keywords to e.g. rule, domain
- Returns:
- Numbers of the count.
- """
- count = self.ssh.run(
- f"uci show {config} | grep ={key}", ignore_status=True
- ).stdout
- return len(count.split("\n"))
-
- def create_config_file(self, config, file_path):
- """Create config file. Overwrite if file already exist.
-
- Args:
- config: A string of content of config.
- file_path: Config's abs_path.
- """
- self.ssh.run(f'echo -e "{config}" > {file_path}')
-
- def replace_config_option(self, old_option, new_option, file_path):
- """Replace config option if pattern match.
-
- If find match pattern with old_option, then replace it with new_option.
- Else add new_option to the file.
-
- Args:
- old_option: the regexp pattern to replace.
- new_option: the option to add.
- file_path: Config's abs_path.
- """
- config = self.ssh.run(f"cat {file_path}").stdout
- config, count = re.subn(old_option, new_option, config)
- if not count:
- config = f"{config}\n{new_option}"
- self.create_config_file(config, file_path)
-
- def remove_config_option(self, option, file_path):
- """Remove option from config file.
-
- Args:
-            option: Option to remove. Supports regular expressions.
- file_path: Config's abs_path.
- Returns:
-            True if a matching option was found and removed, False otherwise.
- """
- config = self.ssh.run(f"cat {file_path}").stdout.split("\n")
- for line in config:
- count = re.subn(option, "", line)[1]
- if count > 0:
- config.remove(line)
- self.create_config_file("\n".join(config), file_path)
- return True
- self.log.warning("No match option to remove.")
- return False
-
- def setup_dns_server(self, domain_name):
- """Setup DNS server on OpenWrtAP.
-
- Args:
- domain_name: Local dns domain name.
- """
- self.config.add("setup_dns_server")
- self.log.info(f"Setup DNS server with domain name {domain_name}")
- self.ssh.run(f"uci set dhcp.@dnsmasq[0].local='/{domain_name}/'")
- self.ssh.run(f"uci set dhcp.@dnsmasq[0].domain='{domain_name}'")
- self.add_resource_record(domain_name, self.ip)
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- # Check stunnel package is installed
- self.package_install("stunnel")
- self.service_manager.stop(SERVICE_STUNNEL)
- self.service_manager.disable(SERVICE_STUNNEL)
-
- # Enable stunnel
- self.create_stunnel_config()
- self.ssh.run("stunnel /etc/stunnel/DoTServer.conf")
-
- def remove_dns_server(self):
- """Remove DNS server on OpenWrtAP."""
- if self.file_exists("/var/run/stunnel.pid"):
- self.ssh.run("kill $(cat /var/run/stunnel.pid)")
- self.ssh.run("uci set dhcp.@dnsmasq[0].local='/lan/'")
- self.ssh.run("uci set dhcp.@dnsmasq[0].domain='lan'")
- self.clear_resource_record()
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.config.discard("setup_dns_server")
- self.commit_changes()
-
- def add_resource_record(self, domain_name, domain_ip):
- """Add resource record.
-
- Args:
- domain_name: A string for domain name.
- domain_ip: A string for domain ip.
- """
- self.ssh.run("uci add dhcp domain")
- self.ssh.run(f"uci set dhcp.@domain[-1].name='{domain_name}'")
- self.ssh.run(f"uci set dhcp.@domain[-1].ip='{domain_ip}'")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
-
- def del_resource_record(self):
- """Delete the last resource record."""
- self.ssh.run("uci delete dhcp.@domain[-1]")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
-
- def clear_resource_record(self):
- """Delete the all resource record."""
- rr = self.ssh.run(
- "uci show dhcp | grep =domain", ignore_status=True
- ).stdout
- if rr:
- for _ in rr.split("\n"):
- self.del_resource_record()
- self.service_manager.need_restart(SERVICE_DNSMASQ)
-
- def create_stunnel_config(self):
- """Create config for stunnel service."""
- stunnel_config = [
- "pid = /var/run/stunnel.pid",
- "[dns]",
- "accept = 853",
- "connect = 127.0.0.1:53",
- "cert = /etc/stunnel/fullchain.pem",
- "key = /etc/stunnel/privkey.pem",
- ]
- config_string = "\n".join(stunnel_config)
- self.create_config_file(config_string, STUNNEL_CONFIG_PATH)
-
- def setup_vpn_pptp_server(self, local_ip, user, password):
- """Setup pptp vpn server on OpenWrt.
-
- Args:
- local_ip: local pptp server ip address.
- user: username for pptp user.
- password: password for pptp user.
- """
- # Install pptp service
- self.package_install(PPTP_PACKAGE)
-
- self.config.add("setup_vpn_pptp_server")
- # Edit /etc/config/pptpd & /etc/ppp/options.pptpd
- self.setup_pptpd(local_ip, user, password)
- # Edit /etc/config/firewall & /etc/firewall.user
- self.setup_firewall_rules_for_pptp()
- # Enable service
- self.service_manager.enable(SERVICE_PPTPD)
- self.service_manager.need_restart(SERVICE_PPTPD)
- self.service_manager.need_restart(SERVICE_FIREWALL)
- self.commit_changes()
-
- def remove_vpn_pptp_server(self):
- """Remove pptp vpn server on OpenWrt."""
- # Edit /etc/config/pptpd
- self.restore_pptpd()
- # Edit /etc/config/firewall & /etc/firewall.user
- self.restore_firewall_rules_for_pptp()
- # Disable service
- self.service_manager.disable(SERVICE_PPTPD)
- self.service_manager.need_restart(SERVICE_PPTPD)
- self.service_manager.need_restart(SERVICE_FIREWALL)
- self.config.discard("setup_vpn_pptp_server")
- self.commit_changes()
-
- self.package_remove(PPTP_PACKAGE)
- self.ssh.run("rm /etc/ppp/options.pptpd")
- self.ssh.run("rm /etc/config/pptpd")
-
- def setup_pptpd(self, local_ip, username, password, ms_dns="8.8.8.8"):
- """Setup pptpd config for ip addr and account.
-
- Args:
- local_ip: vpn server address
- username: pptp vpn username
- password: pptp vpn password
- ms_dns: DNS server
- """
-        # Calculate the start of the remote ip pool.
-        # e.g. local_ip = 10.10.10.9
-        #      remote ip pool = 10.10.10.10-250
- remote_ip = local_ip.split(".")
- remote_ip.append(str(int(remote_ip.pop(-1)) + 1))
- remote_ip = ".".join(remote_ip)
- # Enable pptp service and set ip addr
- self.ssh.run("uci set pptpd.pptpd.enabled=1")
- self.ssh.run(f"uci set pptpd.pptpd.localip='{local_ip}'")
- self.ssh.run(f"uci set pptpd.pptpd.remoteip='{remote_ip}-250'")
-
- # Setup pptp service account
- self.ssh.run(f"uci set pptpd.@login[0].username='{username}'")
- self.ssh.run(f"uci set pptpd.@login[0].password='{password}'")
- self.service_manager.need_restart(SERVICE_PPTPD)
-
- self.replace_config_option(
- r"#*ms-dns \d+.\d+.\d+.\d+", f"ms-dns {ms_dns}", PPTPD_OPTION_PATH
- )
- self.replace_config_option(
- "(#no)*proxyarp", "proxyarp", PPTPD_OPTION_PATH
- )
-
- def restore_pptpd(self):
- """Disable pptpd."""
- self.ssh.run("uci set pptpd.pptpd.enabled=0")
- self.remove_config_option(
- r"\S+ pptp-server \S+ \*", PPP_CHAP_SECRET_PATH
- )
- self.service_manager.need_restart(SERVICE_PPTPD)
-
- def setup_vpn_l2tp_server(
- self,
- vpn_server_hostname,
- vpn_server_address,
- vpn_username,
- vpn_password,
- psk_secret,
- server_name,
- country,
- org,
- ):
- """Setup l2tp vpn server on OpenWrt.
-
- Args:
- vpn_server_hostname: vpn server domain name
- vpn_server_address: vpn server addr
- vpn_username: vpn account
- vpn_password: vpn password
- psk_secret: psk for ipsec
-            server_name: vpn server name registered in OpenWrt
- country: country code for generate cert keys.
- org: Organization name for generate cert keys.
- """
- self.l2tp = network_const.VpnL2tp(
- vpn_server_hostname,
- vpn_server_address,
- vpn_username,
- vpn_password,
- psk_secret,
- server_name,
- )
-
- self.package_install(L2TP_PACKAGE)
- self.config.add("setup_vpn_l2tp_server")
-
- # /etc/strongswan.conf: Strongswan configuration file
- self.setup_strongswan()
- # /etc/ipsec.conf /etc/ipsec.secrets
- self.setup_ipsec()
- # /etc/xl2tpd/xl2tpd.conf & /etc/ppp/options.xl2tpd
- self.setup_xl2tpd()
- # /etc/ppp/chap-secrets
- self.setup_ppp_secret()
- # /etc/config/firewall & /etc/firewall.user
- self.setup_firewall_rules_for_l2tp()
- # setup vpn server local ip
- self.setup_vpn_local_ip()
- # generate cert and key for rsa
- if self.l2tp.name == "ikev2-server":
- self.generate_ikev2_vpn_cert_keys(country, org)
- self.add_resource_record(self.l2tp.hostname, LOCALHOST)
- else:
- self.generate_vpn_cert_keys(country, org)
- # restart service
- self.service_manager.need_restart(SERVICE_IPSEC)
- self.service_manager.need_restart(SERVICE_XL2TPD)
- self.service_manager.need_restart(SERVICE_FIREWALL)
- self.commit_changes()
-
- def remove_vpn_l2tp_server(self):
- """Remove l2tp vpn server on OpenWrt."""
- self.config.discard("setup_vpn_l2tp_server")
- self.restore_firewall_rules_for_l2tp()
- self.remove_vpn_local_ip()
- if self.l2tp.name == "ikev2-server":
- self.clear_resource_record()
- self.service_manager.need_restart(SERVICE_IPSEC)
- self.service_manager.need_restart(SERVICE_XL2TPD)
- self.service_manager.need_restart(SERVICE_FIREWALL)
- self.commit_changes()
- self.package_remove(L2TP_PACKAGE)
- if hasattr(self, "l2tp"):
- delattr(self, "l2tp")
-
- def setup_strongswan(self, dns="8.8.8.8"):
- """Setup strongswan config."""
- config = [
- "charon {",
- " load_modular = yes",
- " plugins {",
- " include strongswan.d/charon/*.conf",
- " }",
- f" dns1={dns}",
- "}",
- ]
- self.create_config_file("\n".join(config), "/etc/strongswan.conf")
-
- def setup_ipsec(self):
- """Setup ipsec config."""
-
- config: list[str] = []
-
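-        # Helper to flatten one conn dict into ipsec.conf lines. When
-        # rightsourceip is set, the profile also hands out client addresses from
-        # a /26 pool in the VPN server's subnet.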
- def load_ipsec_config(data, rightsourceip=False):
- for i in data.keys():
- config.append(i)
- for j in data[i].keys():
- config.append(f"\t {j}={data[i][j]}")
- if rightsourceip:
- config.append(
- f"\t rightsourceip={self.l2tp.address.rsplit('.', 1)[0]}.16/26"
- )
- config.append("")
-
- load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2, True)
- load_ipsec_config(network_const.IPSEC_IKEV2_PSK, True)
- load_ipsec_config(network_const.IPSEC_IKEV2_RSA, True)
- load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2_HOSTNAME, True)
- load_ipsec_config(network_const.IPSEC_IKEV2_PSK_HOSTNAME, True)
- load_ipsec_config(network_const.IPSEC_IKEV2_RSA_HOSTNAME, True)
- load_ipsec_config(network_const.IPSEC_CONF)
- load_ipsec_config(network_const.IPSEC_L2TP_PSK)
- load_ipsec_config(network_const.IPSEC_L2TP_RSA)
- load_ipsec_config(network_const.IPSEC_HYBRID_RSA, True)
- load_ipsec_config(network_const.IPSEC_XAUTH_PSK, True)
- load_ipsec_config(network_const.IPSEC_XAUTH_RSA, True)
- self.create_config_file("\n".join(config), "/etc/ipsec.conf")
-
- ipsec_secret = []
- ipsec_secret.append(r": PSK \"%s\"" % self.l2tp.psk_secret)
- ipsec_secret.append(r": RSA \"%s\"" % "serverKey.der")
- ipsec_secret.append(
- r"%s : XAUTH \"%s\"" % (self.l2tp.username, self.l2tp.password)
- )
- self.create_config_file("\n".join(ipsec_secret), "/etc/ipsec.secrets")
-
- def setup_xl2tpd(self, ip_range=20):
- """Setup xl2tpd config."""
- net_id, host_id = self.l2tp.address.rsplit(".", 1)
- xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL)
- xl2tpd_conf.append(f"auth file = {PPP_CHAP_SECRET_PATH}")
- xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS)
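-        # Offer clients a pool of ip_range consecutive addresses starting at the
-        # L2TP server's own host address.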
- xl2tpd_conf.append(
- f"ip range = {net_id}.{host_id}-{net_id}.{str(int(host_id) + ip_range)}"
- )
- xl2tpd_conf.append(f"local ip = {self.l2tp.address}")
- xl2tpd_conf.append(f"name = {self.l2tp.name}")
- xl2tpd_conf.append(f"pppoptfile = {XL2TPD_OPTION_CONFIG_PATH}")
-
- self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH)
- xl2tpd_option = list(network_const.XL2TPD_OPTION)
- xl2tpd_option.append(f"name {self.l2tp.name}")
- self.create_config_file(
- "\n".join(xl2tpd_option), XL2TPD_OPTION_CONFIG_PATH
- )
-
- def setup_ppp_secret(self):
- self.replace_config_option(
- r"\S+ %s \S+ \*" % self.l2tp.name,
- f"{self.l2tp.username} {self.l2tp.name} {self.l2tp.password} *",
- PPP_CHAP_SECRET_PATH,
- )
-
- def generate_vpn_cert_keys(self, country, org):
- """Generate cert and keys for vpn server."""
- rsa = "--type rsa"
- lifetime = "--lifetime 365"
- size = "--size 4096"
-
- self.ssh.run(f"ipsec pki --gen {rsa} {size} --outform der > caKey.der")
- self.ssh.run(
- "ipsec pki --self --ca %s --in caKey.der %s --dn "
- '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
- % (lifetime, rsa, country, org, self.l2tp.hostname)
- )
- self.ssh.run(
- f"ipsec pki --gen {size} {rsa} --outform der > serverKey.der"
- )
- self.ssh.run(
- "ipsec pki --pub --in serverKey.der %s | ipsec pki "
- "--issue %s --cacert caCert.der --cakey caKey.der "
- '--dn "C=%s, O=%s, CN=%s" --san %s --flag serverAuth'
- " --flag ikeIntermediate --outform der > serverCert.der"
- % (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST)
- )
- self.ssh.run(
- f"ipsec pki --gen {size} {rsa} --outform der > clientKey.der"
- )
- self.ssh.run(
- "ipsec pki --pub --in clientKey.der %s | ipsec pki "
- "--issue %s --cacert caCert.der --cakey caKey.der "
- '--dn "C=%s, O=%s, CN=%s@%s" --outform der > '
- "clientCert.der"
- % (
- rsa,
- lifetime,
- country,
- org,
- self.l2tp.username,
- self.l2tp.hostname,
- )
- )
-
- self.ssh.run(
- "openssl rsa -inform DER -in clientKey.der"
- " -out clientKey.pem -outform PEM"
- )
- self.ssh.run(
- "openssl x509 -inform DER -in clientCert.der"
- " -out clientCert.pem -outform PEM"
- )
- self.ssh.run(
- "openssl x509 -inform DER -in caCert.der"
- " -out caCert.pem -outform PEM"
- )
- self.ssh.run(
- "openssl pkcs12 -in clientCert.pem -inkey clientKey.pem"
- " -certfile caCert.pem -export -out clientPkcs.p12 -passout pass:"
- )
-
- self.ssh.run("mv caCert.pem /etc/ipsec.d/cacerts/")
- self.ssh.run("mv *Cert* /etc/ipsec.d/certs/")
- self.ssh.run("mv *Key* /etc/ipsec.d/private/")
- if not self.path_exists("/www/downloads/"):
- self.ssh.run("mkdir /www/downloads/")
- self.ssh.run("mv clientPkcs.p12 /www/downloads/")
- self.ssh.run("chmod 664 /www/downloads/clientPkcs.p12")
-
- def generate_ikev2_vpn_cert_keys(self, country, org):
- rsa = "--type rsa"
- lifetime = "--lifetime 365"
- size = "--size 4096"
-
- if not self.path_exists("/www/downloads/"):
- self.ssh.run("mkdir /www/downloads/")
-
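-        # Unlike generate_vpn_cert_keys, this variant writes all pki/openssl
-        # commands to a helper script first, presumably so the escaped quotes are
-        # interpreted by the remote shell when the script runs.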
- ikev2_vpn_cert_keys = [
- f"ipsec pki --gen {rsa} {size} --outform der > caKey.der",
- "ipsec pki --self --ca %s --in caKey.der %s --dn "
- '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
- % (lifetime, rsa, country, org, self.l2tp.hostname),
- f"ipsec pki --gen {size} {rsa} --outform der > serverKey.der",
- "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s "
- r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" "
- "--san %s --san %s --flag serverAuth --flag ikeIntermediate "
- "--outform der > serverCert.der"
- % (
- rsa,
- lifetime,
- country,
- org,
- self.l2tp.hostname,
- LOCALHOST,
- self.l2tp.hostname,
- ),
- f"ipsec pki --gen {size} {rsa} --outform der > clientKey.der",
- "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s "
- r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" "
- r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der "
- "> clientCert.der"
- % (
- rsa,
- lifetime,
- country,
- org,
- self.l2tp.username,
- self.l2tp.hostname,
- self.l2tp.username,
- self.l2tp.username,
- LOCALHOST,
- self.l2tp.username,
- self.l2tp.hostname,
- ),
- "openssl rsa -inform DER -in clientKey.der "
- "-out clientKey.pem -outform PEM",
- "openssl x509 -inform DER -in clientCert.der "
- "-out clientCert.pem -outform PEM",
- "openssl x509 -inform DER -in caCert.der "
- "-out caCert.pem -outform PEM",
- "openssl pkcs12 -in clientCert.pem -inkey clientKey.pem "
- "-certfile caCert.pem -export -out clientPkcs.p12 -passout pass:",
- "mv caCert.pem /etc/ipsec.d/cacerts/",
- "mv *Cert* /etc/ipsec.d/certs/",
- "mv *Key* /etc/ipsec.d/private/",
- "mv clientPkcs.p12 /www/downloads/",
- "chmod 664 /www/downloads/clientPkcs.p12",
- ]
- file_string = "\n".join(ikev2_vpn_cert_keys)
- self.create_config_file(file_string, IKEV2_VPN_CERT_KEYS_PATH)
-
- self.ssh.run(f"chmod +x {IKEV2_VPN_CERT_KEYS_PATH}")
- self.ssh.run(f"{IKEV2_VPN_CERT_KEYS_PATH}")
-
- def update_firewall_rules_list(self):
- """Update rule list in /etc/config/firewall."""
- new_rules_list = []
- for i in range(self.count("firewall", "rule")):
- rule = self.ssh.run(f"uci get firewall.@rule[{i}].name").stdout
- new_rules_list.append(rule)
- self.firewall_rules_list = new_rules_list
-
- def setup_firewall_rules_for_pptp(self):
- """Setup firewall for vpn pptp server."""
- self.update_firewall_rules_list()
- if "pptpd" not in self.firewall_rules_list:
- self.ssh.run("uci add firewall rule")
- self.ssh.run("uci set firewall.@rule[-1].name='pptpd'")
- self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
- self.ssh.run("uci set firewall.@rule[-1].proto='tcp'")
- self.ssh.run("uci set firewall.@rule[-1].dest_port='1723'")
- self.ssh.run("uci set firewall.@rule[-1].family='ipv4'")
- self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-
- if "GRP" not in self.firewall_rules_list:
- self.ssh.run("uci add firewall rule")
- self.ssh.run("uci set firewall.@rule[-1].name='GRP'")
- self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
- self.ssh.run("uci set firewall.@rule[-1].src='wan'")
- self.ssh.run("uci set firewall.@rule[-1].proto='47'")
-
- iptable_rules = list(network_const.FIREWALL_RULES_FOR_PPTP)
- self.add_custom_firewall_rules(iptable_rules)
- self.service_manager.need_restart(SERVICE_FIREWALL)
-
- def restore_firewall_rules_for_pptp(self):
- """Restore firewall for vpn pptp server."""
- self.update_firewall_rules_list()
- if "pptpd" in self.firewall_rules_list:
- self.ssh.run(
- f"uci del firewall.@rule[{self.firewall_rules_list.index('pptpd')}]"
- )
- self.update_firewall_rules_list()
- if "GRP" in self.firewall_rules_list:
- self.ssh.run(
- f"uci del firewall.@rule[{self.firewall_rules_list.index('GRP')}]"
- )
- self.remove_custom_firewall_rules()
- self.service_manager.need_restart(SERVICE_FIREWALL)
-
- def setup_firewall_rules_for_l2tp(self):
- """Setup firewall for vpn l2tp server."""
- self.update_firewall_rules_list()
- if "ipsec esp" not in self.firewall_rules_list:
- self.ssh.run("uci add firewall rule")
- self.ssh.run("uci set firewall.@rule[-1].name='ipsec esp'")
- self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
- self.ssh.run("uci set firewall.@rule[-1].proto='esp'")
- self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-
- if "ipsec nat-t" not in self.firewall_rules_list:
- self.ssh.run("uci add firewall rule")
- self.ssh.run("uci set firewall.@rule[-1].name='ipsec nat-t'")
- self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
- self.ssh.run("uci set firewall.@rule[-1].src='wan'")
- self.ssh.run("uci set firewall.@rule[-1].proto='udp'")
- self.ssh.run("uci set firewall.@rule[-1].dest_port='4500'")
-
- if "auth header" not in self.firewall_rules_list:
- self.ssh.run("uci add firewall rule")
- self.ssh.run("uci set firewall.@rule[-1].name='auth header'")
- self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
- self.ssh.run("uci set firewall.@rule[-1].src='wan'")
- self.ssh.run("uci set firewall.@rule[-1].proto='ah'")
-
- net_id = self.l2tp.address.rsplit(".", 1)[0]
- iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP)
- iptable_rules.append(f"iptables -A FORWARD -s {net_id}.0/24 -j ACCEPT")
- iptable_rules.append(
- f"iptables -t nat -A POSTROUTING -s {net_id}.0/24 -o eth0.2 -j MASQUERADE"
- )
-
- self.add_custom_firewall_rules(iptable_rules)
- self.service_manager.need_restart(SERVICE_FIREWALL)
-
- def restore_firewall_rules_for_l2tp(self):
- """Restore firewall for vpn l2tp server."""
- self.update_firewall_rules_list()
- if "ipsec esp" in self.firewall_rules_list:
- self.ssh.run(
- f"uci del firewall.@rule[{self.firewall_rules_list.index('ipsec esp')}]"
- )
- self.update_firewall_rules_list()
- if "ipsec nat-t" in self.firewall_rules_list:
- self.ssh.run(
- "uci del firewall.@rule[%s]"
- % self.firewall_rules_list.index("ipsec nat-t")
- )
- self.update_firewall_rules_list()
- if "auth header" in self.firewall_rules_list:
- self.ssh.run(
- "uci del firewall.@rule[%s]"
- % self.firewall_rules_list.index("auth header")
- )
- self.remove_custom_firewall_rules()
- self.service_manager.need_restart(SERVICE_FIREWALL)
-
- def add_custom_firewall_rules(self, rules):
- """Backup current custom rules and replace with arguments.
-
- Args:
- rules: A list of iptable rules to apply.
- """
- backup_file_path = f"{FIREWALL_CUSTOM_OPTION_PATH}.backup"
- if not self.file_exists(backup_file_path):
- self.ssh.run(f"mv {FIREWALL_CUSTOM_OPTION_PATH} {backup_file_path}")
- for rule in rules:
- self.ssh.run(f"echo {rule} >> {FIREWALL_CUSTOM_OPTION_PATH}")
-
- def remove_custom_firewall_rules(self):
- """Clean up and recover custom firewall rules."""
- backup_file_path = f"{FIREWALL_CUSTOM_OPTION_PATH}.backup"
- if self.file_exists(backup_file_path):
- self.ssh.run(f"mv {backup_file_path} {FIREWALL_CUSTOM_OPTION_PATH}")
- else:
- self.log.debug(f"Did not find {backup_file_path}")
- self.ssh.run(f"echo > {FIREWALL_CUSTOM_OPTION_PATH}")
-
- def disable_pptp_service(self):
- """Disable pptp service."""
- self.package_remove(PPTP_PACKAGE)
-
- def setup_vpn_local_ip(self):
- """Setup VPN Server local ip on OpenWrt for client ping verify."""
- self.ssh.run("uci set network.lan2=interface")
- self.ssh.run("uci set network.lan2.type=bridge")
- self.ssh.run("uci set network.lan2.ifname=eth1.2")
- self.ssh.run("uci set network.lan2.proto=static")
- self.ssh.run(f'uci set network.lan2.ipaddr="{self.l2tp.address}"')
- self.ssh.run("uci set network.lan2.netmask=255.255.255.0")
- self.ssh.run("uci set network.lan2=interface")
- self.service_manager.reload(SERVICE_NETWORK)
- self.commit_changes()
-
- def remove_vpn_local_ip(self):
- """Discard vpn local ip on OpenWrt."""
- self.ssh.run("uci delete network.lan2")
- self.service_manager.reload(SERVICE_NETWORK)
- self.commit_changes()
-
- def enable_ipv6(self):
- """Enable ipv6 on OpenWrt."""
- self.ssh.run("uci set network.lan.ipv6=1")
- self.ssh.run("uci set network.wan.ipv6=1")
- self.service_manager.enable("odhcpd")
- self.service_manager.reload(SERVICE_NETWORK)
- self.config.discard("disable_ipv6")
- self.commit_changes()
-
- def disable_ipv6(self):
- """Disable ipv6 on OpenWrt."""
- self.config.add("disable_ipv6")
- self.ssh.run("uci set network.lan.ipv6=0")
- self.ssh.run("uci set network.wan.ipv6=0")
- self.service_manager.disable("odhcpd")
- self.service_manager.reload(SERVICE_NETWORK)
- self.commit_changes()
-
- def setup_ipv6_bridge(self):
- """Setup ipv6 bridge for client have ability to access network."""
- self.config.add("setup_ipv6_bridge")
-
- self.ssh.run("uci set dhcp.lan.dhcpv6=relay")
- self.ssh.run("uci set dhcp.lan.ra=relay")
- self.ssh.run("uci set dhcp.lan.ndp=relay")
-
- self.ssh.run("uci set dhcp.wan6=dhcp")
- self.ssh.run("uci set dhcp.wan6.dhcpv6=relay")
- self.ssh.run("uci set dhcp.wan6.ra=relay")
- self.ssh.run("uci set dhcp.wan6.ndp=relay")
- self.ssh.run("uci set dhcp.wan6.master=1")
- self.ssh.run("uci set dhcp.wan6.interface=wan6")
-
- # Enable service
- self.service_manager.need_restart(SERVICE_ODHCPD)
- self.commit_changes()
-
- def remove_ipv6_bridge(self):
- """Discard ipv6 bridge on OpenWrt."""
- if "setup_ipv6_bridge" in self.config:
- self.config.discard("setup_ipv6_bridge")
-
- self.ssh.run("uci set dhcp.lan.dhcpv6=server")
- self.ssh.run("uci set dhcp.lan.ra=server")
- self.ssh.run("uci delete dhcp.lan.ndp")
-
- self.ssh.run("uci delete dhcp.wan6")
-
- self.service_manager.need_restart(SERVICE_ODHCPD)
- self.commit_changes()
-
- def _add_dhcp_option(self, args):
- self.ssh.run(f'uci add_list dhcp.lan.dhcp_option="{args}"')
-
- def _remove_dhcp_option(self, args):
- self.ssh.run(f'uci del_list dhcp.lan.dhcp_option="{args}"')
-
- def add_default_dns(self, addr_list):
- """Add default dns server for client.
-
- Args:
- addr_list: dns ip address for Openwrt client.
- """
- self._add_dhcp_option(f'6,{",".join(addr_list)}')
- self.config.add(f"default_dns {addr_list}")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- def del_default_dns(self, addr_list: str):
- """Remove default dns server for client.
-
- Args:
- addr_list: list of dns ip address for Openwrt client.
- """
- self._remove_dhcp_option(f"6,{addr_list}")
- self.config.discard(f"default_dns {addr_list}")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- def add_default_v6_dns(self, addr_list: str):
- """Add default v6 dns server for client.
-
- Args:
- addr_list: list of dns ip address for Openwrt client.
- """
- self.ssh.run(f'uci add_list dhcp.lan.dns="{addr_list}"')
- self.config.add(f"default_v6_dns {addr_list}")
- self.service_manager.need_restart(SERVICE_ODHCPD)
- self.commit_changes()
-
- def del_default_v6_dns(self, addr_list: str):
- """Del default v6 dns server for client.
-
- Args:
- addr_list: list of dns ip address for Openwrt client.
- """
- self.ssh.run(f'uci del_list dhcp.lan.dns="{addr_list}"')
- self.config.add(f"default_v6_dns {addr_list}")
- self.service_manager.need_restart(SERVICE_ODHCPD)
- self.commit_changes()
-
- def add_ipv6_prefer_option(self):
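-        # DHCP option 108 (IPv6-Only Preferred, RFC 8925): clients that support
-        # IPv6-only operation may disable IPv4 for the advertised 1800 seconds.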
- self._add_dhcp_option("108,1800i")
- self.config.add("ipv6_prefer_option")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- def remove_ipv6_prefer_option(self):
- self._remove_dhcp_option("108,1800i")
- self.config.discard("ipv6_prefer_option")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- def add_dhcp_rapid_commit(self):
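-        # Enable DHCPv4 rapid commit (RFC 4039) so the server can answer a
-        # DISCOVER directly with an ACK in a two-message exchange.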
- self.create_config_file("dhcp-rapid-commit\n", "/etc/dnsmasq.conf")
- self.config.add("add_dhcp_rapid_commit")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- def remove_dhcp_rapid_commit(self):
- self.create_config_file("", "/etc/dnsmasq.conf")
- self.config.discard("add_dhcp_rapid_commit")
- self.service_manager.need_restart(SERVICE_DNSMASQ)
- self.commit_changes()
-
- def start_tcpdump(self, test_name, args="", interface="br-lan"):
- """ "Start tcpdump on OpenWrt.
-
- Args:
- test_name: Test name for create tcpdump file name.
- args: Option args for tcpdump.
- interface: Interface to logging.
- Returns:
- tcpdump_file_name: tcpdump file name on OpenWrt.
- pid: tcpdump process id.
- """
- self.package_install("tcpdump")
- if not self.path_exists(TCPDUMP_DIR):
- self.ssh.run(f"mkdir {TCPDUMP_DIR}")
-        now = time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time()))
- tcpdump_file_name = f"openwrt_{test_name}_{now}.pcap"
- tcpdump_file_path = f"{TCPDUMP_DIR}{tcpdump_file_name}"
- cmd = f"tcpdump -i {interface} -s0 {args} -w {tcpdump_file_path}"
- self.ssh.run_async(cmd)
- pid = self._get_tcpdump_pid(tcpdump_file_name)
- if not pid:
- raise signals.TestFailure("Fail to start tcpdump on OpenWrt.")
-        # Wait so tcpdump does not miss the target packets.
- time.sleep(15)
- return tcpdump_file_name
-
- def stop_tcpdump(self, tcpdump_file_name, pull_dir=None):
- """Stop tcpdump on OpenWrt and pull the pcap file.
-
- Args:
- tcpdump_file_name: tcpdump file name on OpenWrt.
-            pull_dir: Local directory to pull the pcap file into. Keep None to skip pulling.
-        Returns:
-            Absolute path of the pcap file on the host, or None if not pulled.
- """
-        # Wait so tcpdump does not miss the target packets before stopping it.
- time.sleep(15)
- pid = self._get_tcpdump_pid(tcpdump_file_name)
- self.ssh.run(f"kill -9 {pid}", ignore_status=True)
- if self.path_exists(TCPDUMP_DIR) and pull_dir:
- tcpdump_path = f"{TCPDUMP_DIR}{tcpdump_file_name}"
- tcpdump_remote_path = f"{pull_dir}/{tcpdump_file_name}"
- tcpdump_local_path = f"{self.user}@{self.ip}:{tcpdump_path}"
- utils.exe_cmd(f"scp {tcpdump_local_path} {tcpdump_remote_path}")
-
- if self._get_tcpdump_pid(tcpdump_file_name):
- raise signals.TestFailure("Failed to stop tcpdump on OpenWrt.")
- if self.file_exists(tcpdump_path):
- self.ssh.run(f"rm -f {tcpdump_path}")
- return tcpdump_remote_path if pull_dir else None
-
- def clear_tcpdump(self):
- self.ssh.run("killall tcpdump", ignore_status=True)
- if self.ssh.run("pgrep tcpdump", ignore_status=True).stdout:
- raise signals.TestFailure("Failed to clean up tcpdump process.")
- if self.path_exists(TCPDUMP_DIR):
- self.ssh.run(f"rm -f {TCPDUMP_DIR}/*")
-
- def _get_tcpdump_pid(self, tcpdump_file_name):
- """Check tcpdump process on OpenWrt."""
- return self.ssh.run(
- f"pgrep -f {tcpdump_file_name}", ignore_status=True
- ).stdout
-
- def setup_mdns(self):
- self.config.add("setup_mdns")
- self.package_install(MDNS_PACKAGE)
- self.commit_changes()
-
- def remove_mdns(self):
- self.config.discard("setup_mdns")
- self.package_remove(MDNS_PACKAGE)
- self.commit_changes()
-
- def block_dns_response(self):
- self.config.add("block_dns_response")
- iptable_rules = list(network_const.FIREWALL_RULES_DISABLE_DNS_RESPONSE)
- self.add_custom_firewall_rules(iptable_rules)
- self.service_manager.need_restart(SERVICE_FIREWALL)
- self.commit_changes()
-
- def unblock_dns_response(self):
- self.config.discard("block_dns_response")
- self.remove_custom_firewall_rules()
- self.service_manager.need_restart(SERVICE_FIREWALL)
- self.commit_changes()
-
- def setup_captive_portal(self, fas_fdqn, fas_port=2080):
- """Create captive portal with Forwarding Authentication Service.
-
- Args:
-            fas_fdqn: FQDN of the captive portal page, added to the local dns server.
- fas_port: Port for captive portal page.
- """
- self.package_install(CAPTIVE_PORTAL_PACKAGE)
- self.config.add(f"setup_captive_portal {fas_port}")
- self.ssh.run("uci set opennds.@opennds[0].fas_secure_enabled=2")
- self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050")
- self.ssh.run(f"uci set opennds.@opennds[0].fasport={fas_port}")
- self.ssh.run(f"uci set opennds.@opennds[0].fasremotefqdn={fas_fdqn}")
- self.ssh.run('uci set opennds.@opennds[0].faspath="/nds/fas-aes.php"')
- self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890")
- self.service_manager.need_restart(SERVICE_OPENNDS)
- # Config uhttpd
- self.ssh.run("uci set uhttpd.main.interpreter=.php=/usr/bin/php-cgi")
- self.ssh.run(f"uci add_list uhttpd.main.listen_http=0.0.0.0:{fas_port}")
- self.ssh.run(f"uci add_list uhttpd.main.listen_http=[::]:{fas_port}")
- self.service_manager.need_restart(SERVICE_UHTTPD)
- # cp fas-aes.php
- self.create_folder("/www/nds/")
- self.ssh.run("cp /etc/opennds/fas-aes.php /www/nds")
- # Add fdqn
- self.add_resource_record(fas_fdqn, LOCALHOST)
- self.commit_changes()
-
- def remove_cpative_portal(self, fas_port: int = 2080):
- """Remove captive portal.
-
- Args:
- fas_port: Port for captive portal page.
- """
- # Remove package
- self.package_remove(CAPTIVE_PORTAL_PACKAGE)
- # Clean up config
- self.ssh.run("rm /etc/config/opennds")
- # Remove fdqn
- self.clear_resource_record()
- # Restore uhttpd
- self.ssh.run("uci del uhttpd.main.interpreter")
- self.ssh.run(
- f"uci del_list uhttpd.main.listen_http='0.0.0.0:{fas_port}'"
- )
- self.ssh.run(f"uci del_list uhttpd.main.listen_http='[::]:{fas_port}'")
- self.service_manager.need_restart(SERVICE_UHTTPD)
- # Clean web root
- self.ssh.run("rm -r /www/nds")
- self.config.discard(f"setup_captive_portal {fas_port}")
- self.commit_changes()
-
-
-class ServiceManager(object):
- """Class for service on OpenWrt.
-
- Attributes:
- ssh: ssh object for the AP.
-        _need_restart: Set of services that need to be restarted.
- """
-
- def __init__(self, ssh):
- self.ssh = ssh
- self._need_restart = set()
-
- def enable(self, service_name):
- """Enable service auto start."""
- self.ssh.run(f"/etc/init.d/{service_name} enable")
-
- def disable(self, service_name):
- """Disable service auto start."""
- self.ssh.run(f"/etc/init.d/{service_name} disable")
-
- def restart(self, service_name):
- """Restart the service."""
- self.ssh.run(f"/etc/init.d/{service_name} restart")
-
- def reload(self, service_name):
- """Restart the service."""
- self.ssh.run(f"/etc/init.d/{service_name} reload")
-
- def restart_services(self):
- """Restart all services need to restart."""
- for service in self._need_restart:
- if service == SERVICE_NETWORK:
- self.reload(service)
- self.restart(service)
- self._need_restart = set()
-
- def stop(self, service_name):
- """Stop the service."""
- self.ssh.run(f"/etc/init.d/{service_name} stop")
-
- def need_restart(self, service_name):
- self._need_restart.add(service_name)
diff --git a/packages/antlion/controllers/openwrt_lib/openwrt_constants.py b/packages/antlion/controllers/openwrt_lib/openwrt_constants.py
deleted file mode 100644
index 848b22b..0000000
--- a/packages/antlion/controllers/openwrt_lib/openwrt_constants.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-SYSTEM_INFO_CMD = "ubus call system board"
-
-
-class OpenWrtWifiSecurity:
- # Used by OpenWrt AP
- WPA_PSK_DEFAULT = "psk"
- WPA_PSK_CCMP = "psk+ccmp"
- WPA_PSK_TKIP = "psk+tkip"
- WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
- WPA2_PSK_DEFAULT = "psk2"
- WPA2_PSK_CCMP = "psk2+ccmp"
- WPA2_PSK_TKIP = "psk2+tkip"
- WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"
-
-
-class OpenWrtWifiSetting:
- IFACE_2G = 2
- IFACE_5G = 3
-
-
-class OpenWrtModelMap:
- NETGEAR_R8000 = ("radio2", "radio1")
diff --git a/packages/antlion/controllers/openwrt_lib/wireless_config.py b/packages/antlion/controllers/openwrt_lib/wireless_config.py
deleted file mode 100644
index d97e197..0000000
--- a/packages/antlion/controllers/openwrt_lib/wireless_config.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Class for Wireless config."""
-
-from antlion.controllers.ap_lib.hostapd_security import OpenWRTEncryptionMode
-
-
-class WirelessConfig(object):
- """Creates an object to hold wireless config.
-
- Attributes:
- name: name of the wireless config
- ssid: SSID of the network.
- security: security of the wifi network.
- band: band of the wifi network.
- iface: network interface of the wifi network.
- password: password for psk network.
- wep_key: wep keys for wep network.
- wep_key_num: key number for wep network.
- radius_server_ip: IP address of radius server.
- radius_server_port: Port number of radius server.
- radius_server_secret: Secret key of radius server.
- hidden: Boolean, if the wifi network is hidden.
- ieee80211w: PMF bit of the wifi network.
- """
-
- def __init__(
- self,
- name: str,
- ssid: str,
- security: OpenWRTEncryptionMode,
- band: str,
- iface: str = "lan",
- password: str | None = None,
- wep_key: list[str] | None = None,
- wep_key_num: int = 1,
- radius_server_ip: str | None = None,
- radius_server_port: int | None = None,
- radius_server_secret: str | None = None,
- hidden: bool = False,
- ieee80211w: int | None = None,
- ):
- self.name = name
- self.ssid = ssid
- self.security = security
- self.band = band
- self.iface = iface
- self.password = password
- self.wep_key = wep_key
- self.wep_key_num = wep_key_num
- self.radius_server_ip = radius_server_ip
- self.radius_server_port = radius_server_port
- self.radius_server_secret = radius_server_secret
- self.hidden = hidden
- self.ieee80211w = ieee80211w
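A construction sketch for the class above; the config name, SSID, and password are illustrative, and since the OpenWRTEncryptionMode members are not shown in this file, the encryption mode is left to the caller:

from antlion.controllers.ap_lib import hostapd_constants
from antlion.controllers.ap_lib.hostapd_security import OpenWRTEncryptionMode
from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig

def make_psk_config(encryption: OpenWRTEncryptionMode) -> WirelessConfig:
    # Build a hidden 2.4 GHz PSK network; all literal values are placeholders.
    return WirelessConfig(
        name="wifi_psk",
        ssid="test_network",
        security=encryption,
        band=hostapd_constants.BAND_2G,
        password="password123",
        hidden=True,
    )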
diff --git a/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
deleted file mode 100644
index 6277553..0000000
--- a/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""Class to configure wireless settings."""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import time
-
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.openwrt_lib.network_settings import (
- SERVICE_DNSMASQ,
- ServiceManager,
-)
-from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig
-
-LEASE_FILE = "/tmp/dhcp.leases"
-OPEN_SECURITY = "none"
-PSK1_SECURITY = "psk"
-PSK_SECURITY = "psk2"
-WEP_SECURITY = "wep"
-ENT_SECURITY = "wpa2"
-OWE_SECURITY = "owe"
-SAE_SECURITY = "sae"
-SAEMIXED_SECURITY = "sae-mixed"
-ENABLE_RADIO = "0"
-DISABLE_RADIO = "1"
-ENABLE_HIDDEN = "1"
-RADIO_2G = "radio1"
-RADIO_5G = "radio0"
-
-
-class WirelessSettingsApplier(object):
- """Class for wireless settings.
-
- Attributes:
- ssh: ssh object for the AP.
- service_manager: Object to manage service configuration.
- wireless_configs: a list of
- antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
- channel_2g: channel for 2G band.
- channel_5g: channel for 5G band.
- """
-
- def __init__(
- self,
- ssh,
- configs,
- channel_2g,
- channel_5g,
- radio_2g=RADIO_2G,
- radio_5g=RADIO_5G,
- ):
- """Initialize wireless settings.
-
- Args:
- ssh: ssh connection object.
- configs: a list of
- antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
- channel_2g: channel for 2G band.
- channel_5g: channel for 5G band.
- """
- self.ssh = ssh
- self.service_manager = ServiceManager(ssh)
- self.wireless_configs: list[WirelessConfig] = configs
- self.channel_2g = channel_2g
- self.channel_5g = channel_5g
- self.radio_2g = radio_2g
- self.radio_5g = radio_5g
-
- def apply_wireless_settings(self):
- """Configure wireless settings from a list of configs."""
- default_2g_iface = f"default_{self.radio_2g}"
- default_5g_iface = f"default_{self.radio_5g}"
-
- # set channels for 2G and 5G bands
- self.ssh.run(
- f"uci set wireless.{self.radio_2g}.channel='{self.channel_2g}'"
- )
- self.ssh.run(
- f"uci set wireless.{self.radio_5g}.channel='{self.channel_5g}'"
- )
- if self.channel_5g == 165:
- self.ssh.run(f"uci set wireless.{self.radio_5g}.htmode='VHT20'")
- elif self.channel_5g == 132 or self.channel_5g == 136:
- self.ssh.run("iw reg set ZA")
- self.ssh.run(f"uci set wireless.{self.radio_5g}.htmode='VHT40'")
-
- if self.channel_2g == 13:
- self.ssh.run("iw reg set AU")
-
- # disable default OpenWrt SSID
- self.ssh.run(
- f"uci set wireless.{default_2g_iface}.disabled='{DISABLE_RADIO}'"
- )
- self.ssh.run(
- f"uci set wireless.{default_5g_iface}.disabled='{DISABLE_RADIO}'"
- )
-
- # Enable radios
- self.ssh.run(
- f"uci set wireless.{self.radio_2g}.disabled='{ENABLE_RADIO}'"
- )
- self.ssh.run(
- f"uci set wireless.{self.radio_5g}.disabled='{ENABLE_RADIO}'"
- )
-
- for config in self.wireless_configs:
- # configure open network
- if config.security == OPEN_SECURITY:
- if config.band == hostapd_constants.BAND_2G:
- self.ssh.run(
- f"uci set wireless.{default_2g_iface}.ssid='{config.ssid}'"
- )
- self.ssh.run(
- f"uci set wireless.{default_2g_iface}.disabled='{ENABLE_RADIO}'"
- )
- if config.hidden:
- self.ssh.run(
- "uci set wireless.%s.hidden='%s'"
- % (default_2g_iface, ENABLE_HIDDEN)
- )
- elif config.band == hostapd_constants.BAND_5G:
- self.ssh.run(
- f"uci set wireless.{default_5g_iface}.ssid='{config.ssid}'"
- )
- self.ssh.run(
- f"uci set wireless.{default_5g_iface}.disabled='{ENABLE_RADIO}'"
- )
- if config.hidden:
- self.ssh.run(
- "uci set wireless.%s.hidden='%s'"
- % (default_5g_iface, ENABLE_HIDDEN)
- )
- continue
-
- self.ssh.run(f"uci set wireless.{config.name}='wifi-iface'")
- if config.band == hostapd_constants.BAND_2G:
- self.ssh.run(
- f"uci set wireless.{config.name}.device='{self.radio_2g}'"
- )
- else:
- self.ssh.run(
- f"uci set wireless.{config.name}.device='{self.radio_5g}'"
- )
- self.ssh.run(
- f"uci set wireless.{config.name}.network='{config.iface}'"
- )
- self.ssh.run(f"uci set wireless.{config.name}.mode='ap'")
- self.ssh.run(f"uci set wireless.{config.name}.ssid='{config.ssid}'")
- self.ssh.run(
- f"uci set wireless.{config.name}.encryption='{config.security}'"
- )
- if (
- config.security == PSK_SECURITY
- or config.security == SAE_SECURITY
- or config.security == PSK1_SECURITY
- or config.security == SAEMIXED_SECURITY
- ):
- self.ssh.run(
- f"uci set wireless.{config.name}.key='{config.password}'"
- )
- elif config.security == WEP_SECURITY:
- self.ssh.run(
- "uci set wireless.%s.key%s='%s'"
- % (config.name, config.wep_key_num, config.wep_key)
- )
- self.ssh.run(
- f"uci set wireless.{config.name}.key='{config.wep_key_num}'"
- )
- elif config.security == ENT_SECURITY:
- self.ssh.run(
- "uci set wireless.%s.auth_secret='%s'"
- % (config.name, config.radius_server_secret)
- )
- self.ssh.run(
- "uci set wireless.%s.auth_server='%s'"
- % (config.name, config.radius_server_ip)
- )
- self.ssh.run(
- "uci set wireless.%s.auth_port='%s'"
- % (config.name, config.radius_server_port)
- )
- if config.ieee80211w:
- self.ssh.run(
- f"uci set wireless.{config.name}.ieee80211w='{config.ieee80211w}'"
- )
- if config.hidden:
- self.ssh.run(
- f"uci set wireless.{config.name}.hidden='{ENABLE_HIDDEN}'"
- )
-
- self.ssh.run("uci commit wireless")
- self.ssh.run(f"cp {LEASE_FILE} {LEASE_FILE}.tmp")
-
- def cleanup_wireless_settings(self):
- """Reset wireless settings to default."""
- self.ssh.run("wifi down")
- self.ssh.run("rm -f /etc/config/wireless")
- self.ssh.run("wifi config")
- if self.channel_5g == 132:
- self.ssh.run("iw reg set US")
- self.ssh.run(f"cp {LEASE_FILE}.tmp {LEASE_FILE}")
- self.service_manager.restart(SERVICE_DNSMASQ)
- time.sleep(9)
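A minimal sketch of driving the applier above; `ssh` is an established SSH connection to the AP and `configs` is a list of WirelessConfig objects, both created elsewhere, and the channels are illustrative:

from antlion.controllers.openwrt_lib.wireless_settings_applier import (
    WirelessSettingsApplier,
)

def bring_up_networks(ssh, configs):
    # Apply the given WirelessConfig list on channel 6 (2.4 GHz) and 36 (5 GHz).
    applier = WirelessSettingsApplier(ssh, configs, channel_2g=6, channel_5g=36)
    applier.apply_wireless_settings()
    return applier  # call cleanup_wireless_settings() when the test is done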
diff --git a/packages/antlion/controllers/packet_capture.py b/packages/antlion/controllers/packet_capture.py
deleted file mode 100755
index 147c4fd..0000000
--- a/packages/antlion/controllers/packet_capture.py
+++ /dev/null
@@ -1,338 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from __future__ import annotations
-
-import io
-import logging
-import os
-import threading
-import time
-from dataclasses import dataclass
-
-from mobly import asserts, logger
-
-from antlion.controllers.ap_lib.hostapd_constants import (
- CENTER_CHANNEL_MAP,
- FREQUENCY_MAP,
- VHT_CHANNEL,
-)
-from antlion.controllers.utils_lib.ssh import connection, formatter, settings
-from antlion.libs.proc.process import Process
-from antlion.types import ControllerConfig, Json
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "PacketCapture"
-BSS = "BSS"
-BSSID = "BSSID"
-FREQ = "freq"
-FREQUENCY = "frequency"
-LEVEL = "level"
-MON_2G = "mon0"
-MON_5G = "mon1"
-BAND_IFACE = {"2G": MON_2G, "5G": MON_5G}
-SCAN_IFACE = "wlan2"
-SCAN_TIMEOUT = 60
-SEP = ":"
-SIGNAL = "signal"
-SSID = "SSID"
-
-
-def create(configs: list[ControllerConfig]) -> list[PacketCapture]:
- return [PacketCapture(c) for c in configs]
-
-
-def destroy(objects: list[PacketCapture]) -> None:
- for pcap in objects:
- pcap.close()
-
-
-def get_info(objects: list[PacketCapture]) -> list[Json]:
- return [pcap.ssh_settings.hostname for pcap in objects]
-
-
-@dataclass(frozen=True)
-class PcapProperties:
- """Packet capture properties."""
-
- proc: Process
- """Process object of tcpdump."""
-
- pcap_fname: str
- """File name of the tcpdump output file."""
-
- pcap_file: io.BufferedRandom
- """File object for the tcpdump output file."""
-
-
-class PacketCaptureError(Exception):
- """Error related to Packet capture."""
-
-
-class PacketCapture(object):
- """Class representing packet capturer.
-
- An instance of this class creates and configures two interfaces for monitor
- mode ('mon0' for 2G and 'mon1' for 5G) and one dual band interface ('wlan2')
- for scanning for wifi networks.
-
- Attributes:
- pcap_properties: dict that specifies packet capture properties for a
- band.
- """
-
- def __init__(self, configs: ControllerConfig) -> None:
- """Initialize objects.
-
- Args:
- configs: config for the packet capture.
- """
- self.ssh_settings = settings.from_config(configs["ssh_config"])
- self.ssh = connection.SshConnection(self.ssh_settings)
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[PacketCapture|{self.ssh_settings.hostname}]",
- },
- )
-
- self._create_interface(MON_2G, "monitor")
- self._create_interface(MON_5G, "monitor")
- self.managed_mode = True
- result = self.ssh.run("ifconfig -a", ignore_status=True)
- if result.stderr or SCAN_IFACE not in result.stdout.decode("utf-8"):
- self.managed_mode = False
- if self.managed_mode:
- self._create_interface(SCAN_IFACE, "managed")
-
- self.pcap_properties: dict[str, PcapProperties] = {}
- self._pcap_stop_lock = threading.Lock()
-
- def _create_interface(self, iface: str, mode: str) -> None:
- """Create interface of monitor/managed mode.
-
- Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.
- """
- if mode == "monitor":
- self.ssh.run(f"ifconfig wlan{iface[-1]} down", ignore_status=True)
- self.ssh.run(f"iw dev {iface} del", ignore_status=True)
- self.ssh.run(
- f"iw phy{iface[-1]} interface add {iface} type {mode}",
- ignore_status=True,
- )
- self.ssh.run(f"ip link set {iface} up", ignore_status=True)
- result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
- if result.stderr or iface not in result.stdout.decode("utf-8"):
- raise PacketCaptureError(f"Failed to configure interface {iface}")
-
- def _cleanup_interface(self, iface: str) -> None:
- """Clean up monitor mode interfaces."""
- self.ssh.run(f"iw dev {iface} del", ignore_status=True)
- result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
- if not result.stderr or "No such device" not in result.stderr.decode(
- "utf-8"
- ):
- raise PacketCaptureError(
- f"Failed to cleanup monitor mode for {iface}"
- )
-
- def _parse_scan_results(
- self, scan_result: str
- ) -> list[dict[str, str | int]]:
- """Parses the scan dump output and returns list of dictionaries.
-
- Args:
- scan_result: scan dump output from scan on mon interface.
-
- Returns:
- A list of dictionaries, one per network found in the scan.
- The attributes of each dictionary are
- a.) SSID - SSID of the network.
- b.) LEVEL - signal level.
- c.) FREQUENCY - WiFi band the network is on.
- d.) BSSID - BSSID of the network.
- """
- scan_networks: list[dict[str, str | int]] = []
- network: dict[str, str | int] = {}
- for line in scan_result.splitlines():
- if SEP not in line:
- continue
- if BSS in line:
- network[BSSID] = line.split("(")[0].split()[-1]
- field, value = line.lstrip().rstrip().split(SEP)[0:2]
- value = value.lstrip()
- if SIGNAL in line:
- network[LEVEL] = int(float(value.split()[0]))
- elif FREQ in line:
- network[FREQUENCY] = int(value)
- elif SSID in line:
- network[SSID] = value
- scan_networks.append(network)
- network = {}
- return scan_networks
-
- def get_wifi_scan_results(self) -> list[dict[str, str | int]]:
- """Starts a wifi scan on wlan2 interface.
-
- Returns:
- List of dictionaries each representing a found network.
- """
- if not self.managed_mode:
- raise PacketCaptureError("Managed mode not setup")
- result = self.ssh.run(f"iw dev {SCAN_IFACE} scan")
- if result.stderr:
- raise PacketCaptureError("Failed to get scan dump")
- if not result.stdout:
- return []
- return self._parse_scan_results(result.stdout.decode("utf-8"))
-
- def start_scan_and_find_network(self, ssid: str) -> bool:
- """Start a wifi scan on wlan2 interface and find network.
-
- Args:
- ssid: SSID of the network.
-
- Returns:
- True if the network is found, False otherwise.
- """
- curr_time = time.time()
- while time.time() < curr_time + SCAN_TIMEOUT:
- found_networks = self.get_wifi_scan_results()
- for network in found_networks:
- if network[SSID] == ssid:
- return True
- time.sleep(3) # sleep before next scan
- return False
-
- def configure_monitor_mode(
- self, band: str, channel: int, bandwidth: int = 20
- ) -> bool:
- """Configure monitor mode.
-
- Args:
- band: band to configure monitor mode for.
- channel: channel to set for the interface.
- bandwidth: bandwidth for the VHT channel, one of 40, 80, or 160.
-
- Returns:
- True if configuration was successful, False otherwise.
- """
-
- band = band.upper()
- if band not in BAND_IFACE:
- self.log.error("Invalid band. Must be 2g/2G or 5g/5G")
- return False
-
- iface = BAND_IFACE[band]
- if bandwidth == 20:
- self.ssh.run(
- f"iw dev {iface} set channel {channel}", ignore_status=True
- )
- else:
- center_freq = None
- for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]:
- if channel in range(i, j + 1):
- center_freq = (FREQUENCY_MAP[i] + FREQUENCY_MAP[j]) / 2
- break
- asserts.assert_true(
- center_freq, "No match channel in VHT channel list."
- )
- self.ssh.run(
- "iw dev %s set freq %s %s %s"
- % (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
- ignore_status=True,
- )
-
- result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
- if result.stderr or f"channel {channel}" not in result.stdout.decode(
- "utf-8"
- ):
- self.log.error(f"Failed to configure monitor mode for {band}")
- return False
- return True
-
- def start_packet_capture(
- self, band: str, log_path: str, pcap_fname: str
- ) -> Process | None:
- """Start packet capture for band.
-
- band = 2G starts tcpdump on 'mon0' interface.
- band = 5G starts tcpdump on 'mon1' interface.
-
- Args:
- band: '2g'/'2G' or '5g'/'5G'.
- log_path: test log path to save the pcap file.
- pcap_fname: name of the pcap file.
-
- Returns:
- pcap_proc: Process object of the tcpdump, or None on failure.
- """
- band = band.upper()
- if band not in BAND_IFACE.keys() or band in self.pcap_properties:
- self.log.error("Invalid band or packet capture already running")
- return None
-
- pcap_name = f"{pcap_fname}_{band}.pcap"
- pcap_fname = os.path.join(log_path, pcap_name)
- pcap_file = open(pcap_fname, "w+b")
-
- tcpdump_cmd = f"tcpdump -i {BAND_IFACE[band]} -w - -U 2>/dev/null"
- cmd = formatter.SshFormatter().format_command(
- tcpdump_cmd, self.ssh_settings, extra_flags={"-q": None}
- )
- pcap_proc = Process(cmd)
-
- def write_to_pcap(data: bytes | str) -> None:
- if isinstance(data, str):
- data = data.encode("utf-8")
- pcap_file.write(data)
-
- pcap_proc.set_on_output_callback(write_to_pcap, binary=True)
- pcap_proc.start()
-
- self.pcap_properties[band] = PcapProperties(
- pcap_proc, pcap_fname, pcap_file
- )
- return pcap_proc
-
- def stop_packet_capture(self, proc: Process) -> None:
- """Stop the packet capture.
-
- Args:
- proc: Process object of tcpdump to kill.
- """
- for key, val in self.pcap_properties.items():
- if val.proc is proc:
- break
- else:
- self.log.error("Failed to stop tcpdump. Invalid process.")
- return
-
- proc.stop()
- with self._pcap_stop_lock:
- self.pcap_properties[key].pcap_file.close()
- del self.pcap_properties[key]
-
- def close(self) -> None:
- """Cleanup.
-
- Cleans up all the monitor mode interfaces and closes ssh connections.
- """
- self._cleanup_interface(MON_2G)
- self._cleanup_interface(MON_5G)
- self.ssh.close()
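A capture sketch using the controller above; `ssh_config` is whatever shape settings.from_config expects (defined elsewhere in the tree), and the band, channel, and file name are illustrative:

from antlion.controllers.packet_capture import PacketCapture

def capture_5g(ssh_config, log_path: str) -> None:
    # Configure the 5 GHz monitor interface on channel 36, capture briefly, then clean up.
    pcap = PacketCapture({"ssh_config": ssh_config})
    try:
        if not pcap.configure_monitor_mode("5G", channel=36):
            return
        proc = pcap.start_packet_capture("5G", log_path, "example")
        # ... trigger the traffic of interest here ...
        if proc:
            pcap.stop_packet_capture(proc)
    finally:
        pcap.close()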
diff --git a/packages/antlion/controllers/pdu.py b/packages/antlion/controllers/pdu.py
deleted file mode 100644
index e700bbd..0000000
--- a/packages/antlion/controllers/pdu.py
+++ /dev/null
@@ -1,301 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import enum
-import logging
-import time
-from enum import IntEnum, unique
-from typing import Protocol
-
-from antlion.types import ControllerConfig, Json
-from antlion.validation import MapValidator
-
-MOBLY_CONTROLLER_CONFIG_NAME: str = "PduDevice"
-
-# Allow time for capacitors to discharge.
-DEFAULT_REBOOT_DELAY_SEC = 5.0
-
-
-class PduType(enum.StrEnum):
- NP02B = "synaccess.np02b"
- WEBPOWERSWITCH = "digital_loggers.webpowerswitch"
-
-
-class PduError(Exception):
- """An exception for use within PduDevice implementations"""
-
-
-def create(configs: list[ControllerConfig]) -> list[PduDevice]:
- """Creates a PduDevice for each config in configs.
-
- Args:
- configs: List of configs from PduDevice field.
- Fields:
- device: a string "<brand>.<model>" that corresponds to module
- in pdu_lib/
- host: a string of the device ip address
- username (optional): a string of the username for device sign-in
- password (optional): a string of the password for device sign-in
- Return:
- A list of PduDevice objects.
- """
- pdus: list[PduDevice] = []
- for config in configs:
- c = MapValidator(config)
- device = c.get(str, "device")
- pduType = PduType(device)
-
- host = c.get(str, "host")
- username = c.get(str, "username", None)
- password = c.get(str, "password", None)
-
- match pduType:
- case PduType.NP02B:
- from antlion.controllers.pdu_lib.synaccess.np02b import (
- PduDevice as NP02B,
- )
-
- pdus.append(NP02B(host, username, password))
- case PduType.WEBPOWERSWITCH:
- from antlion.controllers.pdu_lib.digital_loggers.webpowerswitch import (
- PduDevice as WebPowerSwitch,
- )
-
- pdus.append(WebPowerSwitch(host, username, password))
- return pdus
-
-
-def destroy(objects: list[PduDevice]) -> None:
- """Ensure any connections to devices are closed.
-
- Args:
- pdu_list: A list of PduDevice objects.
- """
- for pdu in objects:
- pdu.close()
-
-
-def get_info(objects: list[PduDevice]) -> list[Json]:
- """Retrieves info from a list of PduDevice objects.
-
- Args:
- pdu_list: A list of PduDevice objects.
- Return:
- A list containing a dictionary for each PduDevice, with keys:
- 'host': a string of the device ip address
- 'username': a string of the username
- 'password': a string of the password
- """
- info: list[Json] = []
- for pdu in objects:
- info.append(
- {
- "host": pdu.host,
- "username": pdu.username,
- "password": pdu.password,
- }
- )
- return info
-
-
-def get_pdu_port_for_device(
- device_pdu_config: dict[str, Json], pdus: list[PduDevice]
-) -> tuple[PduDevice, int]:
- """Retrieves the pdu object and port of that PDU powering a given device.
- This is especially necessary when there are multilpe devices on a single PDU
- or multiple PDUs registered.
-
- Args:
- device_pdu_config: a dict, representing the config of the device.
- pdus: a list of registered PduDevice objects.
-
- Returns:
- A tuple: (PduDevice for the device, int port number on that PDU).
-
- Raises:
- ValueError, if there is no PDU matching the given host in the config.
-
- Example ACTS config:
- ...
- "testbed": [
- ...
- "FuchsiaDevice": [
- {
- "ip": "<device_ip>",
- "ssh_config": "/path/to/sshconfig",
- "PduDevice": {
- "host": "192.168.42.185",
- "port": 2
- }
- }
- ],
- "AccessPoint": [
- {
- "ssh_config": {
- ...
- },
- "PduDevice": {
- "host": "192.168.42.185",
- "port" 1
- }
- }
- ],
- "PduDevice": [
- {
- "device": "synaccess.np02b",
- "host": "192.168.42.185"
- }
- ]
- ],
- ...
- """
- config = MapValidator(device_pdu_config)
- pdu_ip = config.get(str, "host")
- port = config.get(int, "port")
- for pdu in pdus:
- if pdu.host == pdu_ip:
- return pdu, port
- raise ValueError(f"No PduDevice with host: {pdu_ip}")
-
-
-class PDU(Protocol):
- """Control power delivery to a device with a PDU."""
-
- def port(self, index: int) -> Port:
- """Access a single port.
-
- Args:
- index: Index of the port, likely the number identifier above the outlet.
-
- Returns:
- Controller for the specified port.
- """
- ...
-
- def __len__(self) -> int:
- """Count the number of ports.
-
- Returns:
- Number of ports on this PDU.
- """
- ...
-
-
-class Port(Protocol):
- """Controlling the power delivery to a single port of a PDU."""
-
- def status(self) -> PowerState:
- """Return the power state for this port.
-
- Returns:
- Power state
- """
- ...
-
- def set(self, state: PowerState) -> None:
- """Set the power state for this port.
-
- Args:
- state: Desired power state
- """
- ...
-
- def reboot(self, delay_sec: float = DEFAULT_REBOOT_DELAY_SEC) -> None:
- """Set the power state OFF then ON after a delay.
-
- Args:
- delay_sec: Length to wait before turning back ON. This is important to allow
- the device's capacitors to discharge.
- """
- self.set(PowerState.OFF)
- time.sleep(delay_sec)
- self.set(PowerState.ON)
-
-
-@unique
-class PowerState(IntEnum):
- OFF = 0
- ON = 1
-
-
-class PduDevice(object):
- """An object that defines the basic Pdu functionality and abstracts
- the actual hardware.
-
- This is a pure abstract class. Implementations should be of the same
- class name (e.g. class PduDevice(pdu.PduDevice)) and exist in
- pdu_lib/<brand>/<device_name>.py. PduDevice objects should not be
- instantiated by users directly.
-
- TODO(http://b/318877544): Replace PduDevice with PDU
- """
-
- def __init__(
- self, host: str, username: str | None, password: str | None
- ) -> None:
- if type(self) is PduDevice:
- raise NotImplementedError(
- "Base class: cannot be instantiated directly"
- )
- self.host = host
- self.username = username
- self.password = password
- self.log = logging.getLogger()
-
- def on_all(self) -> None:
- """Turns on all outlets on the device."""
- raise NotImplementedError("Base class: cannot be called directly")
-
- def off_all(self) -> None:
- """Turns off all outlets on the device."""
- raise NotImplementedError("Base class: cannot be called directly")
-
- def on(self, outlet: int) -> None:
- """Turns on specific outlet on the device.
- Args:
- outlet: index of the outlet to turn on.
- """
- raise NotImplementedError("Base class: cannot be called directly")
-
- def off(self, outlet: int) -> None:
- """Turns off specific outlet on the device.
- Args:
- outlet: index of the outlet to turn off.
- """
- raise NotImplementedError("Base class: cannot be called directly")
-
- def reboot(self, outlet: int) -> None:
- """Toggles a specific outlet on the device to off, then to on.
- Args:
- outlet: index of the outlet to reboot.
- """
- raise NotImplementedError("Base class: cannot be called directly")
-
- def status(self) -> dict[str, bool]:
- """Retrieves the status of the outlets on the device.
-
- Return:
- A dictionary matching outlet string to:
- True: if outlet is On
- False: if outlet is Off
- """
- raise NotImplementedError("Base class: cannot be called directly")
-
- def close(self) -> None:
- """Closes connection to the device."""
- raise NotImplementedError("Base class: cannot be called directly")
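A sketch tying the helpers above together; `device_pdu_config` is the device's "PduDevice" dict (host and port) and `pdus` is the list returned by create(), both supplied by the testbed config:

from antlion.controllers import pdu

def power_cycle(device_pdu_config, pdus: list[pdu.PduDevice]) -> None:
    # Find the PDU and outlet powering the device, then cycle it off and on.
    pdu_device, port = pdu.get_pdu_port_for_device(device_pdu_config, pdus)
    pdu_device.reboot(port)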
diff --git a/packages/antlion/controllers/pdu_lib/__init__.py b/packages/antlion/controllers/pdu_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/pdu_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py b/packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
deleted file mode 100644
index 3b806bc..0000000
--- a/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import Literal
-
-from mobly import signals
-
-from antlion.controllers import pdu
-
-# Create an optional dependency for dlipower since it has a transitive
-# dependency on beautifulsoup4. This library is difficult to maintain as a
-# third_party dependency in Fuchsia since it is hosted on launchpad.
-#
-# TODO(b/246999212): Explore alternatives to the dlipower package
-try:
- import dlipower
-
- HAS_IMPORT_DLIPOWER = True
-except ImportError:
- HAS_IMPORT_DLIPOWER = False
-
-
-class PduDevice(pdu.PduDevice):
- """Implementation of pure abstract PduDevice object for the Digital Loggers
- WebPowerSwitch PDUs.
-
- This controller supports the following Digital Loggers PDUs:
- - Pro (VII)
- - WebPowerSwitch V
- - WebPowerSwitch IV
- - WebPowerSwitch III
- - WebPowerSwitch II
- - Ethernet Power Controller III
- """
-
- def __init__(
- self, host: str, username: str | None, password: str | None
- ) -> None:
- """
- Note: This may require allowing plaintext password sign in on the
- power switch, which can be configured in the device's control panel.
- """
- super(PduDevice, self).__init__(host, username, password)
-
- if not HAS_IMPORT_DLIPOWER:
- raise signals.ControllerError(
- "Digital Loggers PDUs are not supported with current installed "
- "packages; install the dlipower package to add support"
- )
-
- self.power_switch = dlipower.PowerSwitch(
- hostname=host, userid=username, password=password
- )
- # Connection is made at command execution, this verifies the device
- # can be reached before continuing.
- if not self.power_switch.statuslist():
- raise pdu.PduError(
- "Failed to connect get WebPowerSwitch status. Incorrect host, "
- "userid, or password?"
- )
- else:
- self.log.info(f"Connected to WebPowerSwitch ({host}).")
-
- def on_all(self) -> None:
- """Turn on power to all outlets."""
- for outlet in self.power_switch:
- outlet.on()
- self._verify_state(outlet.name, "ON")
-
- def off_all(self) -> None:
- """Turn off power to all outlets."""
- for outlet in self.power_switch:
- outlet.off()
- self._verify_state(outlet.name, "OFF")
-
- def on(self, outlet: str | int) -> None:
- """Turn on power to given outlet
-
- Args:
- outlet: string or int, the outlet name/number
- """
- self.power_switch.command_on_outlets("on", str(outlet))
- self._verify_state(outlet, "ON")
-
- def off(self, outlet: str | int) -> None:
- """Turn off power to given outlet
-
- Args:
- outlet: string or int, the outlet name/number
- """
- self.power_switch.command_on_outlets("off", str(outlet))
- self._verify_state(outlet, "OFF")
-
- def reboot(self, outlet: str | int) -> None:
- """Cycle the given outlet to OFF and back ON.
-
- Args:
- outlet: string or int, the outlet name/number
- """
- self.power_switch.command_on_outlets("cycle", str(outlet))
- self._verify_state(outlet, "ON")
-
- def status(self) -> dict[str, bool]:
- """Return the status of the switch outlets.
-
- Return:
- a dict mapping outlet string numbers to:
- True if outlet is ON
- False if outlet is OFF
- """
- status_list = self.power_switch.statuslist()
- return {str(outlet): state == "ON" for outlet, _, state in status_list}
-
- def close(self) -> None:
- # Since there isn't a long-running connection, close is not applicable.
- pass
-
- def _verify_state(
- self,
- outlet: str | int,
- expected_state: Literal["ON"] | Literal["OFF"],
- timeout: int = 3,
- ) -> None:
- """Verify that the state of a given outlet is at an expected state.
- There can be a slight delay in when the device receives the
- command and when the state actually changes (especially when powering
- on). This function is used to verify the change has occurred before
- exiting.
-
- Args:
- outlet: string, the outlet name or number to check state.
- expected_state: string, 'ON' or 'OFF'
-
- Returns once the actual state reaches the expected state.
-
- Raises:
- PduError: if state has not reached expected state at timeout.
- """
- actual_state = None
- for _ in range(timeout):
- actual_state = self.power_switch.status(str(outlet))
- if actual_state == expected_state:
- return
- else:
- self.log.debug(
- f"Outlet {outlet} not yet in state {expected_state}"
- )
- raise pdu.PduError(
- "Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n"
- "Expected State: %s\n"
- "Actual State: %s"
- % (outlet, self.host, expected_state, actual_state)
- )
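For reference, the factory in pdu.py builds this controller from a config entry like the following sketch; the host and credentials are placeholders, and the dlipower package must be installed:

from antlion.controllers import pdu

def create_webpowerswitch() -> pdu.PduDevice:
    # The "device" value selects PduType.WEBPOWERSWITCH in pdu.create().
    return pdu.create(
        [
            {
                "device": "digital_loggers.webpowerswitch",
                "host": "192.168.42.10",  # placeholder address
                "username": "admin",  # placeholder credentials
                "password": "admin",
            }
        ]
    )[0]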
diff --git a/packages/antlion/controllers/pdu_lib/synaccess/__init__.py b/packages/antlion/controllers/pdu_lib/synaccess/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/pdu_lib/synaccess/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/pdu_lib/synaccess/np02b.py b/packages/antlion/controllers/pdu_lib/synaccess/np02b.py
deleted file mode 100644
index 01db908..0000000
--- a/packages/antlion/controllers/pdu_lib/synaccess/np02b.py
+++ /dev/null
@@ -1,274 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import logging
-import urllib.parse
-import urllib.request
-from dataclasses import dataclass
-from enum import StrEnum, unique
-from typing import Protocol
-
-from mobly import signals
-from mobly.logger import PrefixLoggerAdapter
-
-from antlion.controllers import pdu
-
-
-class PduDevice(pdu.PduDevice):
- """Implementation of pure abstract PduDevice object for the Synaccess np02b
- Pdu.
-
- TODO(http://b/318877544): Replace with NP02B
- """
-
- def __init__(
- self, host: str, username: str | None, password: str | None
- ) -> None:
- username = username or "admin" # default username
- password = password or "admin" # default password
- super().__init__(host, username, password)
- self.np02b = NP02B(host, username, password)
-
- def on_all(self) -> None:
- for i in range(len(self.np02b)):
- self.np02b.port(i).set(pdu.PowerState.ON)
-
- def off_all(self) -> None:
- for i in range(len(self.np02b)):
- self.np02b.port(i).set(pdu.PowerState.OFF)
-
- def on(self, outlet: int) -> None:
- self.np02b.port(outlet).set(pdu.PowerState.ON)
-
- def off(self, outlet: int) -> None:
- self.np02b.port(outlet).set(pdu.PowerState.OFF)
-
- def reboot(self, outlet: int) -> None:
- self.np02b.port(outlet).reboot()
-
- def status(self) -> dict[str, bool]:
- """Returns the status of the np02b outlets.
-
- Return:
- Mapping of outlet index ('1' and '2') to true if ON, otherwise
- false.
- """
- return {
- "1": self.np02b.port(1).status() is pdu.PowerState.ON,
- "2": self.np02b.port(2).status() is pdu.PowerState.ON,
- }
-
- def close(self) -> None:
- """Ensure connection to device is closed.
-
- In this implementation, this shouldn't be necessary, but it could be in
- other implementations that open a connection on creation.
- """
- return
-
-
-class NP02B(pdu.PDU):
- """Controller for a Synaccess netBooter NP-02B.
-
- See https://www.synaccess-net.com/np-02b
- """
-
- def __init__(self, host: str, username: str, password: str) -> None:
- self.client = Client(host, username, password)
-
- def port(self, index: int) -> pdu.Port:
- return Port(self.client, index)
-
- def __len__(self) -> int:
- return 2
-
-
-class ParsePDUResponseError(signals.TestError):
- """Error when the PDU returns an unexpected response."""
-
-
-class Client:
- def __init__(self, host: str, user: str, password: str) -> None:
- self._url = f"http://{host}/cmd.cgi"
-
- password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
- password_manager.add_password(None, host, user, password)
- auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
- self._opener = urllib.request.build_opener(auth_handler)
-
- self.log = PrefixLoggerAdapter(
- logging.getLogger(),
- {PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[pdu | {host}]"},
- )
-
- def request(self, command: Command) -> Response:
- cmd = command.code()
- args = command.args()
- if args:
- cmd += f' {" ".join(args)}'
-
- url = f"{self._url}?{urllib.parse.quote_plus(cmd)}"
- self.log.debug(f"Sending request {url}")
-
- with self._opener.open(url) as res:
- body = res.read().decode("utf-8")
-
- self.log.debug(f"Received response: {body}")
-
- # Syntax for the response should be in the form:
- # "<StatusCode>[,<PowerStatus>]"
- # For example, StatusCommand returns "$A0,01" when Port 1 is ON and
- # Port 2 is OFF.
- try:
- tokens = body.split(",", 1)
- if len(tokens) == 0:
- raise ParsePDUResponseError(
- f'Expected a response, found "{body}"'
- )
- code = tokens[0]
- status_code = StatusCode(code)
- power_status = PowerStatus(tokens[1]) if len(tokens) == 2 else None
- except Exception as e:
- raise ParsePDUResponseError(
- f'Failed to parse response from "{body}"'
- ) from e
-
- return Response(status_code, power_status)
-
-
-class Port(pdu.Port):
- def __init__(self, client: Client, port: int) -> None:
- if port == 0:
- raise TypeError("Invalid port index 0: ports are 1-indexed")
- if port > 2:
- raise TypeError(
- f"Invalid port index {port}: NP-02B only has 2 ports"
- )
-
- self.client = client
- self.port = port
-
- def status(self) -> pdu.PowerState:
- resp = self.client.request(StatusCommand())
- if resp.status != StatusCode.OK:
- raise ParsePDUResponseError(
- f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
- )
- if not resp.power:
- raise ParsePDUResponseError(
- "Expected PDU response to contain power, got None"
- )
- return resp.power.state(self.port)
-
- def set(self, state: pdu.PowerState) -> None:
- """Set the power state for this port on the PDU.
-
- Args:
- state: Desired power state
- """
- resp = self.client.request(SetCommand(self.port, state))
- if resp.status != StatusCode.OK:
- raise ParsePDUResponseError(
- f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
- )
-
- # Verify the newly set power state.
- status = self.status()
- if status is not state:
- raise ParsePDUResponseError(
- f"Expected PDU port {self.port} to be {state}, got {status}"
- )
-
-
-@dataclass
-class Response:
- status: StatusCode
- power: PowerStatus | None
-
-
-@unique
-class StatusCode(StrEnum):
- OK = "$A0"
- FAILED = "$AF"
-
-
-class Command(Protocol):
- def code(self) -> str:
- """Return the cmdCode for this command."""
- ...
-
- def args(self) -> list[str]:
- """Return the list of arguments for this command."""
- ...
-
-
-class PowerStatus:
- """State of all ports"""
-
- def __init__(self, states: str) -> None:
- self.states: list[pdu.PowerState] = []
- for state in states:
- self.states.insert(0, pdu.PowerState(int(state)))
-
- def ports(self) -> int:
- return len(self.states)
-
- def state(self, port: int) -> pdu.PowerState:
- return self.states[port - 1]
-
-
-class SetCommand(Command):
- def __init__(self, port: int, state: pdu.PowerState) -> None:
- self.port = port
- self.state = state
-
- def code(self) -> str:
- return "$A3"
-
- def args(self) -> list[str]:
- return [str(self.port), str(self.state)]
-
-
-class RebootCommand(Command):
- def __init__(self, port: int) -> None:
- self.port = port
-
- def code(self) -> str:
- return "$A4"
-
- def args(self) -> list[str]:
- return [str(self.port)]
-
-
-class StatusCommand(Command):
- def code(self) -> str:
- return "$A5"
-
- def args(self) -> list[str]:
- return []
-
-
-class SetAllCommand(Command):
- def __init__(self, state: pdu.PowerState) -> None:
- self.state = state
-
- def code(self) -> str:
- return "$A7"
-
- def args(self) -> list[str]:
- return [str(self.state)]
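A small sketch of the response parsing above: PowerStatus reverses the digit string, so the last digit of the payload describes port 1 (the assertions are illustrative, not part of the deleted file):

from antlion.controllers import pdu
from antlion.controllers.pdu_lib.synaccess.np02b import PowerStatus

status = PowerStatus("01")  # payload from a "$A0,01" status response
assert status.state(1) is pdu.PowerState.ON  # last digit maps to port 1
assert status.state(2) is pdu.PowerState.OFF  # first digit maps to port 2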
diff --git a/packages/antlion/controllers/sl4a_lib/__init__.py b/packages/antlion/controllers/sl4a_lib/__init__.py
deleted file mode 100644
index 7f1a899..0000000
--- a/packages/antlion/controllers/sl4a_lib/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/packages/antlion/controllers/sl4a_lib/error_reporter.py b/packages/antlion/controllers/sl4a_lib/error_reporter.py
deleted file mode 100644
index 0574555..0000000
--- a/packages/antlion/controllers/sl4a_lib/error_reporter.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import re
-import threading
-import time
-
-from antlion import utils
-
-
-class ErrorLogger(logging.LoggerAdapter): # type: ignore # Blanket ignore to enable mypy
- """A logger for a given error report."""
-
- def __init__(self, label):
- self.label = label
- super(ErrorLogger, self).__init__(logging.getLogger(), {})
-
- def process(self, msg, kwargs):
- """Transforms a log message to be in a given format."""
- return f"[Error Report|{self.label}] {msg}", kwargs
-
-
-class ErrorReporter(object):
- """A class that reports errors and diagnoses possible points of failure.
-
- Attributes:
- max_reports: The maximum number of reports that should be created.
- Defaults to 1 to prevent multiple reports from being written at the
- same time over one another.
- name: The name of the report to be used in the error logs.
- """
-
- def __init__(self, name, max_reports=1):
- """Creates an error report.
-
- Args:
- name: The name of the error report.
- max_reports: Sets the maximum number of reports to this value.
- """
- self.name = name
- self.max_reports = max_reports
- self._ticket_number = 0
- self._ticket_lock = threading.Lock()
- self._current_request_count = 0
- self._accept_requests = True
-
- def create_error_report(self, sl4a_manager, sl4a_session, rpc_connection):
- """Creates an error report, if possible.
-
- Returns:
- False iff a report cannot be created.
- """
- if not self._accept_requests:
- return False
-
- self._current_request_count += 1
-
- try:
- ticket = self._get_report_ticket()
- if not ticket:
- return False
-
- report = ErrorLogger(f"{self.name}|{ticket}")
- report.info("Creating error report.")
-
- (
- self.report_on_adb(sl4a_manager.adb, report)
- and self.report_device_processes(sl4a_manager.adb, report)
- and self.report_sl4a_state(
- rpc_connection, sl4a_manager.adb, report
- )
- and self.report_sl4a_session(sl4a_manager, sl4a_session, report)
- )
-
- return True
- finally:
- self._current_request_count -= 1
-
- def report_on_adb(self, adb, report):
- """Creates an error report for ADB. Returns false if ADB has failed."""
- adb_uptime = utils.get_command_uptime('"adb .* server"')
- if adb_uptime:
- report.info(
- f"The adb daemon has an uptime of {adb_uptime} ([[dd-]hh:]mm:ss)."
- )
- else:
- report.warning(
- "The adb daemon (on the host machine) is not "
- "running. All forwarded ports have been removed."
- )
- return False
-
- devices_output = adb.devices()
- if adb.serial not in devices_output:
- report.warning(
- "This device cannot be found by ADB. The device may have shut "
- "down or disconnected."
- )
- return False
- elif re.findall(r"%s\s+offline" % adb.serial, devices_output):
- report.warning(
- "The device is marked as offline in ADB. We are no longer able "
- "to access the device."
- )
- return False
- else:
- report.info(
- "The device is online and accessible through ADB calls."
- )
- return True
-
- def report_device_processes(self, adb, report):
- """Creates an error report for the device's required processes.
-
- Returns:
- False iff a required device process (zygote, netd, or adbd) is not
- running, meaning user apks cannot be communicated with over tcp.
- """
- zygote_uptime = utils.get_device_process_uptime(adb, "zygote")
- if zygote_uptime:
- report.info(
- "Zygote has been running for %s ([[dd-]hh:]mm:ss). If this "
- "value is low, the phone may have recently crashed."
- % zygote_uptime
- )
- else:
- report.warning(
- "Zygote has been killed. It is likely the Android Runtime has "
- "crashed. Check the bugreport/logcat for more information."
- )
- return False
-
- netd_uptime = utils.get_device_process_uptime(adb, "netd")
- if netd_uptime:
- report.info(
- "Netd has been running for %s ([[dd-]hh:]mm:ss). If this "
- "value is low, the phone may have recently crashed."
- % netd_uptime
- )
- else:
- report.warning(
- "Netd has been killed. The Android Runtime may have crashed. "
- "Check the bugreport/logcat for more information."
- )
- return False
-
- adbd_uptime = utils.get_device_process_uptime(adb, "adbd")
- if adbd_uptime:
- report.info(
- "Adbd has been running for %s ([[dd-]hh:]mm:ss). If this "
- "value is low, the phone may have recently crashed."
- % adbd_uptime
- )
- else:
- report.warning("Adbd is not running.")
- return False
- return True
-
- def report_sl4a_state(self, rpc_connection, adb, report):
- """Creates an error report for the state of SL4A."""
- report.info(
- f"Diagnosing Failure over connection {rpc_connection.ports}."
- )
-
- ports = rpc_connection.ports
- forwarded_ports_output = adb.forward("--list")
-
- expected_output = "%s tcp:%s tcp:%s" % (
- adb.serial,
- ports.forwarded_port,
- ports.server_port,
- )
- if expected_output not in forwarded_ports_output:
- formatted_output = re.sub(
- "^", " ", forwarded_ports_output, flags=re.MULTILINE
- )
- report.warning(
- "The forwarded port for the failed RpcConnection is missing.\n"
- "Expected:\n %s\nBut found:\n%s"
- % (expected_output, formatted_output)
- )
- return False
- else:
- report.info(
- "The connection port has been properly forwarded to "
- "the device."
- )
-
- sl4a_uptime = utils.get_device_process_uptime(
- adb, "com.googlecode.android_scripting"
- )
- if sl4a_uptime:
- report.info(
- "SL4A has been running for %s ([[dd-]hh:]mm:ss). If this "
- "value is lower than the test case, it must have been "
- "restarted during the test." % sl4a_uptime
- )
- else:
- report.warning(
- "The SL4A scripting service is not running. SL4A may have "
- "crashed, or have been terminated by the Android Runtime."
- )
- return False
- return True
-
- def report_sl4a_session(self, sl4a_manager, session, report):
- """Reports the state of an SL4A session."""
- if session.server_port not in sl4a_manager.sl4a_ports_in_use:
- report.warning(
- "SL4A server port %s not found in set of open "
- "ports %s"
- % (session.server_port, sl4a_manager.sl4a_ports_in_use)
- )
- return False
-
- if session not in sl4a_manager.sessions.values():
- report.warning(
- "SL4A session %s over port %s is not managed by "
- "the SL4A Manager. This session is already dead."
- % (session.uid, session.server_port)
- )
- return False
- return True
-
- def finalize_reports(self):
- self._accept_requests = False
- while self._current_request_count > 0:
- # Wait for other threads to finish.
- time.sleep(0.1)
-
- def _get_report_ticket(self):
- """Returns the next ticket, or none if all tickets have been used."""
- logging.debug("Getting ticket for SL4A error report.")
- with self._ticket_lock:
- self._ticket_number += 1
- ticket_number = self._ticket_number
-
- if ticket_number <= self.max_reports:
- return ticket_number
- else:
- return None
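A minimal sketch of the ticketing behaviour above; the sl4a_manager, sl4a_session, and rpc_connection arguments are the real objects owned by the SL4A controller and are only passed through here:

from antlion.controllers.sl4a_lib.error_reporter import ErrorReporter

def report_once(sl4a_manager, sl4a_session, rpc_connection) -> bool:
    # With max_reports=1, only the first caller gets a ticket; later calls return False.
    reporter = ErrorReporter("sl4a", max_reports=1)
    try:
        return reporter.create_error_report(sl4a_manager, sl4a_session, rpc_connection)
    finally:
        reporter.finalize_reports()  # stop accepting further report requests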
diff --git a/packages/antlion/controllers/sl4a_lib/event_dispatcher.py b/packages/antlion/controllers/sl4a_lib/event_dispatcher.py
deleted file mode 100644
index 2764c04..0000000
--- a/packages/antlion/controllers/sl4a_lib/event_dispatcher.py
+++ /dev/null
@@ -1,503 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import queue
-import re
-import threading
-import time
-from concurrent.futures import ThreadPoolExecutor
-
-from mobly import logger
-
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-class EventDispatcherError(Exception):
- """The base class for all EventDispatcher exceptions."""
-
-
-class IllegalStateError(EventDispatcherError):
- """Raise when user tries to put event_dispatcher into an illegal state."""
-
-
-class DuplicateError(EventDispatcherError):
- """Raise when two event handlers have been assigned to an event name."""
-
-
-class EventDispatcher:
- """A class for managing the events for an SL4A Session.
-
- Attributes:
- _serial: The serial of the device.
- _rpc_client: The rpc client for that session.
- _started: A bool that holds whether or not the event dispatcher is
- running.
- _executor: The thread pool executor for running event handlers and
- polling.
- _event_dict: A dictionary of str eventName = Queue<Event> eventQueue
- _handlers: A dictionary of str eventName => (lambda, args) handler
- _lock: A lock that prevents multiple reads/writes to the event queues.
- log: The EventDispatcher's logger.
- """
-
- DEFAULT_TIMEOUT = 60
-
- def __init__(self, serial, rpc_client):
- self._serial = serial
- self._rpc_client = rpc_client
- self._started = False
- self._executor = None
- self._event_dict = {}
- self._handlers = {}
- self._lock = threading.RLock()
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[E Dispatcher|{self._serial}|{self._rpc_client.uid}]",
- },
- )
-
- def poll_events(self):
- """Continuously polls all types of events from sl4a.
-
- Events are sorted by name and stored in separate queues.
- If there are registered handlers, the handlers will be called with
- corresponding event immediately upon event discovery, and the event
- won't be stored. If exceptions occur, stop the dispatcher and return.
- """
- while self._started:
- try:
- # 60000 in ms, timeout in second
- event_obj = self._rpc_client.eventWait(60000, timeout=120)
- except rpc_client.Sl4aConnectionError as e:
- if self._rpc_client.is_alive:
- self.log.warning("Closing due to closed session.")
- break
- else:
- self.log.warning(f"Closing due to error: {e}.")
- self.close()
- raise e
- if not event_obj:
- continue
- elif "name" not in event_obj:
- self.log.error(f"Received Malformed event {event_obj}")
- continue
- else:
- event_name = event_obj["name"]
- # if handler registered, process event
- if event_name == "EventDispatcherShutdown":
- self.log.debug("Received shutdown signal.")
- # closeSl4aSession has been called, which closes the event
- # dispatcher. Stop execution on this polling thread.
- return
- if event_name in self._handlers:
- self.log.debug(
- "Using handler %s for event: %r"
- % (self._handlers[event_name][0].__name__, event_obj)
- )
- self.handle_subscribed_event(event_obj, event_name)
- else:
- self.log.debug(f"Queuing event: {event_obj!r}")
- self._lock.acquire()
- if event_name in self._event_dict: # otherwise, cache event
- self._event_dict[event_name].put(event_obj)
- else:
- q = queue.Queue() # type: ignore # Blanket ignore to enable mypy
- q.put(event_obj)
- self._event_dict[event_name] = q
- self._lock.release()
-
- def register_handler(self, handler, event_name, args):
- """Registers an event handler.
-
- One type of event can only have one event handler associated with it.
-
- Args:
- handler: The event handler function to be registered.
- event_name: Name of the event the handler is for.
- args: User arguments to be passed to the handler when it's called.
-
- Raises:
- IllegalStateError: Raised if attempts to register a handler after
- the dispatcher starts running.
- DuplicateError: Raised if attempts to register more than one
- handler for one type of event.
- """
- if self._started:
- raise IllegalStateError(
- "Cannot register service after polling is " "started."
- )
- self._lock.acquire()
- try:
- if event_name in self._handlers:
- raise DuplicateError(
- f"A handler for {event_name} already exists"
- )
- self._handlers[event_name] = (handler, args)
- finally:
- self._lock.release()
-
- def start(self):
- """Starts the event dispatcher.
-
- Initiates executor and start polling events.
-
- Raises:
- IllegalStateError: Can't start a dispatcher again when it's already
- running.
- """
- if not self._started:
- self._started = True
- self._executor = ThreadPoolExecutor(max_workers=32)
- self._executor.submit(self.poll_events)
- else:
- raise IllegalStateError("Dispatcher is already started.")
-
- def close(self):
- """Clean up and release resources.
-
- This function should only be called after a
- rpc_client.closeSl4aSession() call.
- """
- if not self._started:
- return
- self._started = False
- self._executor.shutdown(wait=True) # type: ignore # Blanket ignore to enable mypy
- self.clear_all_events()
-
- def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT):
- """Pop an event from its queue.
-
- Return and remove the oldest entry of an event.
- Block until an event of specified name is available or
- times out if timeout is set.
-
- Args:
- event_name: Name of the event to be popped.
- timeout: Number of seconds to wait when event is not present.
- Never times out if None.
-
- Returns:
- event: The oldest entry of the specified event. None if timed out.
-
- Raises:
- IllegalStateError: Raised if pop is called before the dispatcher
- starts polling.
- """
- if not self._started:
- raise IllegalStateError(
- "Dispatcher needs to be started before popping."
- )
-
- e_queue = self.get_event_q(event_name)
-
- if not e_queue:
- raise IllegalStateError(
- f"Failed to get an event queue for {event_name}"
- )
-
- try:
- # Block for timeout
- if timeout:
- return e_queue.get(True, timeout)
- # Non-blocking poll for event
- elif timeout == 0:
- return e_queue.get(False)
- else:
- # Block forever on event wait
- return e_queue.get(True)
- except queue.Empty:
- msg = f"Timeout after {timeout}s waiting for event: {event_name}"
- self.log.info(msg)
- raise queue.Empty(msg)
-
- def wait_for_event(
- self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs
- ):
- """Wait for an event that satisfies a predicate to appear.
-
- Continuously pop events of a particular name and check against the
- predicate until an event that satisfies the predicate is popped or
- timed out. Note this will remove all the events of the same name that
- do not satisfy the predicate in the process.
-
- Args:
- event_name: Name of the event to be popped.
- predicate: A function that takes an event and returns True if the
- predicate is satisfied, False otherwise.
- timeout: Number of seconds to wait.
- *args: Optional positional args passed to predicate().
- **kwargs: Optional keyword args passed to predicate().
- consume_ignored_events: Whether or not to consume events while
- searching for the desired event. Defaults to True if unset.
-
- Returns:
- The event that satisfies the predicate.
-
- Raises:
- queue.Empty: Raised if no event that satisfies the predicate was
- found before time out.
- """
- deadline = time.time() + timeout
- ignored_events = []
- consume_events = kwargs.pop("consume_ignored_events", True)
- while True:
- event = None
- try:
- event = self.pop_event(event_name, 1)
- if consume_events:
- self.log.debug(f"Consuming event: {event!r}")
- else:
- self.log.debug(f"Peeking at event: {event!r}")
- ignored_events.append(event)
- except queue.Empty:
- pass
-
- if event and predicate(event, *args, **kwargs):
- for ignored_event in ignored_events:
- self.get_event_q(event_name).put(ignored_event)
- self.log.debug(
- f"Matched event: {event!r} with {predicate.__name__}"
- )
- return event
-
- if time.time() > deadline:
- for ignored_event in ignored_events:
- self.get_event_q(event_name).put(ignored_event)
- msg = (
- f"Timeout after {timeout}s waiting for event: {event_name}"
- )
- self.log.info(msg)
- raise queue.Empty(msg)
-
- def pop_events(self, regex_pattern, timeout, freq=1):
- """Pop events whose names match a regex pattern.
-
- If such event(s) exist, pop one event from each event queue that
- satisfies the condition. Otherwise, wait for an event that satisfies
- the condition to occur, with timeout.
-
- Results are sorted by timestamp in ascending order.
-
- Args:
- regex_pattern: The regular expression pattern that an event name
- should match in order to be popped.
- timeout: Number of seconds to wait for events in case no event
- matching the condition exists when the function is called.
- freq: Number of seconds to wait between successive polls.
-
- Returns:
- results: Popped events whose names match the regex pattern.
- Empty if none exist and the wait timed out.
-
- Raises:
- IllegalStateError: Raised if pop is called before the dispatcher
- starts polling.
- queue.Empty: Raised if no event was found before time out.
- """
- if not self._started:
- raise IllegalStateError(
- "Dispatcher needs to be started before popping."
- )
- deadline = time.time() + timeout
- while True:
- # TODO: fix the sleep loop
- results = self._match_and_pop(regex_pattern)
- if len(results) != 0 or time.time() > deadline:
- break
- time.sleep(freq)
- if len(results) == 0:
- msg = f"Timeout after {timeout}s waiting for event: {regex_pattern}"
- self.log.error(msg)
- raise queue.Empty(msg)
-
- return sorted(results, key=lambda event: event["time"])
-
- def _match_and_pop(self, regex_pattern):
- """Pop one event from each of the event queues whose names
- match (in a sense of regular expression) regex_pattern.
- """
- results = []
- self._lock.acquire()
- for name in self._event_dict.keys():
- if re.match(regex_pattern, name):
- q = self._event_dict[name]
- if q:
- try:
- results.append(q.get(False))
- except queue.Empty:
- pass
- self._lock.release()
- return results
-
- def get_event_q(self, event_name):
- """Obtain the queue storing events of the specified name.
-
- If no event of this name has been polled yet, create an empty queue for it.
-
- Returns: A queue storing all the events of the specified name.
- """
- self._lock.acquire()
- if (
- event_name not in self._event_dict
- or self._event_dict[event_name] is None
- ):
- self._event_dict[event_name] = queue.Queue()
- self._lock.release()
-
- event_queue = self._event_dict[event_name]
- return event_queue
-
- def handle_subscribed_event(self, event_obj, event_name):
- """Execute the registered handler of an event.
-
- Retrieve the handler and its arguments, and execute the handler in a
- new thread.
-
- Args:
- event_obj: Json object of the event.
- event_name: Name of the event to call handler for.
- """
- handler, args = self._handlers[event_name]
- self._executor.submit(handler, event_obj, *args) # type: ignore # Blanket ignore to enable mypy
-
- def _handle(
- self,
- event_handler,
- event_name,
- user_args,
- event_timeout,
- cond,
- cond_timeout,
- ):
- """Pop an event of specified type and calls its handler on it. If
- condition is not None, block until condition is met or timeout.
- """
- if cond:
- cond.wait(cond_timeout)
- event = self.pop_event(event_name, event_timeout)
- return event_handler(event, *user_args)
-
- def handle_event(
- self,
- event_handler,
- event_name,
- user_args,
- event_timeout=None,
- cond=None,
- cond_timeout=None,
- ):
- """Handle events that don't have registered handlers
-
- In a new thread, poll one event of specified type from its queue and
- execute its handler. If no such event exists, the thread waits until
- one appears.
-
- Args:
- event_handler: Handler for the event, which should take at least
- one argument - the event json object.
- event_name: Name of the event to be handled.
- user_args: User arguments for the handler; to be passed in after
- the event json.
- event_timeout: Number of seconds to wait for the event to come.
- cond: A condition to wait on before executing the handler. Should
- be a threading.Event object.
- cond_timeout: Number of seconds to wait before the condition times
- out. Never times out if None.
-
- Returns:
- worker: A concurrent.Future object associated with the handler.
- If blocking call worker.result() is triggered, the handler
- needs to return something to unblock.
- """
- worker = self._executor.submit( # type: ignore # Blanket ignore to enable mypy
- self._handle,
- event_handler,
- event_name,
- user_args,
- event_timeout,
- cond,
- cond_timeout,
- )
- return worker
-
- def pop_all(self, event_name):
- """Return and remove all stored events of a specified name.
-
- Pops all events from their queue. May miss the latest ones.
- If no event is available, return immediately.
-
- Args:
- event_name: Name of the events to be popped.
-
- Returns:
- results: List of the desired events.
-
- Raises:
- IllegalStateError: Raised if pop is called before the dispatcher
- starts polling.
- """
- if not self._started:
- raise IllegalStateError(
- ("Dispatcher needs to be started before " "popping.")
- )
- results = []
- try:
- self._lock.acquire()
- while True:
- e = self._event_dict[event_name].get(block=False)
- results.append(e)
- except (queue.Empty, KeyError):
- return results
- finally:
- self._lock.release()
-
- def clear_events(self, event_name):
- """Clear all events of a particular name.
-
- Args:
- event_name: Name of the events to be popped.
- """
- self._lock.acquire()
- try:
- q = self.get_event_q(event_name)
- q.queue.clear()
- except queue.Empty:
- return
- finally:
- self._lock.release()
-
- def clear_all_events(self):
- """Clear all event queues and their cached events."""
- self._lock.acquire()
- self._event_dict.clear()
- self._lock.release()
-
- def is_event_match(self, event, field, value):
- return self.is_event_match_for_list(event, field, [value])
-
- def is_event_match_for_list(self, event, field, value_list):
- try:
- value_in_event = event["data"][field]
- except KeyError:
- return False
- for value in value_list:
- if value_in_event == value:
- return True
- return False
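
For reference, a minimal usage sketch of the dispatcher API above (pop_events and
is_event_match), assuming session is an existing Sl4aSession; the event name
pattern and the "ssid" field are hypothetical placeholders, not guaranteed SL4A
event names:

    import queue

    dispatcher = session.get_event_dispatcher()
    try:
        # Pop every queued event whose name matches the regex, waiting up to 30s.
        events = dispatcher.pop_events("WifiNetworkConnected.*", timeout=30)
    except queue.Empty:
        events = []
    for event in events:
        # is_event_match() compares event["data"][field] against the given value.
        if dispatcher.is_event_match(event, "ssid", "TestNetwork"):
            print(f"Connected at {event['time']}")
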
diff --git a/packages/antlion/controllers/sl4a_lib/rpc_client.py b/packages/antlion/controllers/sl4a_lib/rpc_client.py
deleted file mode 100644
index 89e985d..0000000
--- a/packages/antlion/controllers/sl4a_lib/rpc_client.py
+++ /dev/null
@@ -1,393 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, assignment, call-overload"
-import json
-import logging
-import socket
-import threading
-import time
-from concurrent import futures
-
-from mobly import logger
-
-from antlion import error
-
-# The default timeout value when no timeout is set.
-SOCKET_TIMEOUT = 60
-
-# The Session UID when a UID has not been received yet.
-UNKNOWN_UID = -1
-
-
-class Sl4aException(error.ActsError):
- """The base class for all SL4A exceptions."""
-
-
-class Sl4aStartError(Sl4aException):
- """Raised when sl4a is not able to be started."""
-
-
-class Sl4aApiError(Sl4aException):
- """Raised when remote API reports an error.
-
- This error mirrors the JSON-RPC 2.0 spec for Error Response objects.
-
- Attributes:
- code: The error code returned by SL4A. Not to be confused with
- ActsError's error_code.
- message: The error message returned by SL4A.
- data: The extra data, if any, returned by SL4A.
- """
-
- def __init__(self, message, code=-1, data=None, rpc_name=""):
- super().__init__()
- self.message = message
- self.code = code
- if data is None:
- self.data = {}
- else:
- self.data = data
- self.rpc_name = rpc_name
-
- def __str__(self):
- if self.data:
- return "Error in RPC %s %s:%s:%s" % (
- self.rpc_name,
- self.code,
- self.message,
- self.data,
- )
- else:
- return f"Error in RPC {self.rpc_name} {self.code}:{self.message}"
-
-
-class Sl4aConnectionError(Sl4aException):
- """An error raised upon failure to connect to SL4A."""
-
-
-class Sl4aProtocolError(Sl4aException):
- """Raised when there an error in exchanging data with server on device."""
-
- NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
- NO_RESPONSE_FROM_SERVER = "No response from server."
- MISMATCHED_API_ID = "Mismatched API id."
-
-
-class Sl4aNotInstalledError(Sl4aException):
- """An error raised when an Sl4aClient is created without SL4A installed."""
-
-
-class Sl4aRpcTimeoutError(Sl4aException):
- """An error raised when an SL4A RPC has timed out."""
-
-
-class RpcClient(object):
- """An RPC client capable of processing multiple RPCs concurrently.
-
- Attributes:
- _free_connections: A list of all idle RpcConnections.
- _working_connections: A list of all working RpcConnections.
- _lock: A lock used for accessing critical memory.
- max_connections: The maximum number of RpcConnections at a time.
- Increasing or decreasing the number of max connections does NOT
- modify the thread pool size being used for self.future RPC calls.
- _log: The logger for this RpcClient.
- """
-
- """The default value for the maximum amount of connections for a client."""
- DEFAULT_MAX_CONNECTION = 15
-
- class AsyncClient(object):
- """An object that allows RPC calls to be called asynchronously.
-
- Attributes:
- _rpc_client: The RpcClient to use when making calls.
- _executor: The ThreadPoolExecutor used to keep track of workers
- """
-
- def __init__(self, rpc_client):
- self._rpc_client = rpc_client
- self._executor = futures.ThreadPoolExecutor(
- max_workers=max(rpc_client.max_connections - 2, 1)
- )
-
- def rpc(self, name, *args, **kwargs):
-            future = self._executor.submit(self._rpc_client.rpc, name, *args, **kwargs)
- return future
-
- def __getattr__(self, name):
- """Wrapper for python magic to turn method calls into RPC calls."""
-
- def rpc_call(*args, **kwargs):
- future = self._executor.submit(
- self._rpc_client.__getattr__(name), *args, **kwargs
- )
- return future
-
- return rpc_call
-
- def __init__(
- self,
- uid,
- serial,
- on_error_callback,
- _create_connection_func,
- max_connections=None,
- ):
- """Creates a new RpcClient object.
-
- Args:
- uid: The session uid this client is a part of.
- serial: The serial of the Android device. Used for logging.
- on_error_callback: A callback for when a connection error is raised.
- _create_connection_func: A reference to the function that creates a
- new session.
- max_connections: The maximum number of connections the RpcClient
- can have.
- """
- self._serial = serial
- self.on_error = on_error_callback
- self._create_connection_func = _create_connection_func
- self._free_connections = [self._create_connection_func(uid)]
-
- self.uid = self._free_connections[0].uid
- self._lock = threading.Lock()
-
- self._log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
-                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[RPC Service|{self._serial}|{self.uid}]",
- },
- )
-
- self._working_connections = []
- if max_connections is None:
- self.max_connections = RpcClient.DEFAULT_MAX_CONNECTION
- else:
- self.max_connections = max_connections
-
- self._async_client = RpcClient.AsyncClient(self)
- self.is_alive = True
-
- def terminate(self):
- """Terminates all connections to the SL4A server."""
- if len(self._working_connections) > 0:
- self._log.warning(
- "%s connections are still active, and waiting on "
- "responses.Closing these connections now."
- % len(self._working_connections)
- )
- connections = self._free_connections + self._working_connections
- for connection in connections:
- self._log.debug(f"Closing connection over ports {connection.ports}")
- connection.close()
- self._free_connections = []
- self._working_connections = []
- self.is_alive = False
-
- def _get_free_connection(self):
- """Returns a free connection to be used for an RPC call.
-
- This function also adds the client to the working set to prevent
- multiple users from obtaining the same client.
- """
- while True:
- if len(self._free_connections) > 0:
- with self._lock:
-                    # Check if another thread grabbed the remaining connection
-                    # while we were waiting for the lock.
- if len(self._free_connections) == 0:
- continue
- client = self._free_connections.pop()
- self._working_connections.append(client)
- return client
-
- client_count = len(self._free_connections) + len(
- self._working_connections
- )
- if client_count < self.max_connections:
- with self._lock:
- client_count = len(self._free_connections) + len(
- self._working_connections
- )
- if client_count < self.max_connections:
- client = self._create_connection_func(self.uid)
- self._working_connections.append(client)
- return client
- time.sleep(0.01)
-
- def _release_working_connection(self, connection):
- """Marks a working client as free.
-
- Args:
- connection: The client to mark as free.
- Raises:
- A ValueError if the client is not a known working connection.
- """
- # We need to keep this code atomic because the client count is based on
- # the length of the free and working connection list lengths.
- with self._lock:
- self._working_connections.remove(connection)
- self._free_connections.append(connection)
-
- def rpc(self, method, *args, timeout=None, retries=3):
- """Sends an rpc to sl4a.
-
- Sends an rpc call to sl4a over this RpcClient's corresponding session.
-
- Args:
- method: str, The name of the method to execute.
- args: any, The args to send to sl4a.
- timeout: The amount of time to wait for a response.
-            retries: The total number of attempts (a misnomer; tries, not retries).
-
- Returns:
- The result of the rpc.
-
- Raises:
- Sl4aProtocolError: Something went wrong with the sl4a protocol.
-            Sl4aApiError: The rpc went through but executed with errors.
- """
- connection = self._get_free_connection()
- ticket = connection.get_new_ticket()
- timed_out = False
- if timeout:
- connection.set_timeout(timeout)
- data = {"id": ticket, "method": method, "params": args}
- request = json.dumps(data)
- response = ""
- try:
- for i in range(1, retries + 1):
- connection.send_request(request)
-
- response = connection.get_response()
- if not response:
- if i < retries:
- self._log.warning(
- "No response for RPC method %s on iteration %s",
- method,
- i,
- )
- continue
- else:
- self._log.exception(
- "No response for RPC method %s on iteration %s",
- method,
- i,
- )
- self.on_error(connection)
- raise Sl4aProtocolError(
- Sl4aProtocolError.NO_RESPONSE_FROM_SERVER
- )
- else:
- break
- except BrokenPipeError as e:
- if self.is_alive:
- self._log.exception(
- "The device disconnected during RPC call "
- "%s. Please check the logcat for a crash "
- "or disconnect.",
- method,
- )
- self.on_error(connection)
- else:
- self._log.warning("The connection was killed during cleanup:")
- self._log.warning(e)
- raise Sl4aConnectionError(e)
- except socket.timeout as err:
- # If a socket connection has timed out, the socket can no longer be
- # used. Close it out and remove the socket from the connection pool.
- timed_out = True
- self._log.warning(
- 'RPC "%s" (id: %s) timed out after %s seconds.',
- method,
- ticket,
- timeout or SOCKET_TIMEOUT,
- )
- self._log.debug(
- f"Closing timed out connection over {connection.ports}"
- )
- connection.close()
- self._working_connections.remove(connection)
- # Re-raise the error as an SL4A Error so end users can process it.
- raise Sl4aRpcTimeoutError(err)
- finally:
- if not timed_out:
- if timeout:
- connection.set_timeout(SOCKET_TIMEOUT)
- self._release_working_connection(connection)
- result = json.loads(str(response, encoding="utf8"))
-
- if result["error"]:
- error_object = result["error"]
- if isinstance(error_object, dict):
- # Uses JSON-RPC 2.0 Format
- sl4a_api_error = Sl4aApiError(
- error_object.get("message", None),
- error_object.get("code", -1),
- error_object.get("data", {}),
- rpc_name=method,
- )
- else:
- # Fallback on JSON-RPC 1.0 Format
- sl4a_api_error = Sl4aApiError(error_object, rpc_name=method)
- self._log.warning(sl4a_api_error)
- raise sl4a_api_error
- if result["id"] != ticket:
- self._log.error(
- "RPC method %s with mismatched api id %s", method, result["id"]
- )
- raise Sl4aProtocolError(Sl4aProtocolError.MISMATCHED_API_ID)
- return result["result"]
-
- @property
- def future(self):
- """Returns a magic function that returns a future running an RPC call.
-
- This function effectively allows the idiom:
-
- >>> rpc_client = RpcClient(...)
- >>> # returns after call finishes
- >>> rpc_client.someRpcCall()
- >>> # Immediately returns a reference to the RPC's future, running
- >>> # the lengthy RPC call on another thread.
- >>> future = rpc_client.future.someLengthyRpcCall()
- >>> rpc_client.doOtherThings()
- >>> ...
- >>> # Wait for and get the returned value of the lengthy RPC.
- >>> # Can specify a timeout as well.
- >>> value = future.result()
-
- The number of concurrent calls to this method is limited to
- (max_connections - 2), to prevent future calls from exhausting all free
- connections.
- """
- return self._async_client
-
- def __getattr__(self, name):
- """Wrapper for python magic to turn method calls into RPC calls."""
-
- def rpc_call(*args, **kwargs):
- return self.rpc(name, *args, **kwargs)
-
- if not self.is_alive:
- raise Sl4aStartError(
- "This SL4A session has already been terminated. You must "
- "create a new session to continue."
- )
- return rpc_call
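
A short sketch of the calling pattern this client supports, contrasting the
blocking path (which resolves through __getattr__ and rpc()) with the future
property documented above; the RPC method names are hypothetical:

    # Blocking call; uses rpc() with the default timeout and retry count.
    info = rpc_client.someQuickRpcCall()

    # Asynchronous call; immediately returns a concurrent.futures.Future.
    pending = rpc_client.future.someLengthyRpcCall()
    value = pending.result(timeout=60)  # wait for the RPC to complete
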
diff --git a/packages/antlion/controllers/sl4a_lib/rpc_connection.py b/packages/antlion/controllers/sl4a_lib/rpc_connection.py
deleted file mode 100644
index 855cdc0..0000000
--- a/packages/antlion/controllers/sl4a_lib/rpc_connection.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import json
-import logging
-import socket
-import threading
-
-from mobly import logger
-
-from antlion.controllers.adb import AdbProxy
-from antlion.controllers.sl4a_lib import rpc_client
-from antlion.controllers.sl4a_lib.sl4a_ports import Sl4aPorts
-
-# The Session UID when a UID has not been received yet.
-UNKNOWN_UID = -1
-
-
-class Sl4aConnectionCommand(object):
- """Commands that can be invoked on the sl4a client.
-
-    INIT: Initializes a new session in sl4a.
- CONTINUE: Creates a connection.
- """
-
- INIT = "initiate"
- CONTINUE = "continue"
-
-
-class RpcConnection(object):
- """A single RPC Connection thread.
-
- Attributes:
- _client_socket: The socket this connection uses.
- _socket_file: The file created over the _client_socket.
- _ticket_counter: The counter storing the current ticket number.
- _ticket_lock: A lock on the ticket counter to prevent ticket collisions.
- adb: A reference to the AdbProxy of the AndroidDevice. Used for logging.
- log: The logger for this RPC Client.
- ports: The Sl4aPorts object that stores the ports this connection uses.
- uid: The SL4A session ID.
- """
-
- def __init__(
- self,
- adb: AdbProxy,
- ports: Sl4aPorts,
- client_socket,
- socket_fd,
- uid=UNKNOWN_UID,
- ):
- self._client_socket = client_socket
- self._socket_file = socket_fd
- self._ticket_counter = 0
- self._ticket_lock = threading.Lock()
- self.adb = adb
- self.uid = uid
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Client|{self.adb.serial}|{ports.client_port}|{self.uid}]",
- },
- )
-
- self.ports = ports
- self.set_timeout(rpc_client.SOCKET_TIMEOUT)
-
- def open(self):
- if self.uid != UNKNOWN_UID:
- start_command = Sl4aConnectionCommand.CONTINUE
- else:
- start_command = Sl4aConnectionCommand.INIT
-
- self._initiate_handshake(start_command)
-
- def _initiate_handshake(self, start_command):
- """Establishes a connection with the SL4A server.
-
- Args:
- start_command: The command to send. See Sl4aConnectionCommand.
- """
- try:
- resp = self._cmd(start_command)
- except socket.timeout as e:
- self.log.error("Failed to open socket connection: %s", e)
- raise
- if not resp:
- raise rpc_client.Sl4aProtocolError(
- rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE
- )
- result = json.loads(str(resp, encoding="utf8"))
- if result["status"]:
- self.uid = result["uid"]
- else:
- self.log.warning(f"UID not received for connection {self.ports}.")
- self.uid = UNKNOWN_UID
- self.log.debug(f"Created connection over: {self.ports}.")
-
- def _cmd(self, command):
- """Sends an session protocol command to SL4A to establish communication.
-
- Args:
- command: The name of the command to execute.
-
- Returns:
- The line that was written back.
- """
- self.send_request(json.dumps({"cmd": command, "uid": self.uid}))
- return self.get_response()
-
- def get_new_ticket(self):
- """Returns a ticket for a new request."""
- with self._ticket_lock:
- self._ticket_counter += 1
- ticket = self._ticket_counter
- return ticket
-
- def set_timeout(self, timeout):
- """Sets the socket's wait for response timeout."""
- self._client_socket.settimeout(timeout)
-
- def send_request(self, request):
- """Sends a request over the connection."""
- self._socket_file.write(request.encode("utf8") + b"\n")
- self._socket_file.flush()
- self.log.debug(f"Sent: {request}")
-
- def get_response(self):
- """Returns the first response sent back to the client."""
- data = self._socket_file.readline()
-        decoded = data.decode("utf8", errors="replace")
-        self.log.debug(f"Received: {decoded}")
- return data
-
- def close(self):
- """Closes the connection gracefully."""
- self._client_socket.close()
- self.adb.remove_tcp_forward(self.ports.forwarded_port)
diff --git a/packages/antlion/controllers/sl4a_lib/sl4a_manager.py b/packages/antlion/controllers/sl4a_lib/sl4a_manager.py
deleted file mode 100644
index 018a5c9..0000000
--- a/packages/antlion/controllers/sl4a_lib/sl4a_manager.py
+++ /dev/null
@@ -1,346 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, var-annotated"
-import logging
-import threading
-import time
-
-from mobly import logger
-
-from antlion.controllers.sl4a_lib import (
- error_reporter,
- rpc_client,
- sl4a_session,
-)
-
-ATTEMPT_INTERVAL = 0.25
-MAX_WAIT_ON_SERVER_SECONDS = 5
-
-SL4A_PKG_NAME = "com.googlecode.android_scripting"
-
-_SL4A_LAUNCH_SERVER_CMD = (
- "am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER "
- "--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s "
- "com.googlecode.android_scripting/.service.ScriptingLayerService"
-)
-
-_SL4A_CLOSE_SERVER_CMD = (
- "am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS "
- "--ei com.googlecode.android_scripting.extra.PROXY_PORT %s "
- "com.googlecode.android_scripting/.service.ScriptingLayerService"
-)
-
-# The command for finding SL4A's server port as root.
-_SL4A_ROOT_FIND_PORT_CMD = (
- # Get all open, listening ports, and their process names
- "ss -l -p -n | "
- # Find all open TCP ports for SL4A
- 'grep "tcp.*droid_scripting" | '
- # Shorten all whitespace to a single space character
- 'tr -s " " | '
- # Grab the 5th column (which is server:port)
- 'cut -d " " -f 5 |'
- # Only grab the port
- "sed s/.*://g"
-)
-
-# The command for finding SL4A's server port without root.
-_SL4A_USER_FIND_PORT_CMD = (
- # Get all open, listening ports, and their process names
- "ss -l -p -n | "
- # Find all open ports exposed to the public. This can produce false
- # positives since users cannot read the process associated with the port.
-    'grep -e "tcp.*::ffff:127\\.0\\.0\\.1:" | '
- # Shorten all whitespace to a single space character
- 'tr -s " " | '
- # Grab the 5th column (which is server:port)
- 'cut -d " " -f 5 |'
- # Only grab the port
- "sed s/.*://g"
-)
-
-# The command that begins the SL4A ScriptingLayerService.
-_SL4A_START_SERVICE_CMD = (
- "am startservice "
- "com.googlecode.android_scripting/.service.ScriptingLayerService"
-)
-
-# Maps device serials to their SL4A Manager. This is done to prevent multiple
-# Sl4aManagers from existing for the same device.
-_all_sl4a_managers = {}
-
-
-def create_sl4a_manager(adb):
- """Creates and returns an SL4AManager for the given device.
-
- Args:
- adb: A reference to the device's AdbProxy.
- """
- if adb.serial in _all_sl4a_managers:
- _all_sl4a_managers[adb.serial].log.warning(
- "Attempted to return multiple SL4AManagers on the same device. "
- "Returning pre-existing SL4AManager instead."
- )
- return _all_sl4a_managers[adb.serial]
- else:
- manager = Sl4aManager(adb)
- _all_sl4a_managers[adb.serial] = manager
- return manager
-
-
-class Sl4aManager(object):
- """A manager for SL4A Clients to a given AndroidDevice.
-
- SL4A is a single APK that can host multiple RPC servers at a time. This
- class manages each server connection over ADB, and will gracefully
- terminate the apk during cleanup.
-
- Attributes:
- _listen_for_port_lock: A lock for preventing multiple threads from
- potentially mixing up requested ports.
- _sl4a_ports: A set of all known SL4A server ports in use.
- adb: A reference to the AndroidDevice's AdbProxy.
- log: The logger for this object.
- sessions: A dictionary of session_ids to sessions.
- """
-
- def __init__(self, adb):
- self._listen_for_port_lock = threading.Lock()
- self._sl4a_ports = set()
- self.adb = adb
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Manager|{adb.serial}]",
- },
- )
-
- self.sessions = {}
- self._started = False
- self.error_reporter = error_reporter.ErrorReporter(f"SL4A {adb.serial}")
-
- @property
- def sl4a_ports_in_use(self):
- """Returns a list of all server ports used by SL4A servers."""
- return set([session.server_port for session in self.sessions.values()])
-
- def diagnose_failure(self, session, connection):
- """Diagnoses all potential known reasons SL4A can fail.
-
- Assumes the failure happened on an RPC call, which verifies the state
- of ADB/device."""
- self.error_reporter.create_error_report(self, session, connection)
-
- def start_sl4a_server(self, device_port, try_interval=ATTEMPT_INTERVAL):
- """Opens a server socket connection on SL4A.
-
- Args:
- device_port: The expected port for SL4A to open on. Note that in
- many cases, this will be different than the port returned by
- this method.
- try_interval: The amount of seconds between attempts at finding an
- opened port on the AndroidDevice.
-
- Returns:
- The port number on the device the SL4A server is open on.
-
- Raises:
- Sl4aConnectionError if SL4A's opened port cannot be found.
- """
- # Launch a server through SL4A.
- self.adb.shell(_SL4A_LAUNCH_SERVER_CMD % device_port)
-
- # There is a chance that the server has not come up yet by the time the
-        # launch command has finished. Try to get the listening port again
- # after a small amount of time.
- time_left = MAX_WAIT_ON_SERVER_SECONDS
- while time_left > 0:
- port = self._get_open_listening_port()
- if port is None:
- time.sleep(try_interval)
- time_left -= try_interval
- else:
- return port
-
- raise rpc_client.Sl4aConnectionError(
- "Unable to find a valid open port for a new server connection. "
- "Expected port: %s. Open ports: %s"
- % (device_port, self._sl4a_ports)
- )
-
- def _get_all_ports_command(self):
- """Returns the list of all ports from the command to get ports."""
- is_root = True
- if not self.adb.is_root():
- is_root = self.adb.ensure_root()
-
- if is_root:
- return _SL4A_ROOT_FIND_PORT_CMD
- else:
- # TODO(markdr): When root is unavailable, search logcat output for
- # the port the server has opened.
- self.log.warning(
- "Device cannot be put into root mode. SL4A "
- "server connections cannot be verified."
- )
- return _SL4A_USER_FIND_PORT_CMD
-
- def _get_all_ports(self):
- return self.adb.shell(self._get_all_ports_command()).split()
-
- def _get_open_listening_port(self):
- """Returns any open, listening port found for SL4A.
-
-        Returns None if no port is found.
- """
- possible_ports = self._get_all_ports()
- self.log.debug(f"SL4A Ports found: {possible_ports}")
-
- # Acquire the lock. We lock this method because if multiple threads
- # attempt to get a server at the same time, they can potentially find
- # the same port as being open, and both attempt to connect to it.
- with self._listen_for_port_lock:
- for port in possible_ports:
- if port not in self._sl4a_ports:
- self._sl4a_ports.add(port)
- return int(port)
- return None
-
- def is_sl4a_installed(self):
- """Returns True if SL4A is installed on the AndroidDevice."""
- return bool(
- self.adb.shell(f"pm path {SL4A_PKG_NAME}", ignore_status=True)
- )
-
- def start_sl4a_service(self):
- """Starts the SL4A Service on the device.
-
- For starting an RPC server, use start_sl4a_server() instead.
- """
- # Verify SL4A is installed.
- if not self._started:
- self._started = True
- if not self.is_sl4a_installed():
- raise rpc_client.Sl4aNotInstalledError(
- f"SL4A is not installed on device {self.adb.serial}"
- )
- if self.adb.shell(f'(ps | grep "S {SL4A_PKG_NAME}") || true'):
- # Close all SL4A servers not opened by this manager.
- # TODO(markdr): revert back to closing all ports after
- # b/76147680 is resolved.
- self.adb.shell(f"kill -9 $(pidof {SL4A_PKG_NAME})")
- self.adb.shell(
- 'settings put global hidden_api_blacklist_exemptions "*"'
- )
- # Start the service if it is not up already.
- self.adb.shell(_SL4A_START_SERVICE_CMD)
-
- def obtain_sl4a_server(self, server_port):
- """Obtain an SL4A server port.
-
-        If the port is open and valid, return it. Otherwise, open a new server
- with the hinted server_port.
- """
- if server_port not in self.sl4a_ports_in_use:
- return self.start_sl4a_server(server_port)
- else:
- return server_port
-
- def create_session(
- self,
- max_connections=None,
- client_port=0,
- forwarded_port=0,
- server_port=None,
- ):
- """Creates an SL4A server with the given ports if possible.
-
- The ports are not guaranteed to be available for use. If the port
- asked for is not available, this will be logged, and the port will
- be randomized.
-
- Args:
- client_port: The client port on the host machine
- forwarded_port: The server port on the host machine forwarded
- by adb from the Android device
- server_port: The port on the Android device.
- max_connections: The max number of client connections for the
- session.
-
- Returns:
-            A new Sl4aSession instance.
- """
- if server_port is None:
- # If a session already exists, use the same server.
- if len(self.sessions) > 0:
- server_port = self.sessions[
- sorted(self.sessions.keys())[0]
- ].server_port
- # Otherwise, open a new server on a random port.
- else:
- server_port = 0
- self.log.debug(
- "Creating SL4A session client_port={}, forwarded_port={}, server_port={}".format(
- client_port, forwarded_port, server_port
- )
- )
- self.start_sl4a_service()
- session = sl4a_session.Sl4aSession(
- self.adb,
- client_port,
- server_port,
- self.obtain_sl4a_server,
- self.diagnose_failure,
- forwarded_port,
- max_connections=max_connections,
- )
- self.sessions[session.uid] = session
- return session
-
- def stop_service(self):
- """Stops The SL4A Service. Force-stops the SL4A apk."""
- try:
- self.adb.shell(f"am force-stop {SL4A_PKG_NAME}", ignore_status=True)
- except Exception as e:
- self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e)
- self._started = False
-
- def terminate_all_sessions(self):
- """Terminates all SL4A sessions gracefully."""
- self.error_reporter.finalize_reports()
- for _, session in self.sessions.items():
- session.terminate()
- self.sessions = {}
- self._close_all_ports()
-
- def _close_all_ports(self, try_interval=ATTEMPT_INTERVAL):
- """Closes all ports opened on SL4A."""
- ports = self._get_all_ports()
- for port in set.union(self._sl4a_ports, ports):
- self.adb.shell(_SL4A_CLOSE_SERVER_CMD % port)
- time_left = MAX_WAIT_ON_SERVER_SECONDS
- while time_left > 0 and self._get_open_listening_port():
- time.sleep(try_interval)
- time_left -= try_interval
-
- if time_left <= 0:
- self.log.warning(
- "Unable to close all un-managed servers! Server ports that are "
- "still open are %s" % self._get_open_listening_port()
- )
- self._sl4a_ports = set()
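
For context, a sketch of the manager lifecycle implied by this module, assuming
adb is an AdbProxy for a connected device; the RPC name is illustrative only:

    manager = create_sl4a_manager(adb)  # reuses any existing manager for this serial
    session = manager.create_session()  # starts the SL4A service and opens a server
    try:
        session.rpc_client.makeToast("hello")  # hypothetical SL4A RPC
    finally:
        manager.terminate_all_sessions()
        manager.stop_service()
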
diff --git a/packages/antlion/controllers/sl4a_lib/sl4a_ports.py b/packages/antlion/controllers/sl4a_lib/sl4a_ports.py
deleted file mode 100644
index 2816685..0000000
--- a/packages/antlion/controllers/sl4a_lib/sl4a_ports.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class Sl4aPorts(object):
- """A container for the three ports needed for an SL4A connection.
-
- Attributes:
- client_port: The port on the host associated with the SL4A client
- forwarded_port: The port forwarded to the Android device.
- server_port: The port on the device associated with the SL4A server.
- """
-
- def __init__(
- self,
- client_port: int = 0,
- forwarded_port: int = 0,
- server_port: int = 0,
- ) -> None:
- self.client_port = client_port
- self.forwarded_port = forwarded_port
- self.server_port = server_port
-
- def __str__(self) -> str:
- return (
- f"({self.client_port}, {self.forwarded_port}, {self.server_port})"
- )
diff --git a/packages/antlion/controllers/sl4a_lib/sl4a_session.py b/packages/antlion/controllers/sl4a_lib/sl4a_session.py
deleted file mode 100644
index 1c8de1e..0000000
--- a/packages/antlion/controllers/sl4a_lib/sl4a_session.py
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import errno
-import logging
-import socket
-import threading
-
-from mobly import logger
-
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.sl4a_lib import (
- event_dispatcher,
- rpc_client,
- rpc_connection,
- sl4a_ports,
-)
-from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
-
-SOCKET_TIMEOUT = 60
-
-# The SL4A Session UID when a UID has not been received yet.
-UNKNOWN_UID = -1
-
-
-class Sl4aSession(object):
- """An object that tracks the state of an SL4A Session.
-
- Attributes:
- _event_dispatcher: The EventDispatcher instance, if any, for this
- session.
- _terminate_lock: A lock that prevents race conditions for multiple
- threads calling terminate()
- _terminated: A bool that stores whether or not this session has been
- terminated. Terminated sessions cannot be restarted.
- adb: A reference to the AndroidDevice's AdbProxy.
- log: The logger for this Sl4aSession
- server_port: The SL4A server port this session is established on.
-        uid: The uid that corresponds to the SL4A Server's session id. This
- value is only unique during the lifetime of the SL4A apk.
- """
-
- def __init__(
- self,
- adb,
- host_port,
- device_port,
- get_server_port_func,
- on_error_callback,
- forwarded_port=0,
- max_connections=None,
- ):
- """Creates an SL4A Session.
-
- Args:
- adb: A reference to the adb proxy
- get_server_port_func: A lambda (int) that returns the corrected
- server port. The int passed in hints at which port to use, if
- possible.
- host_port: The port the host machine uses to connect to the SL4A
- server for its first connection.
- device_port: The SL4A server port to be used as a hint for which
- SL4A server to connect to.
- forwarded_port: The server port on host machine forwarded by adb
- from Android device to accept SL4A connection
- """
- self._event_dispatcher = None
- self._terminate_lock = threading.Lock()
- self._terminated = False
- self.adb = adb
- self.uid = UNKNOWN_UID
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Session|{self.adb.serial}|{self.uid}]",
- },
- )
-
- self.forwarded_port = forwarded_port
- self.server_port = device_port
- self.obtain_server_port = get_server_port_func
- self._on_error_callback = on_error_callback
-
- connection_creator = self._rpc_connection_creator(host_port)
- self.rpc_client = rpc_client.RpcClient(
- self.uid,
- self.adb.serial,
- self.diagnose_failure,
- connection_creator,
- max_connections=max_connections,
- )
-
- def _rpc_connection_creator(self, host_port):
- def create_client(uid):
- return self._create_rpc_connection(
- ports=sl4a_ports.Sl4aPorts(
- host_port, self.forwarded_port, self.server_port
- ),
- uid=uid,
- )
-
- return create_client
-
- @property
- def is_alive(self):
- return not self._terminated
-
- def _create_forwarded_port(self, server_port, hinted_port=0):
- """Creates a forwarded port to the specified server port.
-
- Args:
- server_port: (int) The port to forward to.
- hinted_port: (int) The port to use for forwarding, if available.
- Otherwise, the chosen port will be random.
- Returns:
- The chosen forwarded port.
-
-        Raises Sl4aStartError if ADB is too old, or AdbError if the command fails.
- """
- if self.adb.get_version_number() < 37 and hinted_port == 0:
- self.log.error(
- "The current version of ADB does not automatically provide a "
- "port to forward. Please upgrade ADB to version 1.0.37 or "
- "higher."
- )
- raise Sl4aStartError("Unable to forward a port to the device.")
- else:
- try:
- return self.adb.tcp_forward(hinted_port, server_port)
- except AdbError as e:
- if "cannot bind listener" in e.stderr:
- self.log.warning(
- "Unable to use %s to forward to device port %s due to: "
- '"%s". Attempting to choose a random port instead.'
- % (hinted_port, server_port, e.stderr)
- )
- # Call this method again, but this time with no hinted port.
- return self._create_forwarded_port(server_port)
- raise e
-
- def _create_rpc_connection(self, ports=None, uid=UNKNOWN_UID):
- """Creates an RPC Connection with the specified ports.
-
- Args:
- ports: A Sl4aPorts object or a tuple of (host/client_port,
- forwarded_port, device/server_port). If any of these are
- zero, the OS will determine their values during connection.
-
- Note that these ports are only suggestions. If they are not
-            available, a different port will be selected.
- uid: The UID of the SL4A Session. To create a new session, use
- UNKNOWN_UID.
- Returns:
-            An opened RpcConnection.
- """
- if ports is None:
- ports = sl4a_ports.Sl4aPorts(0, 0, 0)
- # Open a new server if a server cannot be inferred.
- ports.server_port = self.obtain_server_port(ports.server_port)
- self.server_port = ports.server_port
- # Forward the device port to the host.
- ports.forwarded_port = self._create_forwarded_port(
- ports.server_port, hinted_port=ports.forwarded_port
- )
- client_socket, fd = self._create_client_side_connection(ports)
- client = rpc_connection.RpcConnection(
- self.adb, ports, client_socket, fd, uid=uid
- )
- client.open()
- if uid == UNKNOWN_UID:
- self.uid = client.uid
- return client
-
- def diagnose_failure(self, connection):
- """Diagnoses any problems related to the SL4A session."""
- self._on_error_callback(self, connection)
-
- def get_event_dispatcher(self):
- """Returns the EventDispatcher for this Sl4aSession."""
- if self._event_dispatcher is None:
- self._event_dispatcher = event_dispatcher.EventDispatcher(
- self.adb.serial, self.rpc_client
- )
- return self._event_dispatcher
-
- def _create_client_side_connection(self, ports):
- """Creates and connects the client socket to the forward device port.
-
- Args:
- ports: A Sl4aPorts object or a tuple of (host_port,
- forwarded_port, device_port).
-
- Returns:
- A tuple of (socket, socket_file_descriptor).
- """
- client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- client_socket.settimeout(SOCKET_TIMEOUT)
- client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- if ports.client_port != 0:
- try:
- client_socket.bind((socket.gethostname(), ports.client_port))
- except OSError as e:
- # If the port is in use, log and ask for any open port.
- if e.errno == errno.EADDRINUSE:
- self.log.warning(
- "Port %s is already in use on the host. "
- "Generating a random port." % ports.client_port
- )
- ports.client_port = 0
- return self._create_client_side_connection(ports)
- raise
-
- # Verify and obtain the port opened by SL4A.
- try:
- # Connect to the port that has been forwarded to the device.
- client_socket.connect(("127.0.0.1", ports.forwarded_port))
- except socket.timeout:
- raise rpc_client.Sl4aConnectionError(
- "SL4A has not connected over the specified port within the "
- "timeout of %s seconds." % SOCKET_TIMEOUT
- )
- except socket.error as e:
- # In extreme, unlikely cases, a socket error with
- # errno.EADDRNOTAVAIL can be raised when a desired host_port is
- # taken by a separate program between the bind and connect calls.
- # Note that if host_port is set to zero, there is no bind before
- # the connection is made, so this error will never be thrown.
- if e.errno == errno.EADDRNOTAVAIL:
- ports.client_port = 0
- return self._create_client_side_connection(ports)
- raise
- ports.client_port = client_socket.getsockname()[1]
- return client_socket, client_socket.makefile(mode="brw")
-
- def terminate(self):
- """Terminates the session.
-
- The return of process execution is blocked on completion of all events
- being processed by handlers in the Event Dispatcher.
- """
- with self._terminate_lock:
- if not self._terminated:
- self.log.debug("Terminating Session.")
- try:
- self.rpc_client.closeSl4aSession()
- except Exception as e:
- if "SL4A session has already been terminated" not in str(e):
- self.log.warning(e)
- # Must be set after closeSl4aSession so the rpc_client does not
- # think the session has closed.
- self._terminated = True
- if self._event_dispatcher:
- try:
- self._event_dispatcher.close()
- except Exception as e:
- self.log.warning(e)
- try:
- self.rpc_client.terminate()
- except Exception as e:
- self.log.warning(e)
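
A sketch of the per-session teardown pattern this class expects; terminate()
closes the SL4A session, its event dispatcher, and the underlying RPC
connections:

    session = manager.create_session()
    dispatcher = session.get_event_dispatcher()
    try:
        pass  # issue RPCs through session.rpc_client and poll dispatcher events
    finally:
        session.terminate()  # later RPC calls raise Sl4aStartError
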
diff --git a/packages/antlion/controllers/sniffer.py b/packages/antlion/controllers/sniffer.py
deleted file mode 100644
index 6c296f1..0000000
--- a/packages/antlion/controllers/sniffer.py
+++ /dev/null
@@ -1,297 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import importlib
-import logging
-
-MOBLY_CONTROLLER_CONFIG_NAME = "Sniffer"
-ACTS_CONTROLLER_REFERENCE_NAME = "sniffers"
-
-
-def create(configs):
- """Initializes the sniffer structures based on the JSON configuration. The
- expected keys are:
-
- Type: A first-level type of sniffer. Planned to be 'local' for sniffers
- running on the local machine, or 'remote' for sniffers running
- remotely.
- SubType: The specific sniffer type to be used.
- Interface: The WLAN interface used to configure the sniffer.
- BaseConfigs: A dictionary specifying baseline configurations of the
- sniffer. Configurations can be overridden when starting a capture.
- The keys must be one of the Sniffer.CONFIG_KEY_* values.
- """
- objs = []
- for c in configs:
- sniffer_type = c["Type"]
- sniffer_subtype = c["SubType"]
- interface = c["Interface"]
- base_configs = c["BaseConfigs"]
- module_name = "antlion.controllers.sniffer_lib.{}.{}".format(
- sniffer_type, sniffer_subtype
- )
- module = importlib.import_module(module_name)
- objs.append(
- module.Sniffer(
- interface, logging.getLogger(), base_configs=base_configs
- )
- )
- return objs
-
-
-def destroy(objs):
- """Destroys the sniffers and terminates any ongoing capture sessions."""
- for sniffer in objs:
- try:
- sniffer.stop_capture()
- except SnifferError:
- pass
-
-
-class SnifferError(Exception):
- """This is the Exception class defined for all errors generated by
- Sniffer-related modules.
- """
-
-
-class InvalidDataError(Exception):
- """This exception is thrown when invalid configuration data is passed
- to a method.
- """
-
-
-class ExecutionError(SnifferError):
- """This exception is thrown when trying to configure the capture device
- or when trying to execute the capture operation.
-
- When this exception is seen, it is possible that the sniffer module is run
- without sudo (for local sniffers) or keys are out-of-date (for remote
- sniffers).
- """
-
-
-class InvalidOperationError(SnifferError):
- """Certain methods may only be accessed when the instance upon which they
- are invoked is in a certain state. This indicates that the object is not
- in the correct state for a method to be called.
- """
-
-
-class Sniffer(object):
- """This class defines an object representing a sniffer.
-
- The object defines the generic behavior of sniffers - irrespective of how
- they are implemented, or where they are located: on the local machine or on
- the remote machine.
- """
-
- CONFIG_KEY_CHANNEL = "channel"
-
- def __init__(self, interface, logger, base_configs=None):
- """The constructor for the Sniffer. It constructs a sniffer and
- configures it to be ready for capture.
-
- Args:
- interface: A string specifying the interface used to configure the
- sniffer.
- logger: ACTS logger object.
- base_configs: A dictionary containing baseline configurations of the
-                sniffer. These can be overridden when starting a capture. The
- keys are specified by Sniffer.CONFIG_KEY_*.
-
- Returns:
- self: A configured sniffer.
-
- Raises:
- InvalidDataError: if the config_path is invalid.
- NoPermissionError: if an error occurs while configuring the
- sniffer.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def get_descriptor(self):
- """This function returns a string describing the sniffer. The specific
- string (and its format) is up to each derived sniffer type.
-
- Returns:
- A string describing the sniffer.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def get_type(self):
- """This function returns the type of the sniffer.
-
- Returns:
- The type (string) of the sniffer. Corresponds to the 'Type' key of
- the sniffer configuration.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def get_subtype(self):
- """This function returns the sub-type of the sniffer.
-
- Returns:
- The sub-type (string) of the sniffer. Corresponds to the 'SubType'
- key of the sniffer configuration.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def get_interface(self):
- """This function returns The interface used to configure the sniffer,
- e.g. 'wlan0'.
-
- Returns:
- The interface (string) used to configure the sniffer. Corresponds to
- the 'Interface' key of the sniffer configuration.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def get_capture_file(self):
- """The sniffer places a capture in the logger directory. This function
- enables the caller to obtain the path of that capture.
-
- Returns:
- The full path of the current or last capture.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def start_capture(
- self,
- override_configs=None,
- additional_args=None,
- duration=None,
- packet_count=None,
- ):
- """This function starts a capture which is saved to the specified file
- path.
-
- Depending on the type/subtype and configuration of the sniffer the
- capture may terminate on its own or may require an explicit call to the
- stop_capture() function.
-
- This is a non-blocking function so a terminating function must be
- called - either explicitly or implicitly:
- - Explicitly: call either stop_capture() or wait_for_capture()
- - Implicitly: use with a with clause. The wait_for_capture() function
- will be called if a duration is specified (i.e. is not
- None), otherwise a stop_capture() will be called.
-
- The capture is saved to a file in the log path of the logger. Use
- the get_capture_file() to get the full path to the current or most
- recent capture.
-
- Args:
- override_configs: A dictionary which is combined with the
- base_configs ("BaseConfigs" in the sniffer configuration). The
- keys (specified by Sniffer.CONFIG_KEY_*) determine the
- configuration of the sniffer for this specific capture.
- additional_args: A string specifying additional raw
- command-line arguments to pass to the underlying sniffer. The
- interpretation of these flags is sniffer-dependent.
- duration: An integer specifying the number of seconds over which to
- capture packets. The sniffer will be terminated after this
- duration. Used in implicit mode when using a 'with' clause. In
-                explicit control cases, termination may need a sleep followed by
-                stop_capture(), or this value passed as wait_for_capture()'s timeout.
- packet_count: An integer specifying the number of packets to capture
- before terminating. Should be used with duration to guarantee
- that capture terminates at some point (even if did not capture
- the specified number of packets).
-
- Returns:
- An ActiveCaptureContext process which can be used with a 'with'
- clause.
-
- Raises:
- InvalidDataError: for invalid configurations
- NoPermissionError: if an error occurs while configuring and running
- the sniffer.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def stop_capture(self):
- """This function stops a capture and guarantees that the capture is
- saved to the capture file configured during the start_capture() method.
- Depending on the type of the sniffer the file may previously contain
- partial results (e.g. for a local sniffer) or may not exist until the
- stop_capture() method is executed (e.g. for a remote sniffer).
-
- Depending on the type/subtype and configuration of the sniffer the
- capture may terminate on its own without requiring a call to this
- function. In such a case it is still necessary to call either this
- function or the wait_for_capture() function to make sure that the
- capture file is moved to the correct location.
-
- Raises:
- NoPermissionError: No permission when trying to stop a capture
- and save the capture file.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def wait_for_capture(self, timeout=None):
- """This function waits for a capture to terminate and guarantees that
- the capture is saved to the capture file configured during the
- start_capture() method. Depending on the type of the sniffer the file
- may previously contain partial results (e.g. for a local sniffer) or
- may not exist until the stop_capture() method is executed (e.g. for a
- remote sniffer).
-
- Depending on the type/subtype and configuration of the sniffer the
- capture may terminate on its own without requiring a call to this
- function. In such a case it is still necessary to call either this
- function or the stop_capture() function to make sure that the capture
- file is moved to the correct location.
-
- Args:
- timeout: An integer specifying the number of seconds to wait for
- the capture to terminate on its own. On expiration of the
- timeout the sniffer is stopped explicitly using the
- stop_capture() function.
-
- Raises:
- NoPermissionError: No permission when trying to stop a capture and
- save the capture file.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
-
-class ActiveCaptureContext(object):
- """This class defines an object representing an active sniffer capture.
-
- The object is returned by a Sniffer.start_capture() command and terminates
- the capture when the 'with' clause exits. It is syntactic sugar for
- try/finally.
- """
-
- _sniffer = None
- _timeout = None
-
- def __init__(self, sniffer, timeout=None):
- self._sniffer = sniffer
- self._timeout = timeout
-
- def __enter__(self):
- pass
-
- def __exit__(self, type, value, traceback):
- if self._sniffer is not None:
- if self._timeout is None:
- self._sniffer.stop_capture()
- else:
- self._sniffer.wait_for_capture(self._timeout)
- self._sniffer = None
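
A sketch of the controller configuration and the implicit-capture idiom that
start_capture() describes; the interface name and channel are placeholders, and
the local tcpdump sniffer generally requires root privileges:

    configs = [
        {
            "Type": "local",
            "SubType": "tcpdump",
            "Interface": "wlan0",
            "BaseConfigs": {"channel": 6},  # Sniffer.CONFIG_KEY_CHANNEL
        }
    ]
    sniffers = create(configs)
    # Exiting the 'with' block waits for the capture (duration given) or stops it.
    with sniffers[0].start_capture(duration=30):
        pass  # drive traffic here
    pcap_path = sniffers[0].get_capture_file()
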
diff --git a/packages/antlion/controllers/sniffer_lib/__init__.py b/packages/antlion/controllers/sniffer_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/sniffer_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/sniffer_lib/local/__init__.py b/packages/antlion/controllers/sniffer_lib/local/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/sniffer_lib/local/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/sniffer_lib/local/local_base.py b/packages/antlion/controllers/sniffer_lib/local/local_base.py
deleted file mode 100644
index 345230a..0000000
--- a/packages/antlion/controllers/sniffer_lib/local/local_base.py
+++ /dev/null
@@ -1,168 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for Local sniffers - i.e. running on the local machine.
-
-This class provides configuration for local interfaces but leaves
-the actual capture (sniff) to sub-classes.
-"""
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import os
-import shutil
-import signal
-import subprocess
-import tempfile
-
-from mobly import logger
-
-from antlion import utils
-from antlion.controllers import sniffer
-
-
-class SnifferLocalBase(sniffer.Sniffer):
- """This class defines the common behaviors of WLAN sniffers running on
- WLAN interfaces of the local machine.
-
- Specific mechanisms to capture packets over the local WLAN interfaces are
- implemented by sub-classes of this class - i.e. it is not a final class.
- """
-
- def __init__(self, interface, logger, base_configs=None):
- """See base class documentation"""
- self._base_configs = None
- self._capture_file_path = ""
- self._interface = ""
- self._logger = logger
- self._process = None
- self._temp_capture_file_path = ""
-
- if interface == "":
- raise sniffer.InvalidDataError("Empty interface provided")
- self._interface = interface
- self._base_configs = base_configs
-
- try:
- utils.exe_cmd("ifconfig", self._interface, "down")
- utils.exe_cmd("iwconfig", self._interface, "mode", "monitor")
- utils.exe_cmd("ifconfig", self._interface, "up")
- except Exception as err:
- raise sniffer.ExecutionError(err)
-
- def get_interface(self):
- """See base class documentation"""
- return self._interface
-
- def get_type(self):
- """See base class documentation"""
- return "local"
-
- def get_capture_file(self):
- return self._capture_file_path
-
- def _pre_capture_config(self, override_configs=None):
- """Utility function which configures the wireless interface per the
- specified configurations. Operation is performed before every capture
-        start, using the baseline configurations (specified when the sniffer was
-        initialized) combined with any override configurations specified here.
- """
- final_configs = {}
- if self._base_configs:
- final_configs.update(self._base_configs)
- if override_configs:
- final_configs.update(override_configs)
-
- if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
- try:
- utils.exe_cmd(
- "iwconfig",
- self._interface,
- "channel",
- str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]),
- )
- except Exception as err:
- raise sniffer.ExecutionError(err)
-
- def _get_command_line(
- self, additional_args=None, duration=None, packet_count=None
- ):
- """Utility function to be implemented by every child class - which
- are the concrete sniffer classes. Each sniffer-specific class should
- derive the command line to execute its sniffer based on the specified
- arguments.
- """
- raise NotImplementedError("Base class should not be called directly!")
-
- def _post_process(self):
- """Utility function which is executed after a capture is done. It
- moves the capture file to the requested location.
- """
- self._process = None
- shutil.move(self._temp_capture_file_path, self._capture_file_path)
-
- def start_capture(
- self,
- override_configs=None,
- additional_args=None,
- duration=None,
- packet_count=None,
- ):
- """See base class documentation"""
- if self._process is not None:
- raise sniffer.InvalidOperationError(
- "Trying to start a sniff while another is still running!"
- )
- capture_dir = os.path.join(
- self._logger.log_path, f"Sniffer-{self._interface}"
- )
- os.makedirs(capture_dir, exist_ok=True)
- self._capture_file_path = os.path.join(
- capture_dir, f"capture_{logger.get_log_file_timestamp()}.pcap"
- )
-
- self._pre_capture_config(override_configs)
- _, self._temp_capture_file_path = tempfile.mkstemp(suffix=".pcap")
-
- cmd = self._get_command_line(
- additional_args=additional_args,
- duration=duration,
- packet_count=packet_count,
- )
-
- self._process = utils.start_standing_subprocess(cmd)
- return sniffer.ActiveCaptureContext(self, duration)
-
- def stop_capture(self):
- """See base class documentation"""
- if self._process is None:
- raise sniffer.InvalidOperationError(
- "Trying to stop a non-started process"
- )
- utils.stop_standing_subprocess(self._process, kill_signal=signal.SIGINT)
- self._post_process()
-
- def wait_for_capture(self, timeout=None):
- """See base class documentation"""
- if self._process is None:
- raise sniffer.InvalidOperationError(
- "Trying to wait on a non-started process"
- )
- try:
- utils.wait_for_standing_subprocess(self._process, timeout)
- self._post_process()
- except subprocess.TimeoutExpired:
- self.stop_capture()
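For reference, the capture lifecycle implemented above (configure, start, then
stop or wait) is typically driven as in this rough sketch; the interface name
and the Mobly logger are placeholders, and the tcpdump back-end defined in the
next file provides the concrete Sniffer:

    from antlion.controllers import sniffer
    from antlion.controllers.sniffer_lib.local import tcpdump

    # "wlan0" and logging_instance are hypothetical stand-ins.
    local_sniffer = tcpdump.Sniffer("wlan0", logging_instance)
    with local_sniffer.start_capture(
        override_configs={sniffer.Sniffer.CONFIG_KEY_CHANNEL: 6}, duration=30
    ):
        pass  # generate the traffic of interest here
    print(local_sniffer.get_capture_file())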
diff --git a/packages/antlion/controllers/sniffer_lib/local/tcpdump.py b/packages/antlion/controllers/sniffer_lib/local/tcpdump.py
deleted file mode 100644
index dca533a..0000000
--- a/packages/antlion/controllers/sniffer_lib/local/tcpdump.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import shutil
-
-from antlion.controllers import sniffer
-from antlion.controllers.sniffer_lib.local import local_base
-
-
-class Sniffer(local_base.SnifferLocalBase):
- """This class defines a sniffer which uses tcpdump as its back-end"""
-
- def __init__(self, config_path, logger, base_configs=None):
- """See base class documentation"""
- self._executable_path = None
-
- super().__init__(config_path, logger, base_configs=base_configs)
-
- self._executable_path = shutil.which("tcpdump")
- if self._executable_path is None:
- raise sniffer.SnifferError(
- "Cannot find a path to the 'tcpdump' executable"
- )
-
- def get_descriptor(self):
- """See base class documentation"""
- return f"local-tcpdump-{self._interface}"
-
- def get_subtype(self):
- """See base class documentation"""
- return "tcpdump"
-
- def _get_command_line(
- self, additional_args=None, duration=None, packet_count=None
- ):
- cmd = "{} -i {} -w {}".format(
- self._executable_path, self._interface, self._temp_capture_file_path
- )
- if packet_count is not None:
- cmd = f"{cmd} -c {packet_count}"
- if additional_args is not None:
- cmd = f"{cmd} {additional_args}"
- return cmd
diff --git a/packages/antlion/controllers/sniffer_lib/local/tshark.py b/packages/antlion/controllers/sniffer_lib/local/tshark.py
deleted file mode 100644
index eb688fb..0000000
--- a/packages/antlion/controllers/sniffer_lib/local/tshark.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import shutil
-
-from antlion.controllers import sniffer
-from antlion.controllers.sniffer_lib.local import local_base
-
-
-class Sniffer(local_base.SnifferLocalBase):
- """This class defines a sniffer which uses tshark as its back-end"""
-
- def __init__(self, config_path, logger, base_configs=None):
- """See base class documentation"""
- self._executable_path = None
-
- super().__init__(config_path, logger, base_configs=base_configs)
-
- self._executable_path = shutil.which("tshark") or shutil.which(
- "/usr/local/bin/tshark"
- )
- if self._executable_path is None:
- raise sniffer.SnifferError(
- "Cannot find a path to the 'tshark' "
- "executable (or to '/usr/local/bin/tshark')"
- )
-
- def get_descriptor(self):
- """See base class documentation"""
- return "local-tshark-{}-ch{}".format(self._interface) # type: ignore
-
- def get_subtype(self):
- """See base class documentation"""
- return "tshark"
-
- def _get_command_line(
- self, additional_args=None, duration=None, packet_count=None
- ):
- cmd = "{} -i {} -w {}".format(
- self._executable_path, self._interface, self._temp_capture_file_path
- )
- if duration is not None:
- cmd = f"{cmd} -a duration:{duration}"
- if packet_count is not None:
- cmd = f"{cmd} -c {packet_count}"
- if additional_args is not None:
- cmd = f"{cmd} {additional_args}"
- return cmd
diff --git a/packages/antlion/controllers/utils_lib/__init__.py b/packages/antlion/controllers/utils_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/utils_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/utils_lib/commands/__init__.py b/packages/antlion/controllers/utils_lib/commands/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/utils_lib/commands/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/utils_lib/commands/command.py b/packages/antlion/controllers/utils_lib/commands/command.py
deleted file mode 100644
index 3eb11b6..0000000
--- a/packages/antlion/controllers/utils_lib/commands/command.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-from typing import IO, Protocol, TypeVar
-
-from mobly import signals
-
-from antlion.runner import CalledProcessError, Runner
-
-
-class Command(Protocol):
- """A runnable binary."""
-
- def binary(self) -> str:
- """Return the binary used for this command."""
- ...
-
- def available(self) -> bool:
- """Return true if this command is available to run."""
- ...
-
-
-_C = TypeVar("_C", bound=Command)
-
-
-def require(command: _C) -> _C:
- """Require a command to be available."""
- if command.available():
- return command
- raise signals.TestAbortClass(
- f"Required command not found: {command.binary()}"
- )
-
-
-def optional(command: _C) -> _C | None:
- """Optionally require a command to be available."""
- if command.available():
- return command
- return None
-
-
-class LinuxCommand(Command):
- """A command running on a Linux machine."""
-
- def __init__(self, runner: Runner, binary: str) -> None:
- self._runner = runner
- self._binary = binary
- self._can_sudo = self._available("sudo")
-
- def binary(self) -> str:
- """Return the binary used for this command."""
- return self._binary
-
- def available(self) -> bool:
- """Return true if this command is available to run."""
- return self._available(self._binary)
-
- def _available(self, binary: str) -> bool:
- """Check if binary is available to run."""
- try:
- self._runner.run(["command", "-v", binary])
- except CalledProcessError:
- return False
- return True
-
- def _run(
- self,
- args: list[str],
- sudo: bool = False,
- timeout_sec: float | None = None,
- log_output: bool = True,
- ) -> subprocess.CompletedProcess[bytes]:
- """Run the command without having to specify the binary.
-
- Args:
- args: List of arguments to pass to the binary
- sudo: Use sudo to execute the binary, if available
- timeout_sec: Seconds to wait for command to finish
- log_output: If true, print stdout and stderr to the debug log.
- """
- if sudo and self._can_sudo:
- cmd = ["sudo", self._binary]
- else:
- cmd = [self._binary]
- return self._runner.run(
- cmd + args, timeout_sec=timeout_sec, log_output=log_output
- )
-
- def _start(
- self,
- args: list[str],
- sudo: bool = False,
- stdout: IO[bytes] | int = subprocess.PIPE,
- ) -> subprocess.Popen[bytes]:
- """Start the command without having to specify the binary.
-
- Args:
- args: List of arguments to pass to the binary
- sudo: Use sudo to execute the binary, if available
- """
- if sudo and self._can_sudo:
- cmd = ["sudo", self._binary]
- else:
- cmd = [self._binary]
- return self._runner.start(cmd + args, stdout)
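The wrappers in the rest of this directory follow the pattern above: subclass
LinuxCommand, pass the binary name to super().__init__, and build methods on
top of _run(). A rough sketch with a hypothetical ethtool wrapper:

    from antlion.controllers.utils_lib.commands.command import (
        LinuxCommand,
        optional,
        require,
    )
    from antlion.runner import Runner

    class LinuxEthtoolCommand(LinuxCommand):
        """Query NIC driver information (illustrative only)."""

        def __init__(self, runner: Runner, binary: str = "ethtool") -> None:
            super().__init__(runner, binary)

        def driver_info(self, device: str) -> str:
            # Runs "ethtool -i" for the device, with sudo when available.
            return self._run(["-i", device], sudo=True).stdout.decode("utf-8")

    # ethtool = require(LinuxEthtoolCommand(runner))   # abort the class if missing
    # ethtool = optional(LinuxEthtoolCommand(runner))  # or None if unavailable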
diff --git a/packages/antlion/controllers/utils_lib/commands/date.py b/packages/antlion/controllers/utils_lib/commands/date.py
deleted file mode 100644
index 84e628a..0000000
--- a/packages/antlion/controllers/utils_lib/commands/date.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-from antlion.controllers.utils_lib.commands.command import LinuxCommand
-from antlion.runner import Runner
-
-
-class LinuxDateCommand(LinuxCommand):
- """Look through current running processes."""
-
- def __init__(self, runner: Runner, binary: str = "date") -> None:
- super().__init__(runner, binary)
-
- def sync(self) -> None:
- """Synchronize system time.
-
- Allows for better synchronization between antlion host logs and device
- logs. Useful for when the device does not have an internet connection.
- """
- now = datetime.datetime.now().astimezone().isoformat()
- self._run(["-s", now], sudo=True)
diff --git a/packages/antlion/controllers/utils_lib/commands/ip.py b/packages/antlion/controllers/utils_lib/commands/ip.py
deleted file mode 100644
index 4cfe237..0000000
--- a/packages/antlion/controllers/utils_lib/commands/ip.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-import subprocess
-from typing import Iterator
-
-from mobly import signals
-
-from antlion.controllers.utils_lib.commands.command import LinuxCommand
-from antlion.runner import Runner
-
-
-class LinuxIpCommand(LinuxCommand):
- """Interface for doing standard IP commands on a linux system.
-
- Wraps standard shell commands used for ip into a python object that can
- be interacted with more easily.
- """
-
- def __init__(self, runner: Runner, binary: str = "ip"):
- """Create a LinuxIpCommand.
-
- Args:
- runner: Runner to use to execute this command.
- binary: Path to binary to use. Defaults to "ip".
- """
- super().__init__(runner, binary)
-
- def get_ipv4_addresses(
- self, net_interface: str
- ) -> Iterator[tuple[ipaddress.IPv4Interface, ipaddress.IPv4Address | None]]:
- """Gets all ipv4 addresses of a network interface.
-
- Args:
- net_interface: string, The network interface to get info on
- (eg. wlan0).
-
-        Returns: An iterator of (address, broadcast) tuples, where address is
-                 an ipaddress.IPv4Interface and broadcast is an
-                 ipaddress.IPv4Address, or None when no broadcast is set.
- """
- results = self._run(["addr", "show", "dev", net_interface])
- lines = results.stdout.splitlines()
-
- # Example stdout:
- # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP group default qlen 1000
- # link/ether 48:0f:cf:3c:9d:89 brd ff:ff:ff:ff:ff:ff
- # inet 192.168.1.1/24 brd 192.168.1.255 scope global eth0
- # valid_lft forever preferred_lft forever
- # inet6 2620:0:1000:1500:a968:a776:2d80:a8b3/64 scope global temporary dynamic
- # valid_lft 599919sec preferred_lft 80919sec
-
- for line_bytes in lines:
- line = line_bytes.decode("utf-8").strip()
- match = re.search(
- "inet (?P<address>[^\\s]*) brd (?P<bcast>[^\\s]*)", line
- )
- if match:
- d = match.groupdict()
- address = ipaddress.IPv4Interface(d["address"])
- bcast = ipaddress.IPv4Address(d["bcast"])
-                yield (address, bcast)
-                # The fallback pattern below would match this line again, so
-                # skip it to avoid yielding a duplicate address.
-                continue
-
- match = re.search("inet (?P<address>[^\\s]*)", line)
- if match:
- d = match.groupdict()
- address = ipaddress.IPv4Interface(d["address"])
- yield (address, None)
-
- def add_ipv4_address(
- self,
- net_interface: str,
- address: ipaddress.IPv4Interface,
- broadcast: ipaddress.IPv4Address | None = None,
- ) -> None:
- """Adds an ipv4 address to a net_interface.
-
- Args:
- net_interface: The network interface to get the new ipv4 (eg. wlan0).
- address: The new ipaddress and netmask to add to an interface.
-            broadcast: The broadcast address to use for this net_interface's subnet.
- """
- args = ["addr", "add", str(address)]
- if broadcast:
- args += ["broadcast", str(broadcast)]
- args += ["dev", net_interface]
- self._run(args, sudo=True)
-
- def remove_ipv4_address(
- self,
- net_interface: str,
- address: ipaddress.IPv4Interface | ipaddress.IPv4Address,
- ignore_status: bool = False,
- ) -> None:
- """Remove an ipv4 address.
-
- Removes an ipv4 address from a network interface.
-
- Args:
- net_interface: The network interface to remove the ipv4 address from (eg. wlan0).
- address: The ip address to remove from the net_interface.
- ignore_status: True if the exit status can be ignored
- """
- try:
- self._run(
- ["addr", "del", str(address), "dev", net_interface],
- sudo=True,
- )
- except subprocess.CalledProcessError as e:
- if e.returncode == 2 or "Address not found" in e.stdout:
- # Do not fail if the address was already deleted or couldn't be
- # found.
- return
- raise e
-
- def set_ipv4_address(
- self,
- net_interface: str,
- address: ipaddress.IPv4Interface,
- broadcast: ipaddress.IPv4Address | None = None,
- ) -> None:
- """Set the ipv4 address.
-
- Sets the ipv4 address of a network interface. If the network interface
- has any other ipv4 addresses these will be cleared.
-
- Args:
- net_interface: The network interface to set the ip address on (eg. wlan0).
- address: The ip address and subnet to give the net_interface.
- broadcast: The broadcast address to use for the subnet.
- """
- self.clear_ipv4_addresses(net_interface)
- self.add_ipv4_address(net_interface, address, broadcast)
-
- def clear_ipv4_addresses(self, net_interface: str) -> None:
- """Clears all ipv4 addresses registered to a net_interface.
-
- Args:
- net_interface: The network interface to clear addresses from (eg. wlan0).
- """
- ip_info = self.get_ipv4_addresses(net_interface)
-
- for address, _ in ip_info:
- try:
- self.remove_ipv4_address(net_interface, address)
- except subprocess.CalledProcessError as e:
- if (
- "RTNETLINK answers: Cannot assign requested address"
- in e.stderr
- ):
- # It is possible that the address has already been removed by the
- # time this command has been called.
- addresses = [
- a for a, _ in self.get_ipv4_addresses(net_interface)
- ]
- if address not in addresses:
- self._runner.log.warning(
- "Unable to remove address %s. The address was "
- "removed by another process.",
- address,
- )
- else:
- raise signals.TestError(
- f"Unable to remove address {address}. The address is still "
- f"registered to {net_interface}, despite call for removal.",
- extras={
- "stderr": e.stderr,
- "stdout": e.stdout,
- "returncode": e.returncode,
- },
- )
- raise signals.TestError(
- f"Unable to remove address {address}: {e.stderr}",
- extras={
- "stdout": e.stdout,
- "returncode": e.returncode,
- },
- )
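A rough usage sketch of LinuxIpCommand; the device name, addresses, and the
runner are placeholders (any antlion Runner, local or SSH, works):

    import ipaddress

    from antlion.controllers.utils_lib.commands.ip import LinuxIpCommand

    ip = LinuxIpCommand(runner)
    ip.set_ipv4_address(
        "eth1",
        ipaddress.IPv4Interface("192.168.50.1/24"),
        broadcast=ipaddress.IPv4Address("192.168.50.255"),
    )
    for address, broadcast in ip.get_ipv4_addresses("eth1"):
        print(address, broadcast)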
diff --git a/packages/antlion/controllers/utils_lib/commands/journalctl.py b/packages/antlion/controllers/utils_lib/commands/journalctl.py
deleted file mode 100644
index c1a21f7..0000000
--- a/packages/antlion/controllers/utils_lib/commands/journalctl.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import shlex
-from datetime import datetime
-
-from antlion.controllers.utils_lib.commands import pgrep
-from antlion.controllers.utils_lib.commands.command import LinuxCommand, require
-from antlion.runner import Runner
-
-# Timestamp format accepted by systemd.
-# See https://man7.org/linux/man-pages/man7/systemd.time.7.html#PARSING_TIMESTAMPS
-SYSTEMD_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S UTC"
-
-# Wait a maximum of 5 minutes for journalctl to output all systemd journal logs
-# since boot.
-JOURNALCTL_TIMEOUT_SEC = 60 * 5
-
-
-class LinuxJournalctlCommand(LinuxCommand):
- """Print log entries from the systemd journal.
-
- Only supported on Linux distributions using systemd.
- """
-
- def __init__(self, runner: Runner, binary: str = "journalctl") -> None:
- super().__init__(runner, binary)
- self._pgrep = require(pgrep.LinuxPgrepCommand(runner))
- self._last_ran: datetime | None = None
- self._logs_before_reset: str | None = None
-
- def available(self) -> bool:
- if not super().available():
- return False
- return self._pgrep.find("systemd-journal") is not None
-
- def logs(self) -> str:
- """Return log entries since the last run or current boot, in that order."""
- if self._last_ran:
- args = [
- "--since",
- shlex.quote(self._last_ran.strftime(SYSTEMD_TIMESTAMP_FORMAT)),
- ]
- else:
- args = ["--boot"]
-
- self._last_ran = datetime.utcnow()
-
- self._runner.log.debug("Running journalctl")
- logs = self._run(
- args,
- sudo=True,
- log_output=False,
- timeout_sec=JOURNALCTL_TIMEOUT_SEC,
- ).stdout.decode("utf-8")
-
- if self._logs_before_reset:
- return f"{self._logs_before_reset}\n{logs}"
- return logs
-
- def set_runner(self, runner: Runner) -> None:
- """Set a new runner.
-
- Use when underlying connection to the device refreshes.
- """
- self._runner = runner
-
- def save_and_reset(self) -> None:
- """Save logs and reset the last known run time.
-
- Run before every reboot!
- """
- self._logs_before_reset = self.logs()
- self._last_ran = None
diff --git a/packages/antlion/controllers/utils_lib/commands/nmcli.py b/packages/antlion/controllers/utils_lib/commands/nmcli.py
deleted file mode 100644
index c773573..0000000
--- a/packages/antlion/controllers/utils_lib/commands/nmcli.py
+++ /dev/null
@@ -1,232 +0,0 @@
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import enum
-from dataclasses import dataclass
-
-from antlion.controllers.utils_lib.commands import pgrep
-from antlion.controllers.utils_lib.commands.command import LinuxCommand, require
-from antlion.runner import Runner
-
-
-class LinuxNmcliCommand(LinuxCommand):
- """Control the Linux NetworkManager.
-
- The NetworkManager daemon attempts to make networking configuration and
- operation as painless and automatic as possible by managing the primary
- network connection and other network interfaces, like Ethernet, Wi-Fi, and
- Mobile Broadband devices. NetworkManager will connect any network device
- when a connection for that device becomes available, unless that behavior is
- disabled.
- """
-
- def __init__(self, runner: Runner, binary: str = "nmcli") -> None:
- super().__init__(runner, binary)
- self._pgrep = require(pgrep.LinuxPgrepCommand(runner))
-
- def available(self) -> bool:
- if not super().available():
- return False
- return self._pgrep.find("NetworkManager") is not None
-
- def setup_device(self, device: str) -> None:
- """Create a device connection suitable for antlion testing.
-
- Disables IPv4 DHCP so that tests can manage IP addresses manually, but
- still enables automatic IPv6 link-local address assignment.
- """
- # Remove existing connections associated with device.
- for conn in self._get_connections():
- if conn.device == device:
- self._delete_connection(conn)
-
- self._run(
- [
- "connection",
- "add",
- "ifname",
- device,
- "type",
- "ethernet",
- "ipv4.method",
- IPv4Method.DISABLED,
- "ipv6.method",
- IPv6Method.LINK_LOCAL,
- ],
- sudo=True,
- )
-
- def _get_connections(self) -> list[Connection]:
- res = self._run(
- [
- "--get-values",
- "name,uuid,type,device",
- "connection",
- ],
- sudo=True,
- )
- connections: list[Connection] = []
- for line in res.stdout.splitlines():
- tokens = line.decode("utf-8").split(":", 3)
- connections.append(
- Connection(
- name=tokens[0],
- uuid=tokens[1],
- type=tokens[2],
- device=tokens[3],
- )
- )
- return connections
-
- def _delete_connection(self, conn: Connection) -> None:
- self._run(
- [
- "connection",
- "delete",
- "id",
- conn.name,
- ],
- sudo=True,
- )
-
- def _down_device(self, device: str) -> None:
- self._run(
- [
- "device",
- "down",
- device,
- ],
- sudo=True,
- )
-
- def _up_device(self, device: str) -> None:
- self._run(
- [
- "device",
- "up",
- device,
- ],
- sudo=True,
- )
-
- def set_ipv4_method(self, device: str, method: IPv4Method) -> None:
- """Set the IPv4 connection method.
-
- Args:
- device: Name of the device to modify.
- method: Connection method to use.
- """
- self._run(
- [
- "device",
- "modify",
- device,
- "ipv4.method",
- method,
- ],
- sudo=True,
- )
-
-
-@dataclass(frozen=True)
-class Connection:
- name: str
- uuid: str
- type: str
- device: str
-
-
-class IPv4Method(enum.StrEnum):
- AUTO = "auto"
- """Enables automatic IPv4 address assignment from DHCP, PPP, or similar services."""
-
- MANUAL = "manual"
- """Enables the configuration of static IPv4 addresses on the interface.
-
- Note that you must set at least one IP address and subnet mask in the
- "ipv4.addresses" property.
- """
-
- DISABLED = "disabled"
- """Disables the IPv4 protocol in this connection profile."""
-
- SHARED = "shared"
- """Provides network access to other computers.
-
- If you do not specify an IP address and subnet mask in "ipv4.addresses",
- NetworkManager assigns 10.42.x.1/24 to the interface. Additionally,
- NetworkManager starts a DHCP server and DNS forwarder. Hosts that connect to
- this interface will then receive an IP address from the configured range,
-    and NetworkManager configures NAT to map client addresses to that of the
- current default network connection.
- """
-
- LINK_LOCAL = "link-local"
- """Enables link-local addresses according to RFC 3927.
-
- NetworkManager assigns a random link-local address from the 169.254.0.0/16
- subnet to the interface.
- """
-
-
-class IPv6Method(enum.StrEnum):
- AUTO = "auto"
- """Enables IPv6 auto-configuration.
-
- By default, NetworkManager uses Router Advertisements and, if the router
- announces the "managed" flag, NetworkManager requests an IPv6 address and
- prefix from a DHCPv6 server.
- """
-
- DHCP = "dhcp"
- """Requests an IPv6 address and prefix from a DHCPv6 server.
-
- Note that DHCPv6 does not have options to provide routes and the default
- gateway. As a consequence, by using the "dhcp" method, connections are
- limited to their own subnet.
- """
-
- MANUAL = "manual"
- """Enables the configuration of static IPv6 addresses on the interface.
-
- Note that you must set at least one IP address and prefix in the
- "ipv6.addresses" property.
- """
-
- DISABLED = "disabled"
- """Disables the IPv6 protocol in this connection profile."""
-
- IGNORE = "ignore"
- """Make no changes to the IPv6 configuration on the interface.
-
- For example, you can then use the "accept_ra" feature of the kernel to
- accept Router Advertisements.
- """
-
- SHARED = "shared"
- """Provides network access to other computers.
-
- NetworkManager requests a prefix from an upstream DHCPv6 server, assigns an
- address to the interface, and announces the prefix to clients that connect
- to this interface.
- """
-
- LINK_LOCAL = "link-local"
- """Enabled link-local addresses according to RFC 3927.
-
- Assigns a random link-local address from the fe80::/64 subnet to the
- interface.
- """
diff --git a/packages/antlion/controllers/utils_lib/commands/pgrep.py b/packages/antlion/controllers/utils_lib/commands/pgrep.py
deleted file mode 100644
index cf6f271..0000000
--- a/packages/antlion/controllers/utils_lib/commands/pgrep.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-
-from antlion.controllers.utils_lib.commands.command import LinuxCommand
-from antlion.runner import Runner
-
-
-class LinuxPgrepCommand(LinuxCommand):
- """Look through current running processes."""
-
- def __init__(self, runner: Runner, binary: str = "pgrep") -> None:
- super().__init__(runner, binary)
-
- def find(self, process: str) -> list[int] | None:
- """Find a process by name.
-
- Args:
- process: Name of the process to query
-
- Returns:
- List of process IDs if running, otherwise None.
- """
- try:
- result = self._run(["-x", process])
- return [int(line) for line in result.stdout.splitlines()]
- except subprocess.CalledProcessError as e:
- if e.stdout or e.stderr:
- # pgrep should not output anything to stdout or stderr
- raise e
- return None
diff --git a/packages/antlion/controllers/utils_lib/commands/route.py b/packages/antlion/controllers/utils_lib/commands/route.py
deleted file mode 100644
index 0ee68d5..0000000
--- a/packages/antlion/controllers/utils_lib/commands/route.py
+++ /dev/null
@@ -1,212 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-import subprocess
-from typing import Iterator, Literal
-
-from mobly import signals
-
-from antlion.controllers.utils_lib.commands.command import LinuxCommand
-from antlion.runner import Runner
-
-
-class Error(Exception):
- """Exception thrown when a valid ip command experiences errors."""
-
-
-class LinuxRouteCommand(LinuxCommand):
- """Interface for doing standard ip route commands on a linux system."""
-
- def __init__(self, runner: Runner, binary: str = "ip"):
- super().__init__(runner, binary)
-
- def add_route(
- self,
- net_interface: str,
- address: ipaddress.IPv4Interface
- | ipaddress.IPv6Interface
- | Literal["default"],
- proto: str = "static",
- ) -> None:
- """Add an entry to the ip routing table.
-
- Will add a route for either a specific ip address, or a network.
-
- Args:
-            net_interface: Packets matching this route will be sent using
-                this network interface (eg. wlan0).
- address: The address to use. If a network is given then the entire
- subnet will be routed. If "default" is given then this will set the
- default route.
- proto: Routing protocol identifier of this route (e.g. kernel,
- redirect, boot, static, ra). See `man ip-route(8)` for details.
-
- Raises:
- NetworkInterfaceDown: Raised when the network interface is down.
- """
- try:
- self._run(
- [
- "route",
- "add",
- str(address),
- "dev",
- net_interface,
- "proto",
- proto,
- ],
- sudo=True,
- )
- except subprocess.CalledProcessError as e:
- if "File exists" in e.stderr:
- raise signals.TestError(
- "Route already exists",
- extras={
- "stderr": e.stderr,
- "stdout": e.stdout,
- "returncode": e.returncode,
- },
- )
- if "Network is down" in e.stderr:
- raise signals.TestError(
- "Device must be up for adding a route.",
- extras={
- "stderr": e.stderr,
- "stdout": e.stdout,
- "returncode": e.returncode,
- },
- )
- raise e
-
- def get_routes(
- self, net_interface: str | None = None
- ) -> Iterator[
- tuple[
- ipaddress.IPv4Interface
- | ipaddress.IPv6Interface
- | Literal["default"],
- str,
- ]
- ]:
- """Get the routes in the ip routing table.
-
- Args:
-            net_interface: If given, only retrieve routes registered to go
-                through this network interface (eg. wlan0).
-
-        Returns: An iterator of (address, net_interface) tuples. For the
-                 default route, address is the literal string "default";
-                 otherwise it is an ipaddress.IPv4Interface or
-                 ipaddress.IPv6Interface.
- """
- result_ipv4 = self._run(["-4", "route", "show"])
- result_ipv6 = self._run(["-6", "route", "show"])
-
- lines = (
- result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()
- )
-
- # Scan through each line for valid route entries
- # Example output:
- # default via 192.168.1.254 dev eth0 proto static
- # 192.168.1.0/24 dev eth0 proto kernel scope link src 172.22.100.19 metric 1
- # 192.168.2.1 dev eth2 proto kernel scope link metric 1
- # fe80::/64 dev wlan0 proto static metric 1024
- for line_bytes in lines:
- line = line_bytes.decode("utf-8")
- if not "dev" in line:
- continue
-
- if line.startswith("default"):
- # The default route entry is formatted differently.
- match = re.search("dev (?P<net_interface>\\S+)", line)
- if not match:
- continue
-
- iface = match.groupdict()["net_interface"]
- assert isinstance(iface, str)
-
- if net_interface and iface != net_interface:
- continue
-
- # When there is a match for the route entry pattern create
- # A pair to hold the info.
- yield ("default", iface)
- else:
- # Test the normal route entry pattern.
- match = re.search(
- "(?P<address>[0-9A-Fa-f\\.\\:/]+) dev (?P<net_interface>\\S+)",
- line,
- )
- if not match:
- continue
-
- # When there is a match for the route entry pattern create
- # A pair to hold the info.
- d = match.groupdict()
-
- address_raw = d["address"]
- assert isinstance(address_raw, str)
-
- iface = d["net_interface"]
- assert isinstance(iface, str)
-
- if net_interface and iface != net_interface:
- continue
-
- yield (ipaddress.ip_interface(address_raw), iface)
-
- def remove_route(
- self,
- address: ipaddress.IPv4Interface
- | ipaddress.IPv6Interface
- | Literal["default"],
- net_interface: str | None = None,
- ) -> None:
- """Removes a route from the ip routing table.
-
- Removes a route from the ip routing table. If the route does not exist
- nothing is done.
-
- Args:
- address: The address of the route to remove.
- net_interface: If specified the route being removed is registered to
- go through this network interface (eg. wlan0)
- """
- try:
- args = ["route", "del", str(address)]
- if net_interface:
- args += ["dev", net_interface]
-            self._run(args, sudo=True)
- except subprocess.CalledProcessError as e:
- if "RTNETLINK answers: No such process" in e.stderr:
- # The route didn't exist.
- return
- raise signals.TestError(
- f"Failed to delete route {address}: {e}"
- ) from e
-
- def clear_routes(self, net_interface: str) -> None:
- """Clears all routes.
-
- Args:
- net_interface: The network interface to clear routes on.
- """
- routes = self.get_routes(net_interface)
- for a, d in routes:
- self.remove_route(a, d)
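A rough usage sketch of LinuxRouteCommand; the interface name and runner are
placeholders:

    from antlion.controllers.utils_lib.commands.route import LinuxRouteCommand

    route = LinuxRouteCommand(runner)
    route.add_route("eth1", "default")
    for address, interface in route.get_routes("eth1"):
        print(address, "via", interface)
    route.clear_routes("eth1")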
diff --git a/packages/antlion/controllers/utils_lib/commands/shell.py b/packages/antlion/controllers/utils_lib/commands/shell.py
deleted file mode 100644
index 72fef9a..0000000
--- a/packages/antlion/controllers/utils_lib/commands/shell.py
+++ /dev/null
@@ -1,217 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import re
-import shlex
-import signal
-import time
-from typing import Iterator
-
-from antlion.runner import CalledProcessError, Runner
-
-
-class ShellCommand(object):
- """Wraps basic commands that tend to be tied very closely to a shell.
-
- This class is a wrapper for running basic shell commands through
- any object that has a run command. Basic shell functionality for managing
-    the system, programs, and files is wrapped within this class.
-
- Note: At the moment this only works with the ssh runner.
- """
-
- def __init__(self, runner: Runner) -> None:
- """Creates a new shell command invoker.
-
- Args:
- runner: The object that will run the shell commands.
- """
- self._runner = runner
-
- def is_alive(self, identifier: str | int) -> bool:
- """Checks to see if a program is alive.
-
-        Checks to see if a program is alive in the shell's environment. This can
- be used to check on generic programs, or a specific program using a pid.
-
- Args:
-            identifier: Used to identify the program to check. If given an int
- then it is assumed to be a pid. If given a string then it will
- be used as a search key to compare on the running processes.
- Returns:
- True if a process was found running, otherwise False.
- """
- try:
- if isinstance(identifier, str):
- ps = self._runner.run(["ps", "aux"])
- if re.search(identifier, ps.stdout.decode("utf-8")):
- return True
- return False
- else:
- self.signal(identifier, 0)
- return True
- except CalledProcessError:
- return False
-
- def get_pids(self, identifier: str) -> Iterator[int]:
- """Gets the pids of a program.
-
- Searches for a program with a specific name and grabs the pids for all
- programs that match.
-
- Args:
- identifier: A search term that identifies the program.
-
-        Returns: An iterator over all pids that matched the identifier; yields
-            nothing if no pids were found.
- """
- try:
- ps = self._runner.run(["ps", "aux"])
- except CalledProcessError as e:
- if e.returncode == 1:
- # Grep returns exit status 1 when no lines are selected. This is
- # an expected return code.
- return
- raise e
-
- lines = ps.stdout.decode("utf-8").splitlines()
-
- # The expected output of the above command is like so:
- # bob 14349 0.0 0.0 34788 5552 pts/2 Ss Oct10 0:03 bash
- # bob 52967 0.0 0.0 34972 5152 pts/4 Ss Oct10 0:00 bash
- # Where the format is:
- # USER PID ...
- for line in lines:
- if re.search(identifier, line) is None:
- continue
-
- pieces = line.split()
- try:
- yield int(pieces[1])
-            except (IndexError, ValueError):
-                # Skip lines (such as the ps header) whose second column is
-                # not a numeric pid.
-                continue
-
- def search_file(self, search_string: str, file_name: str) -> bool:
- """Searches through a file for a string.
-
- Args:
- search_string: The string or pattern to look for.
- file_name: The name of the file to search.
-
- Returns:
- True if the string or pattern was found, False otherwise.
- """
- try:
- self._runner.run(["grep", shlex.quote(search_string), file_name])
- return True
- except CalledProcessError:
- return False
-
- def read_file(self, file_name: str) -> str:
- """Reads a file through the shell.
-
- Args:
- file_name: The name of the file to read.
-
- Returns:
-            A string of the file's contents.
- """
- return self._runner.run(["cat", file_name]).stdout.decode("utf-8")
-
- def write_file(self, file_name: str, data: str) -> None:
- """Writes a block of data to a file through the shell.
-
- Args:
- file_name: The name of the file to write to.
- data: The string of data to write.
- """
-        # Intentionally not passed through shlex.quote() to allow stdin
- # redirection to a remote file.
- self._runner.run(
- ["cat", "-", ">", file_name], stdin=data.encode("utf-8")
- )
-
- def touch_file(self, file_name: str) -> None:
- """Creates a file through the shell.
-
- Args:
- file_name: The name of the file to create.
- """
- self._runner.run(["touch", file_name])
-
- def delete_file(self, file_name: str) -> None:
- """Deletes a file through the shell.
-
- Args:
- file_name: The name of the file to delete.
- """
- try:
- self._runner.run(["rm", "-r", file_name])
- except CalledProcessError as e:
- if b"No such file or directory" in e.stderr:
- return
- raise e
-
- def kill(self, identifier: str | int, timeout_sec: int = 10) -> None:
- """Kills a program or group of programs through the shell.
-
- Kills all programs that match an identifier through the shell. This
- will send an increasing queue of kill signals to all programs
- that match the identifier until either all are dead or the timeout
- finishes.
-
- Programs are guaranteed to be killed after running this command.
-
- Args:
- identifier: A string used to identify the program.
- timeout_sec: The time to wait for all programs to die. Each signal
- will take an equal portion of this time.
- """
- if isinstance(identifier, int):
- pids = [identifier]
- else:
- pids = list(self.get_pids(identifier))
-
- signal_queue = [signal.SIGINT, signal.SIGTERM, signal.SIGKILL]
-
- signal_duration = timeout_sec / len(signal_queue)
- for sig in signal_queue:
- for pid in pids:
- try:
- self.signal(pid, sig)
- except CalledProcessError:
- pass
-
- start_time = time.time()
- while pids and time.time() - start_time < signal_duration:
- time.sleep(0.1)
- pids = [pid for pid in pids if self.is_alive(pid)]
-
- if not pids:
- break
-
- def signal(self, pid: int, sig: int) -> None:
- """Sends a specific signal to a program.
-
- Args:
- pid: The process id of the program to kill.
- sig: The signal to send.
-
- Raises:
-            CalledProcessError: Raised when the signal fails to reach
- the specified program.
- """
- self._runner.run(["kill", f"-{sig}", str(pid)])
diff --git a/packages/antlion/controllers/utils_lib/commands/tcpdump.py b/packages/antlion/controllers/utils_lib/commands/tcpdump.py
deleted file mode 100644
index 27c4d25..0000000
--- a/packages/antlion/controllers/utils_lib/commands/tcpdump.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import time
-from io import BufferedRandom
-from pathlib import Path
-from subprocess import Popen
-from types import TracebackType
-
-from mobly.logger import (
- epoch_to_log_line_timestamp,
- normalize_log_line_timestamp,
-)
-
-from antlion import utils
-from antlion.controllers.utils_lib.commands.command import LinuxCommand
-from antlion.runner import Runner
-
-# Max time to wait for tcpdump to terminate after sending SIGTERM.
-TERMINATE_TIMEOUT_SEC: float = 5.0
-
-
-class LinuxTcpdumpCommand(LinuxCommand):
- """Dump traffic on a network."""
-
- def __init__(self, runner: Runner, binary: str = "tcpdump") -> None:
- super().__init__(runner, binary)
-
- def start(self, interface: str, output_dir: Path) -> TcpdumpProcess:
- """Start tcpdump.
-
- Args:
- interface: Listen on this interface.
-            output_dir: Path to the output directory.
-
- Returns:
- A context manager to run tcpdump. Must be used in a with statement
- for the process to start and exit correctly.
- """
- time_stamp = normalize_log_line_timestamp(
- epoch_to_log_line_timestamp(utils.get_current_epoch_time())
- )
- return TcpdumpProcess(
- self, interface, pcap=Path(output_dir, f"tcpdump_{time_stamp}.pcap")
- )
-
-
-class TcpdumpProcess:
- """Process running tcpdump."""
-
- def __init__(
- self,
- tcpdump: LinuxTcpdumpCommand,
- interface: str,
- pcap: Path,
- ) -> None:
- self._tcpdump = tcpdump
- self._log = tcpdump._runner.log
- self._interface = interface
- self._pcap_path = pcap
- self._pcap_file: BufferedRandom | None = None
- self._process: Popen[bytes] | None = None
-
- def __enter__(self) -> None:
- self._log.info(
- "Streaming %s packet capture to %s",
- self._interface,
- self._pcap_path,
- )
- self._pcap_file = self._pcap_path.open("w+b")
- self._process = self._tcpdump._start(
- [
- "-i",
- self._interface,
- # Stream pcap as bytes to stdout
- "-w",
- "-",
- ],
- sudo=True,
- stdout=self._pcap_file,
- )
-
- def __exit__(
- self,
- _exit_type: type[BaseException] | None,
- _exit_value: BaseException | None,
- _exit_traceback: TracebackType | None,
- ) -> None:
- if self._pcap_file is None or self._process is None:
- # tcpdump is not running.
- return
-
- self._process.terminate()
- timeout = time.time() + TERMINATE_TIMEOUT_SEC
- while time.time() < timeout:
- exit_code = self._process.poll()
- if exit_code is not None:
- self._pcap_file.close()
- self._pcap_file = None
- break
- else:
- self._process.kill()
- self._pcap_file.close()
- self._pcap_file = None
- raise TimeoutError(
- "tcpdump did not terminate after sending SIGTERM"
- )
-
- self._log.info(
- "%s packet capture wrote to %s", self._interface, self._pcap_path
- )
-
- _, stderr = self._process.communicate()
- self._log.debug(
- "tcpdump returned with status %i\nstderr: %s",
- exit_code,
- stderr.decode("utf-8", errors="replace"),
- )
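A rough sketch of the context-manager flow above; the interface, output
directory, runner, and run_traffic_of_interest() are placeholders:

    from pathlib import Path

    from antlion.controllers.utils_lib.commands.command import require
    from antlion.controllers.utils_lib.commands.tcpdump import LinuxTcpdumpCommand

    tcpdump = require(LinuxTcpdumpCommand(runner))
    with tcpdump.start("eth0", Path("/tmp/antlion_logs")):
        run_traffic_of_interest()  # placeholder for the test body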
diff --git a/packages/antlion/controllers/utils_lib/ssh/__init__.py b/packages/antlion/controllers/utils_lib/ssh/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/controllers/utils_lib/ssh/__init__.py
+++ /dev/null
diff --git a/packages/antlion/controllers/utils_lib/ssh/connection.py b/packages/antlion/controllers/utils_lib/ssh/connection.py
deleted file mode 100644
index 6f270cd..0000000
--- a/packages/antlion/controllers/utils_lib/ssh/connection.py
+++ /dev/null
@@ -1,353 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import os
-import re
-import shutil
-import subprocess
-import tempfile
-import threading
-import time
-from typing import IO
-
-from mobly import logger
-
-from antlion.controllers.utils_lib.ssh import formatter
-from antlion.libs.proc import job
-from antlion.runner import (
- CalledProcessError,
- CalledProcessTransportError,
- Runner,
-)
-
-
-class SshConnection(Runner):
- """Provides a connection to a remote machine through ssh.
-
- Provides the ability to connect to a remote machine and execute a command
-    on it. The connection will try to establish a persistent connection when
- a command is run. If the persistent connection fails it will attempt
- to connect normally.
- """
-
- @property
- def socket_path(self):
- """Returns: The os path to the master socket file."""
- if self._master_ssh_tempdir is None:
- raise AttributeError(
- "socket_path is not available yet; run setup_master_ssh() first"
- )
- return os.path.join(self._master_ssh_tempdir, "socket")
-
- def __init__(self, settings):
- """
- Args:
- settings: The ssh settings to use for this connection.
- """
- self._settings = settings
- self._formatter = formatter.SshFormatter()
- self._lock = threading.Lock()
- self._master_ssh_proc = None
- self._master_ssh_tempdir: str | None = None
-
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SshConnection | {self._settings.hostname}]",
- },
- )
-
- def __enter__(self):
- return self
-
- def __exit__(self, _, __, ___):
- self.close()
-
- def __del__(self):
- self.close()
-
- def setup_master_ssh(self, timeout_sec: int = 5):
- """Sets up the master ssh connection.
-
- Sets up the initial master ssh connection if it has not already been
- started.
-
- Args:
- timeout_sec: The time to wait for the master ssh connection to
- be made.
-
- Raises:
- Error: When setting up the master ssh connection fails.
- """
- with self._lock:
- if self._master_ssh_proc is not None:
- socket_path = self.socket_path
- if (
- not os.path.exists(socket_path)
- or self._master_ssh_proc.poll() is not None
- ):
- self.log.debug(
- "Master ssh connection to %s is down.",
- self._settings.hostname,
- )
- self._cleanup_master_ssh()
-
- if self._master_ssh_proc is None:
- # Create a shared socket in a temp location.
- self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master")
-
- # Setup flags and options for running the master ssh
- # -N: Do not execute a remote command.
- # ControlMaster: Spawn a master connection.
- # ControlPath: The master connection socket path.
- extra_flags: dict[str, str | int | None] = {"-N": None}
- extra_options: dict[str, str | int | bool] = {
- "ControlMaster": True,
- "ControlPath": self.socket_path,
- "BatchMode": True,
- }
-
- # Construct the command and start it.
- master_cmd = self._formatter.format_ssh_local_command(
- self._settings,
- extra_flags=extra_flags,
- extra_options=extra_options,
- )
- self.log.info("Starting master ssh connection.")
- self._master_ssh_proc = job.run_async(master_cmd)
-
- end_time = time.time() + timeout_sec
-
- while time.time() < end_time:
- if os.path.exists(self.socket_path):
- break
- time.sleep(0.2)
- else:
- self._cleanup_master_ssh()
- raise CalledProcessTransportError(
- "Master ssh connection timed out."
- )
-
- def run(
- self,
- command: str | list[str],
- stdin: bytes | None = None,
- timeout_sec: float | None = 60.0,
- log_output: bool = True,
- ignore_status: bool = False,
- attempts: int = 2,
- ) -> subprocess.CompletedProcess[bytes]:
- """Runs a remote command over ssh.
-
- Will ssh to a remote host and run a command. This method will
- block until the remote command is finished.
-
- Args:
- command: The command to execute over ssh.
- stdin: Standard input to command.
- timeout_sec: seconds to wait for command to finish.
- log_output: If true, print stdout and stderr to the debug log.
- ignore_status: True to ignore the exit code of the remote
- subprocess. Note that if you do ignore status codes,
- you should handle non-zero exit codes explicitly.
- attempts: Number of attempts before giving up on command failures.
-
- Returns:
- Results of the ssh command.
-
- Raises:
- CalledProcessError: when the process exits with a non-zero status
- and ignore_status is False.
-            subprocess.TimeoutExpired: When the remote command took too long to
- execute.
- CalledProcessTransportError: when the underlying transport fails
- """
- if attempts < 1:
- raise TypeError("attempts must be a positive, non-zero integer")
-
- try:
- self.setup_master_ssh(self._settings.connect_timeout)
- except CalledProcessTransportError:
- self.log.warning(
- "Failed to create master ssh connection, using "
- "normal ssh connection."
- )
-
- extra_options: dict[str, str | int | bool] = {"BatchMode": True}
- if self._master_ssh_proc:
- extra_options["ControlPath"] = self.socket_path
-
- if isinstance(command, list):
- full_command = " ".join(command)
- else:
- full_command = command
-
- terminal_command = self._formatter.format_command(
- full_command, self._settings, extra_options=extra_options
- )
-
- dns_retry_count = 2
- while True:
- try:
- result = job.run(
- terminal_command,
- stdin=stdin,
- log_output=log_output,
- timeout_sec=timeout_sec,
- )
-
- return subprocess.CompletedProcess(
- terminal_command,
- result.returncode,
- result.stdout,
- result.stderr,
- )
- except CalledProcessError as e:
- # Check for SSH errors.
- if e.returncode == 255:
- stderr = e.stderr.decode("utf-8", errors="replace")
-
- had_dns_failure = re.search(
- r"^ssh: .*: Name or service not known",
- stderr,
- flags=re.MULTILINE,
- )
- if had_dns_failure:
- dns_retry_count -= 1
- if not dns_retry_count:
- raise CalledProcessTransportError(
- "DNS failed to find host"
- ) from e
- self.log.debug("Failed to connect to host, retrying...")
- continue
-
- had_timeout = re.search(
- r"^ssh: connect to host .* port .*: "
- r"Connection timed out\r$",
- stderr,
- flags=re.MULTILINE,
- )
- if had_timeout:
- raise CalledProcessTransportError(
- "Ssh timed out"
- ) from e
-
- permission_denied = "Permission denied" in stderr
- if permission_denied:
- raise CalledProcessTransportError(
- "Permission denied"
- ) from e
-
- unknown_host = re.search(
- r"ssh: Could not resolve hostname .*: "
- r"Name or service not known",
- stderr,
- flags=re.MULTILINE,
- )
- if unknown_host:
- raise CalledProcessTransportError("Unknown host") from e
-
- # Retry unknown SSH errors.
- self.log.error(
- f"An unknown error has occurred. Job result: {e}"
- )
- ping_output = job.run(
- ["ping", self._settings.hostname, "-c", "3", "-w", "1"],
- ignore_status=True,
- )
- self.log.error(f"Ping result: {ping_output}")
- if attempts > 1:
- self._cleanup_master_ssh()
-                    return self.run(
- command,
- stdin,
- timeout_sec,
- log_output,
- ignore_status,
- attempts - 1,
- )
- raise CalledProcessTransportError(
- "The job failed for unknown reasons"
- ) from e
-
-                if not ignore_status:
-                    raise e
-                # Status is ignored: return the failed result rather than
-                # looping and retrying indefinitely.
-                return subprocess.CompletedProcess(
-                    terminal_command, e.returncode, e.stdout, e.stderr
-                )
-
- def run_async(self, command: str) -> subprocess.CompletedProcess[bytes]:
- """Starts up a background command over ssh.
-
-        Will ssh to a remote host and start a command. This method will
- block until there is confirmation that the remote command has started.
-
- Args:
-            command: The command to execute over ssh.
-
- Returns:
- The result of the command to launch the background job.
-
- Raises:
- CalledProcessError: when the process fails to start
- subprocess.TimeoutExpired: when the timeout expires while waiting
- for a child process
- CalledProcessTransportError: when the underlying transport fails
- """
- return self.run(
- f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!"
- )
-
- def start(
- self,
- command: list[str],
- stdout: IO[bytes] | int = subprocess.PIPE,
- stdin: IO[bytes] | int = subprocess.PIPE,
- ) -> subprocess.Popen[bytes]:
- """Execute a child program in a new process."""
- extra_options: dict[str, str | int | bool] = {"BatchMode": True}
- if self._master_ssh_proc:
- extra_options["ControlPath"] = self.socket_path
-
- terminal_command = self._formatter.format_command(
- " ".join(command),
- self._settings,
- extra_options=extra_options,
- )
- return subprocess.Popen(terminal_command, stdout=stdout, stdin=stdin)
-
- def close(self) -> None:
- """Clean up open connections to remote host."""
- self._cleanup_master_ssh()
-
- def _cleanup_master_ssh(self) -> None:
- """
- Release all resources (process, temporary directory) used by an active
- master SSH connection.
- """
- # If a master SSH connection is running, kill it.
- if self._master_ssh_proc is not None:
- self.log.debug("Nuking master_ssh_job.")
- self._master_ssh_proc.kill()
- self._master_ssh_proc.wait()
- self._master_ssh_proc = None
-
- # Remove the temporary directory for the master SSH socket.
- if self._master_ssh_tempdir is not None:
- self.log.debug("Cleaning master_ssh_tempdir.")
- shutil.rmtree(self._master_ssh_tempdir)
- self._master_ssh_tempdir = None
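A rough usage sketch of SshConnection; ssh_settings is an SshSettings built
from the fields defined in ssh/settings.py (not shown here):

    from antlion.controllers.utils_lib.ssh.connection import SshConnection

    with SshConnection(ssh_settings) as ssh:
        result = ssh.run(["uname", "-a"], timeout_sec=10.0)
        print(result.stdout.decode("utf-8"))
        ssh.run_async("iperf3 -s")  # long-running server started in the background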
diff --git a/packages/antlion/controllers/utils_lib/ssh/formatter.py b/packages/antlion/controllers/utils_lib/ssh/formatter.py
deleted file mode 100644
index af5d5ae..0000000
--- a/packages/antlion/controllers/utils_lib/ssh/formatter.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from typing import Iterator
-
-from antlion.controllers.utils_lib.ssh.settings import SshSettings
-
-
-class SshFormatter(object):
- """Handles formatting ssh commands.
-
- Handler for formatting chunks of the ssh command to run.
- """
-
- def format_ssh_executable(self, settings: SshSettings) -> str:
- """Format the executable name.
-
- Formats the executable name as a string.
-
- Args:
- settings: The ssh settings being used.
-
- Returns:
- A string for the ssh executable name.
- """
- return settings.executable
-
- def format_host_name(self, settings: SshSettings) -> str:
- """Format hostname.
-
- Formats the hostname to connect to.
-
- Args:
- settings: The ssh settings being used.
-
- Returns:
- A string of the connection host name to connect to.
- """
- return f"{settings.username}@{settings.hostname}"
-
- def format_value(self, value: object) -> str:
- """Formats a command line value.
-
- Takes in a value and formats it so it can be safely used in the
- command line.
-
- Args:
- value: The value to format.
-
- Returns:
- A string representation of the formatted value.
- """
- if isinstance(value, bool):
- return "yes" if value else "no"
-
- return str(value)
-
- def format_options_list(
- self, options: dict[str, str | int | bool]
- ) -> Iterator[str]:
- """Format the option list.
-
- Formats a dictionary of options into a list of strings to be used
- on the command line.
-
- Args:
- options: A dictionary of options.
-
- Returns:
- An iterator of strings that should go on the command line.
- """
- for option_name in options:
- option = options[option_name]
-
- yield "-o"
- yield f"{option_name}={self.format_value(option)}"
-
- def format_flag_list(
- self, flags: dict[str, str | int | None]
- ) -> Iterator[str]:
- """Format the flags list.
-
- Formats a dictionary of flags into a list of strings to be used
- on the command line.
-
- Args:
- flags: A dictionary of flags.
-
- Returns:
- An iterator of strings that should be used on the command line.
- """
- for flag_name in flags:
- flag = flags[flag_name]
-
- yield flag_name
- if flag is not None:
- yield self.format_value(flag)
-
- def format_ssh_local_command(
- self,
- settings: SshSettings,
- extra_flags: dict[str, str | int | None] | None = None,
- extra_options: dict[str, str | int | bool] | None = None,
- ) -> list[str]:
- """Formats the local part of the ssh command.
-
- Formats the local section of the ssh command. This is the part of the
- command that will actually launch ssh on our local machine with the
- specified settings.
-
- Args:
- settings: The ssh settings.
- extra_flags: Extra flags to include.
- extra_options: Extra options to include.
-
- Returns:
- An array of strings that make up the command and its local
- arguments.
- """
- if extra_flags is None:
- extra_flags = {}
- if extra_options is None:
- extra_options = {}
-
- options = settings.construct_ssh_options()
- for extra_option_name in extra_options:
- options[extra_option_name] = extra_options[extra_option_name]
- options_list = list(self.format_options_list(options))
-
- flags = settings.construct_ssh_flags()
- for extra_flag_name in extra_flags:
- flags[extra_flag_name] = extra_flags[extra_flag_name]
- flags_list = list(self.format_flag_list(flags))
-
- all_options = options_list + flags_list
- host_name = self.format_host_name(settings)
- executable = self.format_ssh_executable(settings)
-
- base_command = [executable] + all_options + [host_name]
-
- return base_command
-
- def format_command(
- self,
- command: str,
- settings: SshSettings,
- extra_flags: dict[str, str | int | None] | None = None,
- extra_options: dict[str, str | int | bool] | None = None,
- ) -> list[str]:
- """Formats a full command.
-
- Formats the full command to run in order to run a command on a remote
- machine.
-
- Args:
- command: The command to run on the remote machine, as a single
- string.
- settings: The ssh settings to use.
- extra_flags: Extra flags to include with the settings.
- extra_options: Extra options to include with the settings.
-
- Returns:
- A list of strings that make up the total ssh command.
- """
- if extra_flags is None:
- extra_flags = {}
- if extra_options is None:
- extra_options = {}
-
- local_command = self.format_ssh_local_command(
- settings, extra_flags, extra_options
- )
- return local_command + [command]
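The core behavior of the formatter above is expanding an options dict into repeated `-o Name=value` arguments, with booleans rendered as the `yes`/`no` strings ssh expects. A standalone sketch of that mapping (not the antlion class itself):

```python
def format_value(value: object) -> str:
    # Booleans become the yes/no strings that ssh expects.
    if isinstance(value, bool):
        return "yes" if value else "no"
    return str(value)


options = {"StrictHostKeyChecking": False, "ConnectTimeout": 30}
args = [
    part
    for name, value in options.items()
    for part in ("-o", f"{name}={format_value(value)}")
]
print(args)  # ['-o', 'StrictHostKeyChecking=no', '-o', 'ConnectTimeout=30']
```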
diff --git a/packages/antlion/controllers/utils_lib/ssh/settings.py b/packages/antlion/controllers/utils_lib/ssh/settings.py
deleted file mode 100644
index 725ade7..0000000
--- a/packages/antlion/controllers/utils_lib/ssh/settings.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Create a SshSettings from a dictionary from an ACTS config
-
-Args:
- config dict instance from an ACTS config
-
-Returns:
- An instance of SshSettings or None
-"""
-
-from antlion.types import Json
-from antlion.validation import MapValidator
-
-
-class SshSettings(object):
- """Contains settings for ssh.
-
- Container for ssh connection settings.
-
- Attributes:
- username: The name of the user to log in as.
- hostname: The name of the host to connect to.
- executable: The ssh executable to use.
- port: The port to connect through (usually 22).
- host_file: The known host file to use.
- connect_timeout: How long to wait on a connection before giving a
- timeout.
- alive_interval: How long between ssh heartbeat signals to keep the
- connection alive.
- """
-
- def __init__(
- self,
- hostname: str,
- username: str,
- identity_file: str,
- port: int = 22,
- host_file: str = "/dev/null",
- connect_timeout: int = 30,
- alive_interval: int = 300,
- executable: str = "/usr/bin/ssh",
- ssh_config: str | None = None,
- ):
- self.username = username
- self.hostname = hostname
- self.executable = executable
- self.port = port
- self.host_file = host_file
- self.connect_timeout = connect_timeout
- self.alive_interval = alive_interval
- self.identity_file = identity_file
- self.ssh_config = ssh_config
-
- def construct_ssh_options(self) -> dict[str, str | int | bool]:
- """Construct the ssh options.
-
- Constructs a dictionary of options that should be used with the ssh
- command.
-
- Returns:
- A dictionary of option name to value.
- """
- current_options: dict[str, str | int | bool] = {}
- current_options["StrictHostKeyChecking"] = False
- current_options["UserKnownHostsFile"] = self.host_file
- current_options["ConnectTimeout"] = self.connect_timeout
- current_options["ServerAliveInterval"] = self.alive_interval
- return current_options
-
- def construct_ssh_flags(self) -> dict[str, None | str | int]:
- """Construct the ssh flags.
-
- Constructs what flags should be used in the ssh connection.
-
- Returns:
- A dictionary of flag name to value. If the value is None then it is
- treated as a binary flag.
- """
- current_flags: dict[str, None | str | int] = {}
- current_flags["-a"] = None
- current_flags["-x"] = None
- current_flags["-p"] = self.port
- if self.identity_file:
- current_flags["-i"] = self.identity_file
- if self.ssh_config:
- current_flags["-F"] = self.ssh_config
- return current_flags
-
-
-def from_config(config: Json) -> SshSettings:
- """Parse SSH settings from config JSON."""
-
- if not isinstance(config, dict):
- raise ValueError(f"config must be a dict, got {type(config)}")
-
- c = MapValidator(config)
- return SshSettings(
- hostname=c.get(str, "host"),
- username=c.get(str, "user"),
- identity_file=c.get(str, "identity_file"),
- port=c.get(int, "port", 22),
- ssh_config=c.get(str, "ssh_config", None),
- connect_timeout=c.get(int, "connect_timeout", 30),
- executable=c.get(str, "ssh_binary_path", "/usr/bin/ssh"),
- )
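For reference, `from_config` above expects a mapping with at least `host`, `user`, and `identity_file`; the remaining keys fall back to the constructor defaults. A hedged sketch of such a config entry (values are illustrative):

```python
# Illustrative config shape; the key names match the from_config lookups above.
ssh_entry = {
    "host": "192.0.2.1",
    "user": "fuchsia",
    "identity_file": "~/.ssh/fuchsia_ed25519",
    "port": 22,  # optional, defaults to 22
    "connect_timeout": 30,  # optional, defaults to 30
    "ssh_binary_path": "/usr/bin/ssh",  # optional, defaults to /usr/bin/ssh
}
# settings = from_config(ssh_entry)  # -> SshSettings(hostname="192.0.2.1", ...)
```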
diff --git a/packages/antlion/decorators.py b/packages/antlion/decorators.py
deleted file mode 100644
index 6924606..0000000
--- a/packages/antlion/decorators.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import typing
-from threading import RLock
-from typing import Callable, Generic, TypeVar
-
-S = TypeVar("S")
-T = TypeVar("T")
-
-
-_NOT_FOUND = object()
-
-
-class cached_property(Generic[T, S]): # pylint: disable=invalid-name
- """A property whose value is computed then cached; deleter can be overridden.
-
- Similar to functools.cached_property(), with the addition of a deleter function
- that can be overridden to provide custom cleanup. The deleter function doesn't throw an
- AttributeError if the value doesn't already exist.
-
- Useful for properties that are tied to the lifetime of a device and need to be
- recomputed upon reboot of said device.
-
- Example:
-
- ```
- class LinuxDevice:
- @cached_property
- def ssh(self) -> SSH:
- return SSH(self.ip)
-
- @ssh.deleter
- def ssh(self, ssh: SSH) -> None:
- ssh.terminate_connections()
- ```
- """
-
- def __init__(
- self,
- func: Callable[[S], T],
- deleter: Callable[[S, T], None] | None = None,
- ) -> None:
- self.func = func
- self._deleter = deleter
- self.name: str | None = None
- self.__doc__ = func.__doc__
- self.lock = RLock()
-
- def __set_name__(self, owner: object, name: str) -> None:
- if self.name is None:
- self.name = name
- elif name != self.name:
- raise TypeError(
- "Cannot assign the same cached_property to two different names "
- f"({self.name!r} and {name!r})."
- )
-
- def _cache(self, instance: S) -> dict[str, object]:
- if self.name is None:
- raise TypeError(
- "Cannot use cached_property instance without calling __set_name__ on it."
- )
- try:
- return instance.__dict__
- except (
- AttributeError
- ): # not all objects have __dict__ (e.g. class defines slots)
- msg = (
- f"No '__dict__' attribute on {type(instance).__name__!r} "
- f"instance to cache {self.name!r} property."
- )
- raise TypeError(msg) from None
-
- def __get__(self, instance: S, owner: object | None = None) -> T:
- cache = self._cache(instance)
- assert self.name is not None
- val = cache.get(self.name, _NOT_FOUND)
- if val is _NOT_FOUND:
- with self.lock:
- # check if another thread filled cache while we awaited lock
- val = cache.get(self.name, _NOT_FOUND)
- if val is _NOT_FOUND:
- val = self.func(instance)
- try:
- cache[self.name] = val
- except TypeError:
- msg = (
- f"The '__dict__' attribute on {type(instance).__name__!r} instance "
- f"does not support item assignment for caching {self.name!r} property."
- )
- raise TypeError(msg) from None
- return val
- return typing.cast(T, val)
-
- def __delete__(self, instance: S) -> None:
- cache = self._cache(instance)
- assert self.name is not None
- with self.lock:
- val = cache.pop(self.name, _NOT_FOUND)
- if val is _NOT_FOUND:
- return
- if self._deleter:
- self._deleter(instance, typing.cast(T, val))
-
- def deleter(self, deleter: Callable[[S, T], None]) -> cached_property[T, S]:
- self._deleter = deleter
- prop = type(self)(self.func, deleter)
- prop.name = self.name
- prop.__doc__ = self.__doc__
- prop.lock = self.lock
- return prop
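A short usage sketch of the `cached_property` above, showing the deleter hook firing on `del` (the class and values are illustrative; the import uses the pre-deletion `antlion.decorators` path):

```python
from antlion.decorators import cached_property  # pre-deletion import path


class Device:
    def __init__(self, ip: str) -> None:
        self.ip = ip

    @cached_property
    def banner(self) -> str:
        print("computing banner")  # runs once per cached lifetime
        return f"device at {self.ip}"

    @banner.deleter
    def banner(self, value: str) -> None:
        print(f"releasing {value!r}")  # custom cleanup; no AttributeError if absent


d = Device("192.0.2.1")
d.banner      # computes and caches the value
d.banner      # returned from the cache without recomputing
del d.banner  # runs the deleter; the next access recomputes
```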
diff --git a/packages/antlion/error.py b/packages/antlion/error.py
deleted file mode 100644
index a8284f6..0000000
--- a/packages/antlion/error.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""This class is where error information will be stored.
-"""
-
-# mypy: disable-error-code="no-untyped-def"
-from mobly import signals
-
-
-class ActsError(signals.TestError):
- """Base Acts Error"""
-
- def __init__(self, *args, **kwargs):
- class_name = self.__class__.__name__
- self.error_doc = self.__class__.__doc__
- self.error_code = getattr(
- ActsErrorCode, class_name, ActsErrorCode.UNKNOWN
- )
- extras = dict(
- **kwargs, error_doc=self.error_doc, error_code=self.error_code
- )
- details = args[0] if len(args) > 0 else ""
- super().__init__(details, extras)
-
-
-class ActsErrorCode:
- # Framework Errors 0-999
-
- UNKNOWN = 0
-
- # This error code is used to implement unittests for this class.
- ActsError = 100
- AndroidDeviceError = 101
-
- # Controllers Errors 1000-3999
-
- Sl4aStartError = 1001
- Sl4aApiError = 1002
- Sl4aConnectionError = 1003
- Sl4aProtocolError = 1004
- Sl4aNotInstalledError = 1005
- Sl4aRpcTimeoutError = 1006
-
- # Util Errors 4000-9999
-
- FastbootError = 9000
- AdbError = 9001
- AdbCommandError = 9002
diff --git a/packages/antlion/event/__init__.py b/packages/antlion/event/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/event/__init__.py
+++ /dev/null
diff --git a/packages/antlion/event/decorators.py b/packages/antlion/event/decorators.py
deleted file mode 100644
index 6903021..0000000
--- a/packages/antlion/event/decorators.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion.event.subscription_handle import StaticSubscriptionHandle
-
-
-def subscribe_static(event_type, event_filter=None, order=0):
- """A decorator that subscribes a static or module-level function.
-
- The resulting subscription handle must be registered manually.
- """
-
- class InnerSubscriptionHandle(StaticSubscriptionHandle):
- def __init__(self, func):
- super().__init__(
- event_type, func, event_filter=event_filter, order=order
- )
-
- return InnerSubscriptionHandle
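The decorator above only builds a `StaticSubscriptionHandle`; as its docstring says, the handle still has to be registered by hand, the same way `log_stream.py` later in this change registers `_update_handlers`. A hedged sketch using the pre-deletion import paths:

```python
from antlion.event import event_bus
from antlion.event.decorators import subscribe_static
from antlion.event.event import TestCaseBeginEvent


@subscribe_static(TestCaseBeginEvent)
def _on_test_case_begin(event):
    print(f"starting {event.test_case_name}")


# The decorator returns a handle, not a registration; register it explicitly.
event_bus.register_subscription(_on_test_case_begin.subscription)
```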
diff --git a/packages/antlion/event/event.py b/packages/antlion/event/event.py
deleted file mode 100644
index 8050017..0000000
--- a/packages/antlion/event/event.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-class Event(object):
- """The base class for all event objects."""
-
-
-# TODO(markdr): Move these into test_runner.py
-class TestEvent(Event):
- """The base class for test-related events."""
-
- def __init__(self):
- pass
-
-
-class TestCaseEvent(TestEvent):
- """The base class for test-case-related events."""
-
- def __init__(self, test_class, test_case):
- super().__init__()
- self.test_class = test_class
- self.test_case = test_case
-
- @property
- def test_case_name(self):
- return self.test_case
-
- @property
- def test_class_name(self):
- return self.test_class.__class__.__name__
-
-
-class TestCaseSignalEvent(TestCaseEvent):
- """The base class for test-case-signal-related events."""
-
- def __init__(self, test_class, test_case, test_signal):
- super().__init__(test_class, test_case)
- self.test_signal = test_signal
-
-
-class TestCaseBeginEvent(TestCaseEvent):
- """The event posted when a test case has begun."""
-
-
-class TestCaseEndEvent(TestCaseSignalEvent):
- """The event posted when a test case has ended."""
-
-
-class TestCaseSkippedEvent(TestCaseSignalEvent):
- """The event posted when a test case has been skipped."""
-
-
-class TestCaseFailureEvent(TestCaseSignalEvent):
- """The event posted when a test case has failed."""
-
-
-class TestCasePassedEvent(TestCaseSignalEvent):
- """The event posted when a test case has passed."""
-
-
-class TestClassEvent(TestEvent):
- """The base class for test-class-related events"""
-
- def __init__(self, test_class):
- super().__init__()
- self.test_class = test_class
-
-
-class TestClassBeginEvent(TestClassEvent):
- """The event posted when a test class has begun testing."""
-
-
-class TestClassEndEvent(TestClassEvent):
- """The event posted when a test class has finished testing."""
-
- def __init__(self, test_class, result):
- super().__init__(test_class)
- self.result = result
diff --git a/packages/antlion/event/event_bus.py b/packages/antlion/event/event_bus.py
deleted file mode 100644
index a0c8a79..0000000
--- a/packages/antlion/event/event_bus.py
+++ /dev/null
@@ -1,305 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import bisect
-import inspect
-import logging
-from threading import RLock
-
-from antlion.event.event_subscription import EventSubscription
-from antlion.event.subscription_handle import SubscriptionHandle
-
-
-class _EventBus(object):
- """
- Attributes:
- _subscriptions: A dictionary of {EventType: list<EventSubscription>}.
- _registration_id_map: A dictionary of
- {RegistrationID: EventSubscription}
- _subscription_lock: The lock to prevent concurrent removal or addition
- to events.
- """
-
- def __init__(self):
- self._subscriptions = {}
- self._registration_id_map = {}
- self._subscription_lock = RLock()
-
- def register(self, event_type, func, filter_fn=None, order=0):
- """Subscribes the given function to the event type given.
-
- Args:
- event_type: The type of the event to subscribe to.
- func: The function to call when the event is posted.
- filter_fn: An optional function to be called before calling the
- subscribed func. If this function returns falsy, then the
- function will not be invoked.
- order: The order the subscription should run in. Lower values
- run first, with the default value set to 0. In the case of a
- tie between two subscriptions of the same event type, the
- subscriber added first executes first. In the case of a tie
- between two subscribers of a different type, the type of the
- subscription that is more specific goes first (i.e.
- BaseEventType will execute after ChildEventType if they share
- the same order).
-
- Returns:
- A registration ID.
- """
- subscription = EventSubscription(
- event_type, func, event_filter=filter_fn, order=order
- )
- return self.register_subscription(subscription)
-
- def register_subscriptions(self, subscriptions):
- """Registers all subscriptions to the event bus.
-
- Args:
- subscriptions: an iterable that returns EventSubscriptions
-
- Returns:
- The list of registration IDs.
- """
- registration_ids = []
- for subscription in subscriptions:
- registration_ids.append(self.register_subscription(subscription))
-
- return registration_ids
-
- def register_subscription(self, subscription):
- """Registers the given subscription to the event bus.
-
- Args:
- subscription: An EventSubscription object
-
- Returns:
- A registration ID.
- """
- with self._subscription_lock:
- if subscription.event_type in self._subscriptions.keys():
- subscription_list = self._subscriptions[subscription.event_type]
- subscription_list.append(subscription)
- subscription_list.sort(key=lambda x: x.order)
- else:
- subscription_list = list()
- bisect.insort(subscription_list, subscription)
- self._subscriptions[subscription.event_type] = subscription_list
-
- registration_id = id(subscription)
- self._registration_id_map[registration_id] = subscription
-
- return registration_id
-
- def post(self, event, ignore_errors=False):
- """Posts an event to its subscribers.
-
- Args:
- event: The event object to send to the subscribers.
- ignore_errors: Deliver to all subscribers, ignoring any errors.
- """
- listening_subscriptions = []
- for current_type in inspect.getmro(type(event)):
- if current_type not in self._subscriptions.keys():
- continue
- for subscription in self._subscriptions[current_type]:
- listening_subscriptions.append(subscription)
-
- # The subscriptions will be collected in sorted runs of sorted order.
- # Running timsort here is the optimal way to sort this list.
- listening_subscriptions.sort(key=lambda x: x.order)
- for subscription in listening_subscriptions:
- try:
- subscription.deliver(event)
- except Exception:
- if ignore_errors:
- logging.exception(
- "An exception occurred while handling " "an event."
- )
- continue
- raise
-
- def unregister(self, registration_id):
- """Unregisters an EventSubscription.
-
- Args:
- registration_id: the Subscription or registration_id to unsubscribe.
- """
- if type(registration_id) is SubscriptionHandle:
- subscription = registration_id.subscription
- registration_id = id(registration_id.subscription)
- elif type(registration_id) is EventSubscription:
- subscription = registration_id
- registration_id = id(registration_id)
- elif registration_id in self._registration_id_map.keys():
- subscription = self._registration_id_map[registration_id]
- elif type(registration_id) is not int:
- raise ValueError(
- 'Subscription ID "%s" is not a valid ID. This value'
- "must be an integer ID returned from subscribe()."
- % registration_id
- )
- else:
- # The value is a "valid" id, but is not subscribed. It's possible
- # another thread has unsubscribed this value.
- logging.warning(
- "Attempted to unsubscribe %s, but the matching "
- "subscription cannot be found." % registration_id
- )
- return False
-
- event_type = subscription.event_type
- with self._subscription_lock:
- self._registration_id_map.pop(registration_id, None)
- if (
- event_type in self._subscriptions
- and subscription in self._subscriptions[event_type]
- ):
- self._subscriptions[event_type].remove(subscription)
- return True
-
- def unregister_all(self, from_list=None, from_event=None):
- """Removes all event subscriptions.
-
- Args:
- from_list: Unregisters all events from a given list.
- from_event: Unregisters all events of a given event type.
- """
- if from_list is None:
- from_list = list(self._registration_id_map.values())
-
- for subscription in from_list:
- if from_event is None or subscription.event_type == from_event:
- self.unregister(subscription)
-
-
-_event_bus = _EventBus()
-
-
-def register(event_type, func, filter_fn=None, order=0):
- """Subscribes the given function to the event type given.
-
- Args:
- event_type: The type of the event to subscribe to.
- func: The function to call when the event is posted.
- filter_fn: An optional function to be called before calling the subscribed
- func. If this function returns falsy, then the function will
- not be invoked.
- order: The order the subscription should run in. Lower values run
- first, with the default value set to 0. In the case of a tie
- between two subscriptions of the same event type, the
- subscriber added first executes first. In the case of a tie
- between two subscribers of a different type, the type of the
- subscription that is more specific goes first (i.e. BaseEventType
- will execute after ChildEventType if they share the same order).
-
- Returns:
- A registration ID.
- """
- return _event_bus.register(
- event_type, func, filter_fn=filter_fn, order=order
- )
-
-
-def register_subscriptions(subscriptions):
- """Registers all subscriptions to the event bus.
-
- Args:
- subscriptions: an iterable that returns EventSubscriptions
-
- Returns:
- The list of registration IDs.
- """
- return _event_bus.register_subscriptions(subscriptions)
-
-
-def register_subscription(subscription):
- """Registers the given subscription to the event bus.
-
- Args:
- subscription: An EventSubscription object
-
- Returns:
- A registration ID.
- """
- return _event_bus.register_subscription(subscription)
-
-
-def post(event, ignore_errors=False):
- """Posts an event to its subscribers.
-
- Args:
- event: The event object to send to the subscribers.
- ignore_errors: Deliver to all subscribers, ignoring any errors.
- """
- _event_bus.post(event, ignore_errors)
-
-
-def unregister(registration_id):
- """Unregisters an EventSubscription.
-
- Args:
- registration_id: the Subscription or registration_id to unsubscribe.
- """
- # null check for the corner case where the _event_bus is destroyed before
- # the subscribers unregister. In such case there is nothing else to
- # be done.
- if _event_bus is None:
- return True
- return _event_bus.unregister(registration_id)
-
-
-def unregister_all(from_list=None, from_event=None):
- """Removes all event subscriptions.
-
- Args:
- from_list: Unregisters all events from a given list.
- from_event: Unregisters all events of a given event type.
- """
- return _event_bus.unregister_all(from_list=from_list, from_event=from_event)
-
-
-class listen_for(object):
- """A context-manager class (with statement) for listening to an event within
- a given section of code.
-
- Usage:
-
- with listen_for(EventType, event_listener):
- func_that_posts_event() # Will call event_listener
-
- func_that_posts_event() # Will not call event_listener
-
- """
-
- def __init__(self, event_type, func, filter_fn=None, order=0):
- self.event_type = event_type
- self.func = func
- self.filter_fn = filter_fn
- self.order = order
- self.registration_id = None
-
- def __enter__(self):
- self.registration_id = _event_bus.register(
- self.event_type,
- self.func,
- filter_fn=self.filter_fn,
- order=self.order,
- )
-
- def __exit__(self, *_):
- _event_bus.unregister(self.registration_id)
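Putting the module-level helpers above together, a typical register/post/unregister flow looks roughly like this (the event class and serial are illustrative; imports use the pre-deletion paths):

```python
from antlion.event import event_bus
from antlion.event.event import Event


class DeviceRebootedEvent(Event):
    def __init__(self, serial):
        self.serial = serial


def _log_reboot(event):
    print(f"device {event.serial} rebooted")


reg_id = event_bus.register(DeviceRebootedEvent, _log_reboot)
event_bus.post(DeviceRebootedEvent("fuchsia-5254"))  # calls _log_reboot
event_bus.unregister(reg_id)

# Or scope the subscription to a block:
with event_bus.listen_for(DeviceRebootedEvent, _log_reboot):
    event_bus.post(DeviceRebootedEvent("fuchsia-5254"))
```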
diff --git a/packages/antlion/event/event_subscription.py b/packages/antlion/event/event_subscription.py
deleted file mode 100644
index 303e3e9..0000000
--- a/packages/antlion/event/event_subscription.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-class EventSubscription(object):
- """A class that defines the way a function is subscribed to an event.
-
- Attributes:
- event_type: The type of the event.
- _func: The subscribed function.
- _event_filter: A lambda that returns True if an event should be passed
- to the subscribed function.
- order: The order value in which this subscription should be called.
- """
-
- def __init__(self, event_type, func, event_filter=None, order=0):
- self._event_type = event_type
- self._func = func
- self._event_filter = event_filter
- self.order = order
-
- @property
- def event_type(self):
- return self._event_type
-
- def deliver(self, event):
- """Delivers an event to the subscriber.
-
- This function will not deliver the event if the event filter rejects the
- event.
-
- Args:
- event: The event to send to the subscriber.
- """
- if self._event_filter and not self._event_filter(event):
- return
- self._func(event)
diff --git a/packages/antlion/event/subscription_handle.py b/packages/antlion/event/subscription_handle.py
deleted file mode 100644
index 759ba04..0000000
--- a/packages/antlion/event/subscription_handle.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion.event.event_subscription import EventSubscription
-
-
-class SubscriptionHandle(object):
- """The object created by a method decorated with an event decorator."""
-
- def __init__(self, event_type, func, event_filter=None, order=0):
- self._event_type = event_type
- self._func = func
- self._event_filter = event_filter
- self._order = order
- self._subscription = None
- self._owner = None
-
- @property
- def subscription(self):
- if self._subscription:
- return self._subscription
- self._subscription = EventSubscription(
- self._event_type,
- self._func,
- event_filter=self._event_filter,
- order=self._order,
- )
- return self._subscription
-
- def __get__(self, instance, owner):
- # If our owner has been initialized, or we do not have an instance owner,
- # return self.
- if self._owner is not None or instance is None:
- return self
-
- # Otherwise, we create a new SubscriptionHandle that will only be used
- # for the instance that owns this SubscriptionHandle.
- ret = SubscriptionHandle(
- self._event_type, self._func, self._event_filter, self._order
- )
- ret._owner = instance
- ret._func = ret._wrap_call(ret._func)
- for attr, value in owner.__dict__.items():
- if value is self:
- setattr(instance, attr, ret)
- break
- return ret
-
- def _wrap_call(self, func):
- def _wrapped_call(*args, **kwargs):
- if self._owner is None:
- return func(*args, **kwargs)
- else:
- return func(self._owner, *args, **kwargs)
-
- return _wrapped_call
-
- def __call__(self, *args, **kwargs):
- return self._func(*args, **kwargs)
-
-
-class StaticSubscriptionHandle(SubscriptionHandle):
- """A SubscriptionHandle for static methods."""
diff --git a/packages/antlion/keys.py b/packages/antlion/keys.py
deleted file mode 100644
index c41d1e1..0000000
--- a/packages/antlion/keys.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module has the global key values that are used across framework
-modules.
-"""
-
-import enum
-
-
-class Config(enum.Enum):
- """Enum values for test config related lookups."""
-
- # Keys used to look up values from test config files.
- # These keys define the wording of test configs and their internal
- # references.
- key_log_path = "logpath"
- key_testbeds_under_test = "testbeds_under_test"
- key_testbed = "testbed"
- key_testbed_name = "name"
- # configpath is the directory. key_config_full_path is the file path.
- key_config_path = "configpath"
- key_config_full_path = "config_full_path"
- key_test_paths = "testpaths"
- key_port = "Port"
- key_address = "Address"
- key_test_case_iterations = "test_case_iterations"
- key_test_failure_tracebacks = "test_failure_tracebacks"
- # Config names for controllers packaged in ACTS.
- key_access_point = "AccessPoint"
- key_android_device = "AndroidDevice"
- key_attenuator = "Attenuator"
- key_bluetooth_pts_device = "BluetoothPtsDevice"
- key_fuchsia_device = "FuchsiaDevice"
- key_iperf_client = "IPerfClient"
- key_iperf_server = "IPerfServer"
- key_openwrt_ap = "OpenWrtAP"
- key_packet_capture = "PacketCapture"
- key_pdu = "PduDevice"
- key_sniffer = "Sniffer"
- # Internal keys, used internally, not exposed to user's config files.
- ikey_user_param = "user_params"
- ikey_testbed_name = "testbed_name"
- ikey_logger = "log"
- ikey_logpath = "log_path"
- ikey_summary_writer = "summary_writer"
- # module name of controllers packaged in ACTS.
- m_key_access_point = "access_point"
- m_key_android_device = "android_device"
- m_key_attenuator = "attenuator"
- m_key_bluetooth_pts_device = "bluetooth_pts_device"
- m_key_fuchsia_device = "fuchsia_device"
- m_key_iperf_client = "iperf_client"
- m_key_iperf_server = "iperf_server"
- m_key_openwrt_ap = "openwrt_ap"
- m_key_packet_capture = "packet_capture"
- m_key_pdu = "pdu"
- m_key_sniffer = "sniffer"
-
- # A list of keys whose values in configs should not be passed to test
- # classes without unpacking first.
- reserved_keys = (key_testbed, key_log_path, key_test_paths)
-
- # Controller names packaged with ACTS.
- builtin_controller_names = [
- key_access_point,
- key_android_device,
- key_attenuator,
- key_bluetooth_pts_device,
- key_fuchsia_device,
- key_iperf_client,
- key_iperf_server,
- key_openwrt_ap,
- key_packet_capture,
- key_pdu,
- key_sniffer,
- ]
-
-
-def get_name_by_value(value: str) -> str | None:
- for name, member in Config.__members__.items():
- if member.value == value:
- return name
- return None
-
-
-def get_module_name(name_in_config: str) -> str | None:
- """Translates the name of a controller in config file to its module name."""
- return value_to_value(name_in_config, "m_%s")
-
-
-def value_to_value(ref_value: str, pattern: str) -> str | None:
- """Translates the value of a key to the value of its corresponding key. The
- corresponding key is chosen based on the variable name pattern.
- """
- ref_key_name = get_name_by_value(ref_value)
- if not ref_key_name:
- return None
- target_key_name = pattern % ref_key_name
- try:
- return getattr(Config, target_key_name).value
- except AttributeError:
- return None
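The helpers above map a controller's config-file name to its module name by rewriting the enum member name with the `m_%s` pattern; for example (pre-deletion import path):

```python
from antlion import keys

print(keys.get_module_name("FuchsiaDevice"))   # -> "fuchsia_device"
print(keys.get_module_name("AccessPoint"))     # -> "access_point"
print(keys.get_module_name("NotAController"))  # -> None
```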
diff --git a/packages/antlion/libs/__init__.py b/packages/antlion/libs/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/libs/__init__.py
+++ /dev/null
diff --git a/packages/antlion/libs/logging/__init__.py b/packages/antlion/libs/logging/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/libs/logging/__init__.py
+++ /dev/null
diff --git a/packages/antlion/libs/logging/log_stream.py b/packages/antlion/libs/logging/log_stream.py
deleted file mode 100644
index af34a6f..0000000
--- a/packages/antlion/libs/logging/log_stream.py
+++ /dev/null
@@ -1,445 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, var-annotated"
-import logging
-import os
-import sys
-from logging import FileHandler, Handler, StreamHandler
-from logging.handlers import RotatingFileHandler
-
-from antlion import context
-from antlion.context import ContextLevel
-from antlion.event import event_bus
-from antlion.event.decorators import subscribe_static
-
-
-# yapf: disable
-class LogStyles:
- NONE = 0x00
- LOG_DEBUG = 0x01
- LOG_INFO = 0x02
- LOG_WARNING = 0x04
- LOG_ERROR = 0x08
- LOG_CRITICAL = 0x10
-
- DEFAULT_LEVELS = LOG_DEBUG + LOG_INFO + LOG_ERROR
- ALL_LEVELS = LOG_DEBUG + LOG_INFO + LOG_WARNING + LOG_ERROR + LOG_CRITICAL
-
- MONOLITH_LOG = 0x0100
- TESTCLASS_LOG = 0x0200
- TESTCASE_LOG = 0x0400
- TO_STDOUT = 0x0800
- TO_ACTS_LOG = 0x1000
- ROTATE_LOGS = 0x2000
-
- ALL_FILE_LOGS = MONOLITH_LOG + TESTCLASS_LOG + TESTCASE_LOG
-
- LEVEL_NAMES = {
- LOG_DEBUG: 'debug',
- LOG_INFO: 'info',
- LOG_WARNING: 'warning',
- LOG_ERROR: 'error',
- LOG_CRITICAL: 'critical',
- }
-
- LOG_LEVELS = [
- LOG_DEBUG,
- LOG_INFO,
- LOG_WARNING,
- LOG_ERROR,
- LOG_CRITICAL,
- ]
-
- LOG_LOCATIONS = [
- TO_STDOUT,
- TO_ACTS_LOG,
- MONOLITH_LOG,
- TESTCLASS_LOG,
- TESTCASE_LOG
- ]
-
- LEVEL_TO_NO = {
- LOG_DEBUG: logging.DEBUG,
- LOG_INFO: logging.INFO,
- LOG_WARNING: logging.WARNING,
- LOG_ERROR: logging.ERROR,
- LOG_CRITICAL: logging.CRITICAL,
- }
-
- LOCATION_TO_CONTEXT_LEVEL = {
- MONOLITH_LOG: ContextLevel.ROOT,
- TESTCLASS_LOG: ContextLevel.TESTCLASS,
- TESTCASE_LOG: ContextLevel.TESTCASE
- }
-# yapf: enable
-
-_log_streams = dict()
-_null_handler = logging.NullHandler()
-
-
-@subscribe_static(context.NewContextEvent)
-def _update_handlers(event):
- for log_stream in _log_streams.values():
- log_stream.update_handlers(event)
-
-
-event_bus.register_subscription(_update_handlers.subscription)
-
-
-def create_logger(
- name,
- log_name=None,
- base_path="",
- subcontext="",
- log_styles=LogStyles.NONE,
- stream_format=None,
- file_format=None,
-):
- """Creates a Python Logger object with the given attributes.
-
- Creation through this method will automatically manage the logger in the
- background for test-related events, such as TestCaseBegin and TestCaseEnd
- Events.
-
- Args:
- name: The name of the LogStream. Used as the file name prefix.
- log_name: The name of the underlying logger. Use LogStream name as
- default.
- base_path: The base path used by the logger.
- subcontext: Location of logs relative to the test context path.
- log_styles: An integer or array of integers that are the sum of
- corresponding flag values in LogStyles. Examples include:
-
- >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
-
- >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
-
- >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG,
- >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
- stream_format: Format used for log output to stream
- file_format: Format used for log output to files
- """
- if name in _log_streams:
- _log_streams[name].cleanup()
- log_stream = _LogStream(
- name,
- log_name,
- base_path,
- subcontext,
- log_styles,
- stream_format,
- file_format,
- )
- _set_logger(log_stream)
- return log_stream.logger
-
-
-def _set_logger(log_stream):
- _log_streams[log_stream.name] = log_stream
- return log_stream
-
-
-class AlsoToLogHandler(Handler):
- """Logs a message at a given level also to another logger.
-
- Used to forward messages, typically those at a high enough level, to the
- main log or another logger.
- """
-
- def __init__(self, to_logger=None, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self._log = logging.getLogger(to_logger)
-
- def emit(self, record):
- self._log.log(record.levelno, record.getMessage())
-
-
-class MovableFileHandler(FileHandler):
- """FileHandler implementation that allows the output file to be changed
- during operation.
- """
-
- def set_file(self, file_name):
- """Set the target output file to file_name.
-
- Args:
- file_name: path to the new output file
- """
- self.baseFilename = os.path.abspath(file_name)
- if self.stream is not None:
- new_stream = self._open()
- # An atomic operation redirects the output and closes the old file
- os.dup2(new_stream.fileno(), self.stream.fileno())
- self.stream = new_stream
-
-
-class MovableRotatingFileHandler(RotatingFileHandler):
- """RotatingFileHandler implementation that allows the output file to be
- changed during operation. Rotated files will automatically adopt the newest
- output path.
- """
-
- set_file = MovableFileHandler.set_file
-
-
-class InvalidStyleSetError(Exception):
- """Raised when the given LogStyles are an invalid set."""
-
-
-class _LogStream(object):
- """A class that sets up a logging.Logger object.
-
- The LogStream class creates a logging.Logger object. LogStream is also
- responsible for managing the logger when events take place, such as
- TestCaseEndEvent and TestCaseBeginEvent.
-
- Attributes:
- name: The name of the LogStream.
- logger: The logger created by this LogStream.
- base_path: The base path used by the logger. Use logging.log_path
- as default.
- subcontext: Location of logs relative to the test context path.
- stream_format: Format used for log output to stream
- file_format: Format used for log output to files
- """
-
- def __init__(
- self,
- name,
- log_name=None,
- base_path="",
- subcontext="",
- log_styles=LogStyles.NONE,
- stream_format=None,
- file_format=None,
- ):
- """Creates a LogStream.
-
- Args:
- name: The name of the LogStream. Used as the file name prefix.
- log_name: The name of the underlying logger. Use LogStream name
- as default.
- base_path: The base path used by the logger. Use logging.log_path
- as default.
- subcontext: Location of logs relative to the test context path.
- log_styles: An integer or array of integers that are the sum of
- corresponding flag values in LogStyles. Examples include:
-
- >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
-
- >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
-
- >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG,
- >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
- stream_format: Format used for log output to stream
- file_format: Format used for log output to files
- """
- self.name = name
- if log_name is not None:
- self.logger = logging.getLogger(log_name)
- else:
- self.logger = logging.getLogger(name)
- # Add a NullHandler to suppress unwanted console output
- self.logger.addHandler(_null_handler)
- self.logger.propagate = False
- self.base_path = base_path or getattr(
- logging, "log_path", "/tmp/acts_logs"
- )
- self.subcontext = subcontext
- context.TestContext.add_base_output_path(
- self.logger.name, self.base_path
- )
- context.TestContext.add_subcontext(self.logger.name, self.subcontext)
- self.stream_format = stream_format
- self.file_format = file_format
- self._testclass_handlers = []
- self._testcase_handlers = []
- if not isinstance(log_styles, list):
- log_styles = [log_styles]
- self.__validate_styles(log_styles)
- for log_style in log_styles:
- self.__handle_style(log_style)
-
- @staticmethod
- def __validate_styles(_log_styles_list):
- """Determines if the given list of styles is valid.
-
- Terminology:
- Log-level: any of [DEBUG, INFO, WARNING, ERROR, CRITICAL].
- Log Location: any of [MONOLITH_LOG, TESTCLASS_LOG,
- TESTCASE_LOG, TO_STDOUT, TO_ACTS_LOG].
-
- Styles are invalid when any of the below criteria are met:
- A log-level is not set within an element of the list.
- A log location is not set within an element of the list.
- A log-level, log location pair appears twice within the list.
- A log-level has both TESTCLASS and TESTCASE locations set
- within the list.
- ROTATE_LOGS is set without MONOLITH_LOG,
- TESTCLASS_LOG, or TESTCASE_LOG.
-
- Raises:
- InvalidStyleSetError if the given style cannot be achieved.
- """
-
- def invalid_style_error(message):
- raise InvalidStyleSetError(
- "{LogStyle Set: %s} %s" % (_log_styles_list, message)
- )
-
- # Store the log locations that have already been set per level.
- levels_dict = {}
- for log_style in _log_styles_list:
- for level in LogStyles.LOG_LEVELS:
- if log_style & level:
- levels_dict[level] = levels_dict.get(level, LogStyles.NONE)
- # Check that a log-level, log location pair has not yet
- # been set.
- for log_location in LogStyles.LOG_LOCATIONS:
- if log_style & log_location:
- if log_location & levels_dict[level]:
- invalid_style_error(
- "The log location %s for log level %s has "
- "been set multiple times"
- % (log_location, level)
- )
- else:
- levels_dict[level] |= log_location
- # Check that for a given log-level, not more than one
- # of MONOLITH_LOG, TESTCLASS_LOG, TESTCASE_LOG is set.
- locations = levels_dict[level] & LogStyles.ALL_FILE_LOGS
- valid_locations = [
- LogStyles.TESTCASE_LOG,
- LogStyles.TESTCLASS_LOG,
- LogStyles.MONOLITH_LOG,
- LogStyles.NONE,
- ]
- if locations not in valid_locations:
- invalid_style_error(
- "More than one of MONOLITH_LOG, TESTCLASS_LOG, "
- "TESTCASE_LOG is set for log level %s." % level
- )
- if log_style & LogStyles.ALL_LEVELS == 0:
- invalid_style_error(
- f"LogStyle {log_style} needs to set a log level."
- )
- if log_style & ~LogStyles.ALL_LEVELS == 0:
- invalid_style_error(
- f"LogStyle {log_style} needs to set a log location."
- )
- if log_style & LogStyles.ROTATE_LOGS and not log_style & (
- LogStyles.MONOLITH_LOG
- | LogStyles.TESTCLASS_LOG
- | LogStyles.TESTCASE_LOG
- ):
- invalid_style_error(
- "LogStyle %s has ROTATE_LOGS set, but does "
- "not specify a log type." % log_style
- )
-
- @staticmethod
- def __create_rotating_file_handler(filename):
- """Generates a callable to create an appropriate RotatingFileHandler."""
- # Magic number explanation: 10485760 == 10MB
- return MovableRotatingFileHandler(
- filename, maxBytes=10485760, backupCount=5
- )
-
- @staticmethod
- def __get_file_handler_creator(log_style):
- """Gets the callable to create the correct FileLogHandler."""
- create_file_handler = MovableFileHandler
- if log_style & LogStyles.ROTATE_LOGS:
- create_file_handler = _LogStream.__create_rotating_file_handler # type: ignore # Blanket ignore to enable mypy
- return create_file_handler
-
- @staticmethod
- def __get_lowest_log_level(log_style):
- """Returns the lowest log level's LogStyle for the given log_style."""
- for log_level in LogStyles.LOG_LEVELS:
- if log_level & log_style:
- return log_level
- return LogStyles.NONE
-
- def __get_current_output_dir(self, depth=ContextLevel.TESTCASE):
- """Gets the current output directory from the context system. Make the
- directory if it doesn't exist.
-
- Args:
- depth: The desired level of the output directory. For example,
- the TESTCLASS level would yield the directory associated with
- the current test class context, even if the test is currently
- within a test case.
- """
- curr_context = context.get_current_context(depth)
- return curr_context.get_full_output_path(self.logger.name)
-
- def __create_handler(self, creator, level, location):
- """Creates the FileHandler.
-
- Args:
- creator: The callable that creates the FileHandler
- level: The logging level (INFO, DEBUG, etc.) for this handler.
- location: The log location (MONOLITH, TESTCLASS, TESTCASE) for this
- handler.
-
- Returns: A FileHandler
- """
- directory = self.__get_current_output_dir(
- LogStyles.LOCATION_TO_CONTEXT_LEVEL[location]
- )
- base_name = f"{self.name}_{LogStyles.LEVEL_NAMES[level]}.txt"
- handler = creator(os.path.join(directory, base_name))
- handler.setLevel(LogStyles.LEVEL_TO_NO[level])
- if self.file_format:
- handler.setFormatter(self.file_format)
- return handler
-
- def __handle_style(self, log_style):
- """Creates the handlers described in the given log_style."""
- handler_creator = self.__get_file_handler_creator(log_style)
-
- # Handle streaming logs to STDOUT or the ACTS Logger
- if log_style & (LogStyles.TO_ACTS_LOG | LogStyles.TO_STDOUT):
- lowest_log_level = self.__get_lowest_log_level(log_style)
-
- if log_style & LogStyles.TO_ACTS_LOG:
- handler = AlsoToLogHandler()
- else: # LogStyles.TO_STDOUT:
- handler = StreamHandler(sys.stdout) # type: ignore # Blanket ignore to enable mypy
- if self.stream_format:
- handler.setFormatter(self.stream_format)
-
- handler.setLevel(LogStyles.LEVEL_TO_NO[lowest_log_level])
- self.logger.addHandler(handler)
-
- # Handle streaming logs to log-level files
- for log_level in LogStyles.LOG_LEVELS:
- log_location = log_style & LogStyles.ALL_FILE_LOGS
- if not (log_style & log_level and log_location):
- continue
-
- handler = self.__create_handler(
- handler_creator, log_level, log_location
- )
- self.logger.addHandler(handler)
-
- if log_style & LogStyles.TESTCLASS_LOG:
- self._testclass_handlers.append(handler)
- if log_style & LogStyles.TESTCASE_LOG:
- self._testcase_handlers.append(handler)
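A hedged usage sketch of `create_logger` above, combining one log level with one file location as the docstring examples suggest (the logger name is illustrative; imports use the pre-deletion paths, and the context system must be active for the output directory lookup):

```python
from antlion.libs.logging import log_stream
from antlion.libs.logging.log_stream import LogStyles

log = log_stream.create_logger(
    "iperf_client",
    log_styles=LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG,
)
log.info("traffic started")  # written to <test case dir>/iperf_client_info.txt
```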
diff --git a/packages/antlion/libs/ota/__init__.py b/packages/antlion/libs/ota/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/libs/ota/__init__.py
+++ /dev/null
diff --git a/packages/antlion/libs/ota/ota_runners/__init__.py b/packages/antlion/libs/ota/ota_runners/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/libs/ota/ota_runners/__init__.py
+++ /dev/null
diff --git a/packages/antlion/libs/ota/ota_runners/ota_runner.py b/packages/antlion/libs/ota/ota_runners/ota_runner.py
deleted file mode 100644
index 8d8bc96..0000000
--- a/packages/antlion/libs/ota/ota_runners/ota_runner.py
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import time
-from zipfile import ZipFile
-
-"""The setup time in seconds."""
-SL4A_SERVICE_SETUP_TIME = 5
-"""The path to the metadata found within the OTA package."""
-OTA_PACKAGE_METADATA_PATH = "META-INF/com/android/metadata"
-
-
-class OtaError(Exception):
- """Raised when an error in the OTA Update process occurs."""
-
-
-class InvalidOtaUpdateError(OtaError):
- """Raised when the update from one version to another is not valid."""
-
-
-class OtaRunner(object):
- """The base class for all OTA Update Runners."""
-
- def __init__(self, ota_tool, android_device):
- self.ota_tool = ota_tool
- self.android_device = android_device
- self.serial = self.android_device.serial
-
- def _update(self):
- post_build_id = self.get_post_build_id()
- log = self.android_device.log
- old_info = self.android_device.adb.getprop("ro.build.fingerprint")
- log.info("Starting Update. Beginning build info: %s", old_info)
- log.info("Stopping services.")
- self.android_device.stop_services()
- log.info("Beginning tool.")
- self.ota_tool.update(self)
- log.info("Tool finished. Waiting for boot completion.")
- self.android_device.wait_for_boot_completion()
- new_info = self.android_device.adb.getprop("ro.build.fingerprint")
- if not old_info or old_info == new_info:
- raise OtaError(
- "The device was not updated to a new build. "
- "Previous build: %s. Current build: %s. "
- "Expected build: %s" % (old_info, new_info, post_build_id)
- )
- log.info("Boot completed. Rooting adb.")
- self.android_device.root_adb()
- log.info("Root complete.")
- if self.android_device.skip_sl4a:
- self.android_device.log.info("Skipping SL4A install.")
- else:
- for _ in range(3):
- self.android_device.log.info(
- 'Re-installing SL4A from "%s".', self.get_sl4a_apk()
- )
- self.android_device.adb.install(
- f"-r -g {self.get_sl4a_apk()}", ignore_status=True
- )
- time.sleep(SL4A_SERVICE_SETUP_TIME)
- if self.android_device.is_sl4a_installed():
- break
- log.info("Starting services.")
- self.android_device.start_services()
- self.android_device.update_sdk_api_level()
- log.info("Services started. Running ota tool cleanup.")
- self.ota_tool.cleanup(self)
- log.info("Cleanup complete.")
-
- def get_ota_package_metadata(self, requested_field):
- """Returns a variable found within the OTA package's metadata.
-
- Args:
- requested_field: the name of the metadata field
-
- Returns:
- The metadata value, or None if the field cannot be found.
- """
- ota_zip = ZipFile(self.get_ota_package(), "r")
- if OTA_PACKAGE_METADATA_PATH in ota_zip.namelist():
- with ota_zip.open(OTA_PACKAGE_METADATA_PATH) as metadata:
- timestamp_line = requested_field.encode("utf-8")
- timestamp_offset = len(timestamp_line) + 1
-
- for line in metadata.readlines():
- if line.startswith(timestamp_line):
- return line[timestamp_offset:].decode("utf-8").strip()
- return None
-
- def validate_update(self):
- """Raises an error if updating to the next build is not valid.
-
- Raises:
- InvalidOtaUpdateError if the ota version is not valid, or cannot be
- validated.
- """
- # The timestamp the current device build was created at.
- cur_img_timestamp = self.android_device.adb.getprop("ro.build.date.utc")
- ota_img_timestamp = self.get_ota_package_metadata("post-timestamp")
-
- if ota_img_timestamp is None:
- raise InvalidOtaUpdateError(
- "Unable to find the timestamp " "for the OTA build."
- )
-
- try:
- if int(ota_img_timestamp) <= int(cur_img_timestamp):
- cur_fingerprint = self.android_device.adb.getprop(
- "ro.bootimage.build.fingerprint"
- )
- ota_fingerprint = self.get_post_build_id()
- raise InvalidOtaUpdateError(
- "The OTA image comes from an earlier build than the "
- "source build. Current build: Time: %s -- %s, "
- "OTA build: Time: %s -- %s"
- % (
- cur_img_timestamp,
- cur_fingerprint,
- ota_img_timestamp,
- ota_fingerprint,
- )
- )
- except ValueError:
- raise InvalidOtaUpdateError(
- "Unable to parse timestamps. Current timestamp: %s, OTA "
- "timestamp: %s" % (ota_img_timestamp, cur_img_timestamp)
- )
-
- def get_post_build_id(self):
- """Returns the post-build ID found within the OTA package metadata.
-
- Raises:
- InvalidOtaUpdateError if the post-build ID cannot be found.
- """
- return self.get_ota_package_metadata("post-build")
-
- def can_update(self):
- """Whether or not an update package is available for the device."""
- return NotImplementedError()
-
- def get_ota_package(self):
- raise NotImplementedError()
-
- def get_sl4a_apk(self):
- raise NotImplementedError()
-
-
-class SingleUseOtaRunner(OtaRunner):
- """A single use OtaRunner.
-
- A SingleUseOtaRunner can only be run once. If a user attempts to run it more
- than once, an error will be thrown. Users can avoid the error by checking
- can_update() before calling update().
- """
-
- def __init__(self, ota_tool, android_device, ota_package, sl4a_apk):
- super(SingleUseOtaRunner, self).__init__(ota_tool, android_device)
- self._ota_package = ota_package
- self._sl4a_apk = sl4a_apk
- self._called = False
-
- def can_update(self):
- return not self._called
-
- def update(self):
- """Starts the update process."""
- if not self.can_update():
- raise OtaError(
- "A SingleUseOtaTool instance cannot update a device "
- "multiple times."
- )
- self._called = True
- self._update()
-
- def get_ota_package(self):
- return self._ota_package
-
- def get_sl4a_apk(self):
- return self._sl4a_apk
-
-
-class MultiUseOtaRunner(OtaRunner):
- """A multiple use OtaRunner.
-
- A MultiUseOtaRunner can only be run as many times as there are packages
- provided to it. If a user attempts to run it more than the number
- of provided packages, an error will be thrown. Users can avoid the error by
- checking can_update() before calling update().
- """
-
- def __init__(self, ota_tool, android_device, ota_packages, sl4a_apks):
- super(MultiUseOtaRunner, self).__init__(ota_tool, android_device)
- self._ota_packages = ota_packages
- self._sl4a_apks = sl4a_apks
- self.current_update_number = 0
-
- def can_update(self):
- return self.current_update_number != len(self._ota_packages)
-
- def update(self):
- """Starts the update process."""
- if not self.can_update():
- raise OtaError(
- "This MultiUseOtaRunner has already updated all "
- "given packages onto the phone."
- )
- self._update()
- self.current_update_number += 1
-
- def get_ota_package(self):
- return self._ota_packages[self.current_update_number]
-
- def get_sl4a_apk(self):
- return self._sl4a_apks[self.current_update_number]
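As the runner docstrings above note, callers are expected to gate `update()` behind `can_update()` to avoid the single-use/exhausted-package error. A small illustrative wrapper (the `runner` argument is assumed to be a SingleUseOtaRunner or MultiUseOtaRunner built by the factory below):

```python
import logging


def maybe_update(runner) -> bool:
    """Run one OTA update if the runner still has a package left (illustrative)."""
    if not runner.can_update():
        logging.info("No OTA package left for %s", runner.serial)
        return False
    runner.update()  # flashes the next package and restores device services
    return True
```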
diff --git a/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py b/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
deleted file mode 100644
index dffb2ae..0000000
--- a/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, attr-defined, import-untyped, var-annotated, assignment, comparison-overlap"
-import logging
-
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import adb_sideload_ota_tool, ota_tool_factory
-
-_bound_devices = {}
-
-DEFAULT_OTA_TOOL = adb_sideload_ota_tool.AdbSideloadOtaTool.__name__
-DEFAULT_OTA_COMMAND = "adb"
-
-
-class OtaConfigError(Exception):
-    """Raised when there is a problem in the test configuration file."""
-
-
-def create_from_configs(config, android_device):
-    """Creates a new OtaRunner for the given AndroidDevice.
-
-    After an OtaRunner is assigned to a device, another OtaRunner cannot be
-    created for that device. This prevents OTA update tests from accidentally
-    flashing the same build onto a device more than once.
-
- Args:
- config: the ACTS config user_params.
- android_device: The device to run the OTA Update on.
-
- Returns:
- An OtaRunner responsible for updating the given device.
- """
- # Default to adb sideload
- try:
- ota_tool_class_name = get_ota_value_from_config(
- config, "ota_tool", android_device
- )
- except OtaConfigError:
- ota_tool_class_name = DEFAULT_OTA_TOOL
-
- if ota_tool_class_name not in config:
-        if ota_tool_class_name != DEFAULT_OTA_TOOL:
- raise OtaConfigError(
-                "If the ota_tool is overridden, the path to the tool must be "
-                'added to the ACTS config file under {"OtaToolName": '
-                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}).'
- % ota_tool_class_name
- )
- else:
- command = DEFAULT_OTA_COMMAND
- else:
- command = config[ota_tool_class_name]
- if type(command) is list:
- # If file came as a list in the config.
- if len(command) == 1:
- command = command[0]
- else:
- raise OtaConfigError(
- 'Config value for "%s" must be either a string or a list '
- "of exactly one element" % ota_tool_class_name
- )
-
- ota_package = get_ota_value_from_config(
- config, "ota_package", android_device
- )
- ota_sl4a = get_ota_value_from_config(config, "ota_sl4a", android_device)
- if type(ota_sl4a) != type(ota_package):
- raise OtaConfigError(
- "The ota_package and ota_sl4a must either both be strings, or "
- 'both be lists. Device with serial "%s" has mismatched types.'
- % android_device.serial
- )
- return create(
- ota_package, ota_sl4a, android_device, ota_tool_class_name, command
- )
-
-
-def create(
- ota_package,
- ota_sl4a,
- android_device,
- ota_tool_class_name=DEFAULT_OTA_TOOL,
- command=DEFAULT_OTA_COMMAND,
- use_cached_runners=True,
-):
- """
- Args:
- ota_package: A string or list of strings corresponding to the
- update.zip package location(s) for running an OTA update.
- ota_sl4a: A string or list of strings corresponding to the
- sl4a.apk package location(s) for running an OTA update.
-        android_device: The AndroidDevice to run the OTA Update on.
-        ota_tool_class_name: The class name for the desired ota_tool.
-        command: The command line tool name for the updater.
- use_cached_runners: Whether or not to use runners cached by previous
- create calls.
-
- Returns:
- An OtaRunner with the given properties from the arguments.
- """
- ota_tool = ota_tool_factory.create(ota_tool_class_name, command)
- return create_from_package(
- ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners
- )
-
-
-def create_from_package(
- ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners=True
-):
- """
- Args:
- ota_package: A string or list of strings corresponding to the
- update.zip package location(s) for running an OTA update.
- ota_sl4a: A string or list of strings corresponding to the
- sl4a.apk package location(s) for running an OTA update.
-        android_device: The AndroidDevice to run the OTA Update on.
-        ota_tool: The OtaTool to be paired with the returned OtaRunner.
- use_cached_runners: Whether or not to use runners cached by previous
- create calls.
-
- Returns:
- An OtaRunner with the given properties from the arguments.
- """
- if android_device in _bound_devices and use_cached_runners:
-        logging.warning(
-            "Android device %s has already been assigned an "
-            "OtaRunner. Returning previously created runner.",
-            android_device.serial,
-        )
- return _bound_devices[android_device]
-
- if type(ota_package) != type(ota_sl4a):
- raise TypeError(
- "The ota_package and ota_sl4a must either both be strings, or "
- 'both be lists. Device with serial "%s" has requested mismatched '
- "types." % android_device.serial
- )
-
- if type(ota_package) is str:
- runner = ota_runner.SingleUseOtaRunner(
- ota_tool, android_device, ota_package, ota_sl4a
- )
- elif type(ota_package) is list:
- runner = ota_runner.MultiUseOtaRunner(
- ota_tool, android_device, ota_package, ota_sl4a
- )
- else:
- raise TypeError(
- 'The "ota_package" value in the acts config must be '
- "either a list or a string."
- )
-
- _bound_devices[android_device] = runner
- return runner
-
-
-def get_ota_value_from_config(config, key, android_device):
-    """Returns the config value at the given key for the given AndroidDevice.
-
- Args:
- config: The ACTS config
- key: The base key desired (ota_tool, ota_sl4a, or ota_package)
- android_device: An AndroidDevice
-
- Returns: The value at the specified key.
-    Throws: OtaConfigError if the value cannot be determined from the config.
- """
- suffix = ""
- if "ota_map" in config:
- if android_device.serial in config["ota_map"]:
- suffix = f"_{config['ota_map'][android_device.serial]}"
-
- ota_package_key = f"{key}{suffix}"
- if ota_package_key not in config:
- if suffix != "":
- raise OtaConfigError(
- "Asked for an OTA Update without specifying a required value. "
- '"ota_map" has entry {"%s": "%s"}, but there is no '
- 'corresponding entry {"%s":"/path/to/file"} found within the '
- "ACTS config."
- % (android_device.serial, suffix[1:], ota_package_key)
- )
- else:
- raise OtaConfigError(
- "Asked for an OTA Update without specifying a required value. "
- '"ota_map" does not exist or have a key for serial "%s", and '
- 'the default value entry "%s" cannot be found within the ACTS '
- "config." % (android_device.serial, ota_package_key)
- )
-
- return config[ota_package_key]
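To illustrate the lookup above, a hypothetical user_params dict with an ota_map entry resolves per-device keys by suffix; the serial number and file paths below are placeholders invented for the example:

```python
from antlion.libs.ota.ota_runners import ota_runner_factory

# Hypothetical ACTS user_params; the serial and paths are made up.
USER_PARAMS = {
    "ota_map": {"ABC123": "pixel"},
    "ota_package_pixel": "/images/pixel/ota.zip",
    "ota_sl4a_pixel": "/images/pixel/sl4a.apk",
}


def resolve_ota_package(android_device) -> str:
    """Return the OTA package path chosen for this device's ota_map suffix."""
    return ota_runner_factory.get_ota_value_from_config(
        USER_PARAMS, "ota_package", android_device
    )
```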
diff --git a/packages/antlion/libs/ota/ota_tools/__init__.py b/packages/antlion/libs/ota/ota_tools/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/libs/ota/ota_tools/__init__.py
+++ /dev/null
diff --git a/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
deleted file mode 100644
index 3545571..0000000
--- a/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-
-from antlion.libs.ota.ota_tools.ota_tool import OtaTool
-
-# OTA Packages can be upwards of 1 GB. This may take some time to transfer over
-# USB 2.0.
-PUSH_TIMEOUT = 10 * 60
-
-
-class AdbSideloadOtaTool(OtaTool):
- """Updates an AndroidDevice using adb sideload."""
-
- def __init__(self, ignored_command):
- # "command" is ignored. The ACTS adb version is used to prevent
- # differing adb versions from constantly killing adbd.
- super(AdbSideloadOtaTool, self).__init__(ignored_command)
-
- def update(self, ota_runner):
- logging.info("Rooting adb")
- ota_runner.android_device.root_adb()
- logging.info("Rebooting to sideload")
- ota_runner.android_device.adb.reboot("sideload")
- ota_runner.android_device.adb.wait_for_sideload()
- logging.info("Sideloading ota package")
- package_path = ota_runner.get_ota_package()
- logging.info(f'Running adb sideload with package "{package_path}"')
- ota_runner.android_device.adb.sideload(
- package_path, timeout=PUSH_TIMEOUT
- )
- logging.info("Sideload complete. Waiting for device to come back up.")
- ota_runner.android_device.adb.wait_for_recovery()
- ota_runner.android_device.reboot(stop_at_lock_screen=True)
- logging.info("Device is up. Update complete.")
diff --git a/packages/antlion/libs/ota/ota_tools/ota_tool.py b/packages/antlion/libs/ota/ota_tools/ota_tool.py
deleted file mode 100644
index 43ca530..0000000
--- a/packages/antlion/libs/ota/ota_tools/ota_tool.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-
-
-class OtaTool(object):
-    """A wrapper for an OTA update command or tool.
-
- Each OtaTool acts as a facade to the underlying command or tool used to
- update the device.
- """
-
- def __init__(self, command):
- """Creates an OTA Update tool with the given properties.
-
- Args:
- command: A string that is used as the command line tool
- """
- self.command = command
-
- def update(self, ota_runner):
- """Begins the OTA Update. Returns after the update has installed.
-
- Args:
- ota_runner: The OTA Runner that handles the device information.
- """
- raise NotImplementedError()
-
- def cleanup(self, ota_runner):
- """A cleanup method for the OTA Tool to run after the update completes.
-
- Args:
- ota_runner: The OTA Runner that handles the device information.
- """
diff --git a/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py b/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py
deleted file mode 100644
index 8e02158..0000000
--- a/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def, attr-defined, import-untyped, var-annotated"
-from antlion.libs.ota.ota_tools.adb_sideload_ota_tool import AdbSideloadOtaTool
-from antlion.libs.ota.ota_tools.update_device_ota_tool import (
- UpdateDeviceOtaTool,
-)
-
-_CONSTRUCTORS = {
- AdbSideloadOtaTool.__name__: lambda command: AdbSideloadOtaTool(command),
- UpdateDeviceOtaTool.__name__: lambda command: UpdateDeviceOtaTool(command),
-}
-_constructed_tools = {}
-
-
-def create(ota_tool_class, command):
- """Returns an OtaTool with the given class name.
-
- If the tool has already been created, the existing instance will be
- returned.
-
- Args:
- ota_tool_class: the class/type of the tool you wish to use.
- command: the command line tool being used.
-
- Returns:
- An OtaTool.
- """
- if ota_tool_class in _constructed_tools:
- return _constructed_tools[ota_tool_class]
-
- if ota_tool_class not in _CONSTRUCTORS:
- raise KeyError(
- "Given Ota Tool class name does not match a known "
- 'name. Found "%s". Expected any of %s. If this tool '
- "does exist, add it to the _CONSTRUCTORS dict in this "
- "module." % (ota_tool_class, _CONSTRUCTORS.keys())
- )
-
- new_update_tool = _CONSTRUCTORS[ota_tool_class](command)
- _constructed_tools[ota_tool_class] = new_update_tool
-
- return new_update_tool
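As a sketch of the caching behavior, two create() calls with the same class name return the same instance; the class and command names here match the defaults defined in ota_runner_factory:

```python
from antlion.libs.ota.ota_tools import ota_tool_factory

tool_a = ota_tool_factory.create("AdbSideloadOtaTool", "adb")
tool_b = ota_tool_factory.create("AdbSideloadOtaTool", "adb")
assert tool_a is tool_b  # Tools are cached by class name.
```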
diff --git a/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
deleted file mode 100644
index 7221c4d..0000000
--- a/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import os
-import shutil
-import tempfile
-
-from antlion import utils
-from antlion.libs.ota.ota_tools import ota_tool
-from antlion.libs.proc import job
-
-# OTA Packages can be upwards of 1 GB. This may take some time to transfer over
-# USB 2.0. A/B devices must also complete the update in the background.
-UPDATE_TIMEOUT = 60 * 60
-UPDATE_LOCATION = "/data/ota_package/update.zip"
-
-
-class UpdateDeviceOtaTool(ota_tool.OtaTool):
- """Runs an OTA Update with system/update_engine/scripts/update_device.py."""
-
- def __init__(self, command):
- super(UpdateDeviceOtaTool, self).__init__(command)
-
- self.unzip_path = tempfile.mkdtemp()
- utils.unzip_maintain_permissions(self.command, self.unzip_path)
-
- self.command = os.path.join(self.unzip_path, "update_device.py")
-
- def update(self, ota_runner):
- logging.info("Forcing adb to be in root mode.")
- ota_runner.android_device.root_adb()
- update_command = "python3 %s -s %s %s" % (
- self.command,
- ota_runner.serial,
- ota_runner.get_ota_package(),
- )
- logging.info(f"Running {update_command}")
- result = job.run(update_command, timeout_sec=UPDATE_TIMEOUT)
- logging.info(f'Output: {result.stdout.decode("utf-8")}')
-
- logging.info("Rebooting device for update to go live.")
- ota_runner.android_device.reboot(stop_at_lock_screen=True)
- logging.info("Reboot sent.")
-
- def __del__(self):
- """Delete the unzipped update_device folder before ACTS exits."""
- shutil.rmtree(self.unzip_path)
diff --git a/packages/antlion/libs/ota/ota_updater.py b/packages/antlion/libs/ota/ota_updater.py
deleted file mode 100644
index 87bcb8c..0000000
--- a/packages/antlion/libs/ota/ota_updater.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from antlion import utils
-from antlion.libs.ota.ota_runners import ota_runner_factory
-
-"""Maps AndroidDevices to OtaRunners."""
-ota_runners = {}
-
-
-def initialize(user_params, android_devices):
- """Initialize OtaRunners for each device.
-
- Args:
- user_params: The user_params from the ACTS config.
- android_devices: The android_devices in the test.
- """
- for ad in android_devices:
- ota_runners[ad] = ota_runner_factory.create_from_configs(
- user_params, ad
- )
-
-
-def _check_initialization(android_device):
- """Check if a given device was initialized."""
- if android_device not in ota_runners:
- raise KeyError(
- 'Android Device with serial "%s" has not been '
-            "initialized for OTA Updates. Did you forget to call "
- "ota_updater.initialize()?" % android_device.serial
- )
-
-
-def update(android_device, ignore_update_errors=False):
- """Update a given AndroidDevice.
-
- Args:
- android_device: The device to update
- ignore_update_errors: Whether or not to ignore update errors such as
- no more updates available for a given device. Default is false.
- Throws:
- OtaError if ignore_update_errors is false and the OtaRunner has run out
- of packages to update the phone with.
- """
- _check_initialization(android_device)
- ota_runners[android_device].validate_update()
- try:
- ota_runners[android_device].update()
- except Exception as e:
- if ignore_update_errors:
- return
- android_device.log.error(e)
- android_device.take_bug_report(
- "ota_update", utils.get_current_epoch_time()
- )
- raise e
-
-
-def can_update(android_device):
- """Whether or not a device can be updated."""
- _check_initialization(android_device)
- return ota_runners[android_device].can_update()
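A typical test-side flow for this module, sketched under the assumption that user_params and android_devices come from the enclosing test class:

```python
from antlion.libs.ota import ota_updater


def apply_updates(user_params, android_devices) -> None:
    """Initialize runners once, then update every device that still can."""
    ota_updater.initialize(user_params, android_devices)
    for device in android_devices:
        if ota_updater.can_update(device):
            ota_updater.update(device)
```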
diff --git a/packages/antlion/libs/proc/__init__.py b/packages/antlion/libs/proc/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/libs/proc/__init__.py
+++ /dev/null
diff --git a/packages/antlion/libs/proc/job.py b/packages/antlion/libs/proc/job.py
deleted file mode 100644
index ac57b13..0000000
--- a/packages/antlion/libs/proc/job.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import os
-import shlex
-import subprocess
-import time
-
-from antlion.runner import CalledProcessError, CompletedProcess
-
-
-class Result(CompletedProcess):
- """Command execution result.
-
- Contains information on subprocess execution after it has exited.
-
- Attributes:
-        command: The command that was executed, either as a string or as a
-            list of the executable and its arguments.
-        exit_status: Integer exit code of the process.
-        stdout: Decoded string output from standard out.
-        stderr: Decoded string output from standard error.
- duration: How long the process ran for.
- did_timeout: True if the program timed out and was killed.
- """
-
- def __init__(
- self,
- command: str | list[str],
- stdout: bytes,
- stderr: bytes,
- exit_status: int,
- duration: float = 0,
- did_timeout: bool = False,
- encoding: str = "utf-8",
- ) -> None:
- """
- Args:
- command: The command that was run. This will be a list containing
- the executed command and all args.
- stdout: The raw bytes that standard output gave.
- stderr: The raw bytes that standard error gave.
- exit_status: The exit status of the command.
- duration: How long the command ran.
- did_timeout: True if the command timed out.
- encoding: The encoding standard that the program uses.
- """
- self.command = command
- self.exit_status = exit_status
- self._raw_stdout = stdout
- self._raw_stderr = stderr
- self._stdout_str: str | None = None
- self._stderr_str: str | None = None
- self._encoding = encoding
- self.duration = duration
- self.did_timeout = did_timeout
-
- @property
- def stdout(self) -> str:
- """String representation of standard output."""
- if not self._stdout_str:
- self._stdout_str = self._raw_stdout.decode(
- encoding=self._encoding, errors="replace"
- )
- self._stdout_str = self._stdout_str.strip()
- return self._stdout_str
-
- @property
- def stderr(self) -> str:
- """String representation of standard error."""
- if not self._stderr_str:
- self._stderr_str = self._raw_stderr.decode(
- encoding=self._encoding, errors="replace"
- )
- self._stderr_str = self._stderr_str.strip()
- return self._stderr_str
-
- @property
- def returncode(self) -> int:
- return self.exit_status
-
- def __repr__(self) -> str:
- if self.did_timeout:
-            prefix = "Command timed out"
- else:
- prefix = f"Command exited with {self.exit_status}"
-
- command = (
- " ".join(self.command)
- if isinstance(self.command, list)
- else self.command
- )
-
- return (
- f"{prefix} after {self.duration}s: {command}\n"
- f"stdout: {self._raw_stdout.decode('utf-8', errors='replace')}\n"
- f"stderr: {self._raw_stderr.decode('utf-8', errors='replace')}"
- )
-
-
-def run(
- command: str | list[str],
- stdin: bytes | None = None,
- timeout_sec: float | None = 60,
- log_output: bool = True,
- ignore_status: bool = False,
- env: dict[str, str] | None = None,
-) -> subprocess.CompletedProcess[bytes]:
- """Execute a command in a subprocess and return its output.
-
- Commands can be either shell commands (given as strings) or the
- path and arguments to an executable (given as a list). This function
- will block until the subprocess finishes or times out.
-
-    Args:
-        command: The command to execute.
-        stdin: Bytes to send to the command's standard input.
-        timeout_sec: Number of seconds to wait for the command to finish.
-        log_output: If true, print stdout and stderr to the debug log.
-        ignore_status: True to ignore the exit code of the subprocess.
-            Note that if you do ignore status codes, you should handle
-            non-zero exit codes explicitly.
-        env: Environment variables to set for the subprocess.
-
-    Returns:
-        Result of the completed command.
-
-    Raises:
-        CalledProcessError: when the process exits with a non-zero status
-            and ignore_status is False.
-        subprocess.TimeoutExpired: when the command took too long to
-            execute.
- """
- start = time.perf_counter()
- proc = subprocess.Popen(
- command,
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE,
- shell=not isinstance(command, list),
- )
- # Wait on the process terminating
- timed_out = False
- stdout = bytes()
- stderr = bytes()
- try:
- (stdout, stderr) = proc.communicate(stdin, timeout_sec)
- except subprocess.TimeoutExpired:
- timed_out = True
- proc.kill()
- proc.wait()
-
- elapsed = time.perf_counter() - start
-    exit_code = proc.poll()
-    # shlex.join only accepts a list, so leave shell command strings as-is.
-    command_str = command if isinstance(command, str) else shlex.join(command)
-    if log_output:
-        logging.debug(
-            "Command %s exited with %d after %.2fs\nstdout: %s\nstderr: %s",
-            command_str,
-            exit_code,
-            elapsed,
-            stdout.decode("utf-8", errors="replace"),
-            stderr.decode("utf-8", errors="replace"),
-        )
-    else:
-        logging.debug(
-            "Command %s exited with %d after %.2fs",
-            command_str,
-            exit_code,
-            elapsed,
-        )
-
- if timed_out:
- raise subprocess.TimeoutExpired(command, elapsed, stdout, stderr)
-
- if not ignore_status and exit_code != 0:
- raise CalledProcessError(proc.returncode, command, stdout, stderr)
-
- return subprocess.CompletedProcess(command, proc.returncode, stdout, stderr)
-
-
-def run_async(
- command: str | list[str], env: dict[str, str] | None = None
-) -> subprocess.Popen[bytes]:
-    """Execute a command in a subprocess asynchronously.
-
-    It is the caller's responsibility to kill/wait on the resulting
- subprocess.Popen object.
-
- Commands can be either shell commands (given as strings) or the
- path and arguments to an executable (given as a list). This function
- will not block.
-
- Args:
- command: The command to execute. Can be either a string or a list.
-        env: Environment variables to set for the subprocess.
-
- Returns:
- A subprocess.Popen object representing the created subprocess.
-
- """
- proc = subprocess.Popen(
- command,
- env=env,
- preexec_fn=os.setpgrp,
- shell=not isinstance(command, list),
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
- logging.debug("command %s started with pid %s", command, proc.pid)
- return proc
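A minimal usage sketch for these helpers; the echo and sleep commands are only illustrative:

```python
from antlion.libs.proc import job

# List form runs the program directly; a string would be run through a shell.
result = job.run(["echo", "hello"], timeout_sec=5)
print(result.returncode, result.stdout.decode("utf-8", errors="replace").strip())

# run_async returns immediately; the caller must wait on or kill the Popen.
proc = job.run_async(["sleep", "1"])
proc.wait()
```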
diff --git a/packages/antlion/libs/proc/process.py b/packages/antlion/libs/proc/process.py
deleted file mode 100644
index 577b459..0000000
--- a/packages/antlion/libs/proc/process.py
+++ /dev/null
@@ -1,295 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-from __future__ import annotations
-
-import logging
-import os
-import shlex
-import signal
-import subprocess
-import sys
-import time
-from collections.abc import Callable
-from threading import Thread
-from typing import Self
-
-
-class ProcessError(Exception):
- """Raised when invalid operations are run on a Process."""
-
-
-class Process(object):
- """A Process object used to run various commands.
-
- Attributes:
- _command: The initial command to run.
- _subprocess_kwargs: The kwargs to send to Popen for more control over
- execution.
- _process: The subprocess.Popen object currently executing a process.
- _listening_thread: The thread that is listening for the process to stop.
- _redirection_thread: The thread that is redirecting process output.
- _on_output_callback: The callback to call when output is received.
- _on_terminate_callback: The callback to call when the process terminates
- without stop() being called first.
- _started: Whether or not start() was called.
- _stopped: Whether or not stop() was called.
- """
-
- def __init__(self, command: list[str] | str) -> None:
- """Creates a Process object.
-
- Note that this constructor does not begin the process. To start the
- process, use Process.start().
- """
- if isinstance(command, str):
- # Split command string into list
- command = shlex.split(command)
- self._command = command
-
- self._process: subprocess.Popen[bytes] | None = None
-
- self._listening_thread: Thread | None = None
- self._redirection_thread: Thread | None = None
- self._on_output_callback: Callable[[str | bytes], None] = lambda _: None
- self._binary_output: bool = False
- self._on_terminate_callback: Callable[
- [subprocess.Popen[bytes]], list[str] | str
- ] = lambda _: ""
-
- self._started: bool = False
- self._stopped: bool = False
-
- def set_on_output_callback(
- self,
- on_output_callback: Callable[[str | bytes], None],
- binary: bool = False,
- ) -> Self:
- """Sets the on_output_callback function.
-
- Args:
- on_output_callback: The function to be called when output is sent to
- the output. The output callback has the following signature:
-
- >>> def on_output_callback(output_line):
- >>> return None
-
- binary: If True, read the process output as raw binary.
- Returns:
- self
- """
- self._on_output_callback = on_output_callback
- self._binary_output = binary
- return self
-
- def set_on_terminate_callback(
- self,
- on_terminate_callback: Callable[
- [subprocess.Popen[bytes]], list[str] | str
- ],
- ) -> Self:
- """Sets the on_self_terminate callback function.
-
- Args:
- on_terminate_callback: The function to be called when the process
- has terminated on its own. The callback has the following
- signature:
-
- >>> def on_self_terminate_callback(popen_process):
- >>> return 'command to run' or None
-
- If a string is returned, the string returned will be the command
- line used to run the command again. If None is returned, the
- process will end without restarting.
-
- Returns:
- self
- """
- self._on_terminate_callback = on_terminate_callback
- return self
-
- def start(self) -> None:
- """Starts the process's execution."""
- if self._started:
- raise ProcessError("Process has already started.")
- self._started = True
- self._process = None
-
- self._listening_thread = Thread(target=self._exec_loop)
- self._listening_thread.start()
-
- time_up_at = time.time() + 1
-
- while self._process is None:
- if time.time() > time_up_at:
- raise OSError("Unable to open process!")
-
- self._stopped = False
-
- @staticmethod
- def _get_timeout_left(timeout, start_time) -> float:
- return max(0.1, timeout - (time.time() - start_time))
-
- def is_running(self) -> bool:
- """Checks that the underlying Popen process is still running
-
- Returns:
- True if the process is running.
- """
- return self._process is not None and self._process.poll() is None
-
- def _join_threads(self) -> None:
- """Waits for the threads associated with the process to terminate."""
- if self._listening_thread is not None:
- self._listening_thread.join()
- self._listening_thread = None
-
- if self._redirection_thread is not None:
- self._redirection_thread.join()
- self._redirection_thread = None
-
- def _kill_process(self) -> None:
- """Kills the underlying process/process group. Implementation is
- platform-dependent."""
- if sys.platform == "win32":
- subprocess.check_call(f"taskkill /F /T /PID {self._process.pid}")
- else:
- self.signal(signal.SIGKILL)
-
- def wait(self, kill_timeout: float = 60.0) -> None:
- """Waits for the process to finish execution.
-
- If the process has reached the kill_timeout, the process will be killed
- instead.
-
- Note: the on_self_terminate callback will NOT be called when calling
- this function.
-
- Args:
- kill_timeout: The amount of time to wait until killing the process.
- """
- if self._stopped or self._process is None:
- raise ProcessError("Process is already being stopped.")
- self._stopped = True
-
- try:
- self._process.wait(kill_timeout)
- except subprocess.TimeoutExpired:
- self._kill_process()
- finally:
- self._join_threads()
- self._started = False
-
- def signal(self, sig) -> None:
- """Sends a signal to the process.
-
- Args:
- sig: The signal to be sent.
- """
- if sys.platform == "win32":
-            raise ProcessError("Unable to call Process.signal on Windows.")
- if self._process is None:
- raise ProcessError("No process is running")
-
- pgid = os.getpgid(self._process.pid)
- os.killpg(pgid, sig)
-
- def stop(self) -> None:
- """Stops the process.
-
- This command is effectively equivalent to kill, but gives time to clean
- up any related work on the process, such as output redirection.
-
- Note: the on_self_terminate callback will NOT be called when calling
- this function.
- """
- self.wait(0)
-
- def _redirect_output(self) -> None:
- """Redirects the output from the command into the on_output_callback."""
- if self._process is None:
- raise ProcessError("No process is running")
- if self._process.stdout is None:
- raise ProcessError("Process stdout is not PIPE")
-
- while True:
- data: str | bytes
- if self._binary_output:
- data = self._process.stdout.read(1024)
- else:
- data = (
- self._process.stdout.readline()
- .decode("utf-8", errors="replace")
- .rstrip()
- )
-
- if not data:
- return
- else:
- self._on_output_callback(data)
-
- def _exec_loop(self) -> None:
- """Executes Popen in a loop.
-
- When Popen terminates without stop() being called,
- self._on_terminate_callback() will be called. The returned value from
- _on_terminate_callback will then be used to determine if the loop should
- continue and start up the process again. See set_on_terminate_callback()
- for more information.
- """
- command = self._command
- while True:
- acts_logger = logging.getLogger()
- acts_logger.debug('Starting command "%s"', command)
-
- creationflags: int = 0
- if sys.platform == "win32":
- creationflags = subprocess.CREATE_NEW_PROCESS_GROUP
-
- self._process = subprocess.Popen(
- command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- creationflags=creationflags,
- start_new_session=False if sys.platform == "win32" else True,
- bufsize=1,
- )
- self._redirection_thread = Thread(target=self._redirect_output)
- self._redirection_thread.start()
- self._process.wait()
-
- if self._stopped:
- logging.debug(
- "The process for command %s was stopped.", command
- )
- break
- else:
- logging.debug("The process for command %s terminated.", command)
- # Wait for all output to be processed before sending
- # _on_terminate_callback()
- self._redirection_thread.join()
- logging.debug(
- "Beginning on_terminate_callback for %s.", command
- )
- retry_value = self._on_terminate_callback(self._process)
- if retry_value:
- if isinstance(retry_value, str):
- retry_value = shlex.split(retry_value)
- command = retry_value
- else:
- break
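A short sketch of how this class is meant to be driven, using a placeholder command, an output callback, and a restart-on-exit callback:

```python
from antlion.libs.proc.process import Process


def start_and_keep_alive(command: list[str]) -> Process:
    """Start a long-running command, printing output and restarting on exit."""
    proc = Process(command)
    proc.set_on_output_callback(lambda line: print(f"[out] {line}"))
    # Returning the same command restarts the process if it dies on its own.
    proc.set_on_terminate_callback(lambda popen: command)
    proc.start()
    return proc  # The caller later calls stop() or wait() to shut it down.
```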
diff --git a/packages/antlion/logger.py b/packages/antlion/logger.py
deleted file mode 100755
index d04b0bf..0000000
--- a/packages/antlion/logger.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="type-arg"
-import logging
-from types import TracebackType
-
-
-class LogLevel:
- """Sets the logging level threshold for logger within this context.
-
-    Logging messages which are less severe than level will be ignored.
- See https://docs.python.org/3/library/logging.html#levels for a list of
- levels.
- """
-
- def __init__(
- self, logger: logging.Logger | logging.LoggerAdapter, level: int
- ) -> None:
- self._logger = logger
- if isinstance(logger, logging.Logger):
- self._old_level = logger.level
- else:
- self._old_level = logger.logger.level
- self._new_level = level
-
- def __enter__(self) -> logging.Logger | logging.LoggerAdapter:
- self._logger.setLevel(self._new_level)
- return self._logger
-
- def __exit__(
- self,
- _exit_type: type[BaseException] | None,
- _exit_value: BaseException | None,
- _exit_traceback: TracebackType | None,
- ) -> None:
- self._logger.setLevel(self._old_level)
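Usage is a plain context manager; a sketch that temporarily silences debug output from a hypothetical noisy logger:

```python
import logging

from antlion.logger import LogLevel

noisy = logging.getLogger("some.noisy.module")  # placeholder logger name
with LogLevel(noisy, logging.WARNING):
    noisy.debug("suppressed while inside the context")
noisy.debug("handled according to the logger's original level again")
```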
diff --git a/packages/antlion/net.py b/packages/antlion/net.py
deleted file mode 100644
index e4a1851..0000000
--- a/packages/antlion/net.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import errno
-import socket
-import time
-
-
-def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
- """Wait for the host to start accepting connections on the port.
-
- Some services take some time to start. Call this after launching the service
- to avoid race conditions.
-
- Args:
- host: IP of the running service.
- port: Port of the running service.
- timeout_sec: Seconds to wait until raising TimeoutError
-
- Raises:
- TimeoutError: when timeout_sec has expired without a successful
- connection to the service
- """
- last_error: OSError | None = None
- timeout = time.perf_counter() + timeout_sec
-
- while True:
- try:
- time_left = max(timeout - time.perf_counter(), 0)
- with socket.create_connection((host, port), timeout=time_left):
- return
- except ConnectionRefusedError as e:
- # Occurs when the host is online but not ready to accept connections
- # yet; wait to see if the host becomes ready.
- last_error = e
- except TimeoutError as e:
- last_error = e
- except OSError as e:
- if e.errno == errno.EHOSTUNREACH:
- # No route to host. Occurs when the interface to the host is
- # torn down; wait to see if the interface comes back.
- last_error = e
- else:
- # Unexpected error
- raise e
-
- if time.perf_counter() >= timeout:
- raise TimeoutError(
- f"Waited over {timeout_sec}s for the service to start "
- f"accepting connections at {host}:{port}"
- ) from last_error
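For example, a test that just launched a service can block until its port accepts connections; the address, port, and timeout below are placeholders:

```python
from antlion.net import wait_for_port

# Wait up to 10 seconds for an iperf3 server started elsewhere to come up.
wait_for_port("192.168.1.1", 5201, timeout_sec=10)
```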
diff --git a/packages/antlion/py.typed b/packages/antlion/py.typed
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/py.typed
+++ /dev/null
diff --git a/packages/antlion/runner.py b/packages/antlion/runner.py
deleted file mode 100644
index a47f5d8..0000000
--- a/packages/antlion/runner.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import logging
-import subprocess
-from os import PathLike
-from typing import IO, Protocol, Sequence, TypeAlias
-
-from mobly import signals
-
-StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]
-_CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath]
-
-
-class Runner(Protocol):
- """A command runner."""
-
- log: logging.LoggerAdapter[logging.Logger]
-
- def run(
- self,
- command: str | list[str],
- stdin: bytes | None = None,
- timeout_sec: float | None = None,
- log_output: bool = True,
- ) -> subprocess.CompletedProcess[bytes]:
- """Run command with arguments.
-
- Args:
- command: Command to execute
- stdin: Standard input to command.
- timeout_sec: Seconds to wait for command to finish
- log_output: If true, print stdout and stderr to the debug log.
-
- Returns:
- Result of the completed command.
-
- Raises:
- CalledProcessError: when the process exits with a non-zero status
- subprocess.TimeoutExpired: when the timeout expires while waiting
- for a child process
- CalledProcessTransportError: when the underlying transport fails
- """
- ...
-
- def run_async(self, command: str) -> subprocess.CompletedProcess[bytes]:
- """Run command asynchronously.
-
- Args:
- command: Command to execute
-
- Returns:
- Results of the dispatched command.
-
- Raises:
- CalledProcessError: when the process fails to start
- subprocess.TimeoutExpired: when the timeout expires while waiting
- for a child process
- CalledProcessTransportError: when the underlying transport fails
- """
- ...
-
- def start(
- self,
- command: list[str],
- stdout: IO[bytes] | int = subprocess.PIPE,
- stdin: IO[bytes] | int = subprocess.PIPE,
- ) -> subprocess.Popen[bytes]:
- """Execute a child program in a new process."""
- ...
-
-
-class CompletedProcess(Protocol):
- @property
- def returncode(self) -> int:
- """Exit status."""
- ...
-
- @property
- def stdout(self) -> str:
- """Output stream."""
- ...
-
- @property
- def stderr(self) -> str:
- """Error output stream."""
- ...
-
-
-class CalledProcessError(subprocess.CalledProcessError):
- """Wrapper over subprocess.CalledProcessError to guarantee stdout and stderr
- are bytes and not None."""
-
- returncode: int
- cmd: _CMD
- output: bytes
-
- stdout: bytes
- stderr: bytes
-
- def __init__(
- self: CalledProcessError,
- returncode: int,
- cmd: _CMD,
- output: str | bytes | None = None,
- stderr: str | bytes | None = None,
- ) -> None:
-        # For usability, guarantee stdout and stderr are bytes and not None.
- if isinstance(output, str):
- output = output.encode("utf-8")
- if isinstance(stderr, str):
- stderr = stderr.encode("utf-8")
- if output is None:
- output = bytes()
- if stderr is None:
- stderr = bytes()
-
- super().__init__(returncode, cmd, output, stderr)
-
- def __str__(self) -> str:
- out = super().__str__()
- out += f'\nstderr: {self.stderr.decode("utf-8", errors="replace")}'
- out += f'\nstdout: {self.stdout.decode("utf-8", errors="replace")}'
- return out
-
-
-class CalledProcessTransportError(signals.TestError):
- """Error in process's underlying transport."""
diff --git a/packages/antlion/test_utils/OWNERS b/packages/antlion/test_utils/OWNERS
deleted file mode 100644
index bf3ed6c..0000000
--- a/packages/antlion/test_utils/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-include /acts_tests/tests/OWNERS
diff --git a/packages/antlion/test_utils/__init__.py b/packages/antlion/test_utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/test_utils/__init__.py
+++ /dev/null
diff --git a/packages/antlion/test_utils/abstract_devices/__init__.py b/packages/antlion/test_utils/abstract_devices/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/test_utils/abstract_devices/__init__.py
+++ /dev/null
diff --git a/packages/antlion/test_utils/abstract_devices/wlan_device.py b/packages/antlion/test_utils/abstract_devices/wlan_device.py
deleted file mode 100644
index 87f8fae..0000000
--- a/packages/antlion/test_utils/abstract_devices/wlan_device.py
+++ /dev/null
@@ -1,570 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import enum
-from typing import Protocol, runtime_checkable
-
-import fidl_fuchsia_wlan_policy as f_wlan_policy
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ClientStatusConnected,
- ClientStatusConnecting,
- ClientStatusIdle,
- ConnectionState,
-)
-from honeydew.affordances.connectivity.wlan.utils.types import (
- SecurityType as HdSecurityType,
-)
-from mobly.records import TestResultRecord
-
-from antlion.controllers import iperf_client
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.ap_lib.hostapd_security import SecurityMode
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
- WlanPolicyControllerError,
-)
-from antlion.controllers.iperf_client import IPerfClientBase
-from antlion.controllers.pdu import PduDevice
-from antlion.test_utils.wifi import wifi_test_utils as awutils
-from antlion.utils import PingResult, adb_shell_ping
-
-DEFAULT_ASSOCIATE_TIMEOUT_SEC = 30
-
-
-@runtime_checkable
-class SupportsWLAN(Protocol):
- """A generic WLAN device."""
-
- @property
- def identifier(self) -> str:
- """Unique identifier for this device."""
- ...
-
- @property
- def has_wep_support(self) -> bool:
- "Whether the wlan_device has support for WEP security"
- ...
-
- @property
- def has_wpa_support(self) -> bool:
- "Whether the wlan_device has support for WPA security"
- ...
-
- def take_bug_report(self, record: TestResultRecord) -> None:
- """Take a bug report on the device and stores it on the host.
-
- Will store the bug report in the output directory for the currently running
- test, as specified by `record`.
-
- Args:
- record: Information about the current running test.
- """
- ...
-
- def associate(
- self,
- target_ssid: str,
- target_security: SecurityMode,
- target_pwd: str | None = None,
- key_mgmt: str | None = None,
- check_connectivity: bool = True,
- hidden: bool = False,
- ) -> bool:
- """Associate to a target network.
-
- Args:
- target_ssid: SSID to associate to.
- target_pwd: Password for the SSID, if necessary.
- key_mgmt: The hostapd wpa_key_mgmt, if specified.
- check_connectivity: Whether to check for internet connectivity.
- hidden: Whether the network is hidden.
- target_security: Target security for network, used to
- save the network in policy connects (see wlan_policy_lib)
- Returns:
- True if successfully connected to WLAN, False if not.
- """
- ...
-
- def disconnect(self) -> None:
- """Disconnect from all WLAN networks."""
- ...
-
- def get_default_wlan_test_interface(self) -> str:
- """Name of default WLAN interface to use for testing."""
- ...
-
- def is_connected(self, ssid: str | None = None) -> bool:
- """Determines if wlan_device is connected to wlan network.
-
- Args:
- ssid: If specified, check if device is connected to a specific network.
-
- Returns:
- True if connected to requested network; or if ssid not specified, True if
- connected to any network; otherwise, False.
- """
- ...
-
- def create_iperf_client(
- self, test_interface: str | None = None
- ) -> IPerfClientBase:
- """Create an iPerf3 client on this device.
-
- Args:
- test_interface: Name of test interface. Defaults to first found wlan client
- interface.
-
- Returns:
- IPerfClient object
- """
- ...
-
- def get_wlan_interface_id_list(self) -> list[int]:
- """List available WLAN interfaces.
-
- Returns:
- A list of wlan interface IDs.
- """
- ...
-
- def destroy_wlan_interface(self, iface_id: int) -> None:
- """Destroy the specified WLAN interface.
-
- Args:
- iface_id: ID of the interface to destroy.
- """
- ...
-
- def ping(
- self,
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 25,
- additional_ping_params: str | None = None,
- ) -> PingResult:
- """Pings from a device to an IP address or hostname
-
- Args:
- dest_ip: IP or hostname to ping
- count: How many icmp packets to send
- interval: Milliseconds to wait between pings
- timeout: Milliseconds to wait before having the icmp packet timeout
- size: Size of the icmp packet in bytes
- additional_ping_params: Command option flags to append to the command string
-
- Returns:
-            A PingResult describing the outcome of the ping, containing
-            the following items:
-                status: Whether the ping was successful.
-                rtt_min: The minimum round trip time of the ping.
-                rtt_max: The maximum round trip time of the ping.
-                rtt_avg: The average round trip time of the ping.
- stdout: The standard out of the ping command.
- stderr: The standard error of the ping command.
- """
- ...
-
- def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
- """Reboot a device abruptly without notification.
-
- Args:
- pdus: All testbed PDUs
- """
- ...
-
- def feature_is_present(self, feature: str) -> bool:
- """Check if a WLAN feature is present.
-
- Args:
- feature: WLAN feature to query
-
- Returns:
- True if `feature` is present; otherwise, False.
- """
- ...
-
- def wifi_toggle_state(self, state: bool | None) -> None:
- """Toggle the state of Wi-Fi.
-
- Args:
- state: Wi-Fi state to set to. If None, opposite of the current state.
- """
- ...
-
- def reset_wifi(self) -> None:
- """Clears all saved Wi-Fi networks on a device.
-
- This will turn Wi-Fi on.
- """
- ...
-
- def turn_location_off_and_scan_toggle_off(self) -> None:
- """Turn off Wi-Fi location scans."""
- ...
-
-
-class AndroidWlanDevice(SupportsWLAN):
- """Android device that supports WLAN."""
-
- def __init__(self, android_device: AndroidDevice) -> None:
- self.device = android_device
-
- @property
- def identifier(self) -> str:
- return self.device.serial
-
- @property
- def has_wep_support(self) -> bool:
- "Whether the wlan_device has support for WEP security"
- return True
-
- @property
- def has_wpa_support(self) -> bool:
- "Whether the wlan_device has support for WPA security"
- return True
-
- def wifi_toggle_state(self, state: bool | None) -> None:
- awutils.wifi_toggle_state(self.device, state)
-
- def reset_wifi(self) -> None:
- awutils.reset_wifi(self.device)
-
- def take_bug_report(self, record: TestResultRecord) -> None:
- self.device.take_bug_report(record.test_name, record.begin_time)
-
- def turn_location_off_and_scan_toggle_off(self) -> None:
- awutils.turn_location_off_and_scan_toggle_off(self.device)
-
- def associate(
- self,
- target_ssid: str,
- target_security: SecurityMode,
- target_pwd: str | None = None,
- key_mgmt: str | None = None,
- check_connectivity: bool = True,
- hidden: bool = False,
- ) -> bool:
- network = {"SSID": target_ssid, "hiddenSSID": hidden}
- if target_pwd:
- network["password"] = target_pwd
- if key_mgmt:
- network["security"] = key_mgmt
- try:
- awutils.connect_to_wifi_network(
- self.device,
- network,
- check_connectivity=check_connectivity,
- hidden=hidden,
- )
- return True
- except Exception as e:
-            self.device.log.info(f"Failed to associate ({e})")
- return False
-
- def disconnect(self) -> None:
- awutils.turn_location_off_and_scan_toggle_off(self.device)
-
- def get_wlan_interface_id_list(self) -> list[int]:
- raise NotImplementedError(
- "get_wlan_interface_id_list is not implemented"
- )
-
- def get_default_wlan_test_interface(self) -> str:
- return "wlan0"
-
- def destroy_wlan_interface(self, iface_id: int) -> None:
- raise NotImplementedError("destroy_wlan_interface is not implemented")
-
- def is_connected(self, ssid: str | None = None) -> bool:
- wifi_info = self.device.droid.wifiGetConnectionInfo()
- if ssid:
- return "BSSID" in wifi_info and wifi_info["SSID"] == ssid
- return "BSSID" in wifi_info
-
- def ping(
- self,
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 25,
- additional_ping_params: str | None = None,
- ) -> PingResult:
- success = adb_shell_ping(
- self.device, dest_ip, count=count, timeout=timeout
- )
- return PingResult(
- exit_status=0 if success else 1,
- # TODO: Implement the rest if needed for any tests
- stdout="",
- stderr="",
- transmitted=None,
- received=None,
- time_ms=None,
- rtt_min_ms=None,
- rtt_avg_ms=None,
- rtt_max_ms=None,
- rtt_mdev_ms=None,
- )
-
- def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
- raise NotImplementedError("hard_power_cycle is not implemented")
-
- def create_iperf_client(
- self, test_interface: str | None = None
- ) -> IPerfClientBase:
- if not test_interface:
- test_interface = self.get_default_wlan_test_interface()
-
- return iperf_client.IPerfClientOverAdb(
- android_device=self.device, test_interface=test_interface
- )
-
- def feature_is_present(self, feature: str) -> bool:
- raise NotImplementedError("feature_is_present is not implemented")
-
-
-class AssociationMode(enum.Enum):
- """Defines which FIDLs to use for WLAN association and disconnect."""
-
- DRIVER = 1
- """Call WLAN core FIDLs to provide all association and disconnect."""
- POLICY = 2
- """Call WLAN policy FIDLs to provide all association and disconnect."""
-
-
-class FuchsiaWlanDevice(SupportsWLAN):
- """Fuchsia device that supports WLAN."""
-
- def __init__(self, fuchsia_device: FuchsiaDevice, mode: AssociationMode):
- self.device = fuchsia_device
- self.device.configure_wlan()
- self.association_mode = mode
-
- @property
- def identifier(self) -> str:
- return self.device.ip
-
- @property
- def has_wep_support(self) -> bool:
- for line in self._get_wlandevicemonitor_config().splitlines():
- if "wep_supported" in line and "Bool(true)" in line:
- return True
- return False
-
- @property
- def has_wpa_support(self) -> bool:
- for line in self._get_wlandevicemonitor_config().splitlines():
- if "wpa1_supported" in line and "Bool(true)" in line:
- return True
- return False
-
- def _get_wlandevicemonitor_config(self) -> str:
- return self.device.ffx.run(
- ["component", "show", "core/wlandevicemonitor"]
- )
-
- def wifi_toggle_state(self, state: bool | None) -> None:
- pass
-
- def reset_wifi(self) -> None:
- pass
-
- def take_bug_report(self, _: TestResultRecord) -> None:
- self.device.take_bug_report()
-
- def turn_location_off_and_scan_toggle_off(self) -> None:
- pass
-
- def associate(
- self,
- target_ssid: str,
- target_security: SecurityMode,
- target_pwd: str | None = None,
- key_mgmt: str | None = None,
- check_connectivity: bool = True,
- hidden: bool = False,
- timeout_sec: int = DEFAULT_ASSOCIATE_TIMEOUT_SEC,
- ) -> bool:
- match self.association_mode:
- case AssociationMode.DRIVER:
- ssid_bss_desc_map = (
- self.device.honeydew_fd.wlan_core.scan_for_bss_info()
- )
-
- bss_descs_for_ssid = ssid_bss_desc_map.get(target_ssid, None)
- if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
- self.device.log.error(
- "Scan failed to find a BSS description for target_ssid "
- f"{target_ssid}"
- )
- return False
-
- return self.device.honeydew_fd.wlan_core.connect(
- target_ssid, target_pwd, bss_descs_for_ssid[0]
- )
- case AssociationMode.POLICY:
- try:
- self.device.honeydew_fd.wlan_policy.save_network(
- target_ssid,
- HdSecurityType(target_security.fuchsia_security_type()),
- target_pwd=target_pwd,
- )
- status = self.device.honeydew_fd.wlan_policy.connect(
- target_ssid,
- HdSecurityType(target_security.fuchsia_security_type()),
- )
- if status is f_wlan_policy.RequestStatus.ACKNOWLEDGED:
- self.device.wlan_policy_controller.wait_for_network_state(
- target_ssid,
- ConnectionState.CONNECTED,
- timeout_sec=timeout_sec,
- )
- else:
- self.device.log.warning(
- f"Received request status: {status.name} while trying to "
- f"connect to ssid: {target_ssid}."
- )
- return False
-
- return True
- except WlanPolicyControllerError as e:
- self.device.log.error(
- f"Failed to save and connect to {target_ssid} with "
- f"error: {e}"
- )
- return False
-
- def disconnect(self) -> None:
- """Function to disconnect from a Fuchsia WLAN device.
- Asserts if disconnect was not successful.
- """
- match self.association_mode:
- case AssociationMode.DRIVER:
- self.device.honeydew_fd.wlan_core.disconnect()
- case AssociationMode.POLICY:
- self.device.honeydew_fd.wlan_policy.remove_all_networks()
- self.device.wlan_policy_controller.wait_for_no_connections()
-
- def ping(
- self,
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 25,
- additional_ping_params: str | None = None,
- ) -> PingResult:
- return self.device.ping(
- dest_ip,
- count=count,
- interval=interval,
- timeout=timeout,
- size=size,
- additional_ping_params=additional_ping_params,
- )
-
- def get_wlan_interface_id_list(self) -> list[int]:
- return self.device.honeydew_fd.wlan_core.get_iface_id_list()
-
- def get_default_wlan_test_interface(self) -> str:
- if self.device.wlan_client_test_interface_name is None:
- raise TypeError(
- "Expected wlan_client_test_interface_name to be str"
- )
- return self.device.wlan_client_test_interface_name
-
- def destroy_wlan_interface(self, iface_id: int) -> None:
- self.device.honeydew_fd.wlan_core.destroy_iface(iface_id)
-
- def is_connected(self, ssid: str | None = None) -> bool:
- result = self.device.honeydew_fd.wlan_core.status()
- match result:
- case ClientStatusIdle():
- self.device.log.info("Client status idle")
- return False
- case ClientStatusConnecting():
- ssid_bytes = bytearray(result.ssid).decode(
- encoding="utf-8", errors="replace"
- )
- self.device.log.info(
- f"Client status connecting to ssid: {ssid_bytes}"
- )
- return False
- case ClientStatusConnected():
- ssid_bytes = bytearray(result.ssid).decode(
- encoding="utf-8", errors="replace"
- )
- self.device.log.info(f"Client connected to ssid: {ssid_bytes}")
- if ssid is None:
- return True
- return ssid == ssid_bytes
- case _:
- raise ValueError(
- "Status did not return a valid status response: "
- f"{result}"
- )
-
- def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
- self.device.reboot(reboot_type="hard", testbed_pdus=pdus)
-
- def create_iperf_client(
- self, test_interface: str | None = None
- ) -> IPerfClientBase:
- if not test_interface:
- test_interface = self.get_default_wlan_test_interface()
-
- # A package server is necessary to acquire the iperf3 client for
- # some builds.
- self.device.start_package_server()
-
- return iperf_client.IPerfClientOverSsh(
- ssh_provider=self.device.ssh,
- test_interface=test_interface,
- # Fuchsia's date tool does not support setting system date/time.
- sync_date=False,
- )
-
- def feature_is_present(self, feature: str) -> bool:
- return feature in self.device.wlan_features
-
-
-def create_wlan_device(
- hardware_device: FuchsiaDevice | AndroidDevice,
- associate_mode: AssociationMode,
-) -> SupportsWLAN:
- """Creates a generic WLAN device based on type of device that is sent to
- the functions.
-
- Args:
- hardware_device: A WLAN hardware device that is supported by ACTS.
- """
- device: SupportsWLAN
- if isinstance(hardware_device, FuchsiaDevice):
- device = FuchsiaWlanDevice(hardware_device, associate_mode)
- elif isinstance(hardware_device, AndroidDevice):
- device = AndroidWlanDevice(hardware_device)
- else:
- raise ValueError(
- f"Unable to create WLAN device for type {type(hardware_device)}"
- )
-
- assert isinstance(device, SupportsWLAN)
- return device
diff --git a/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py b/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
deleted file mode 100644
index 6895d03..0000000
--- a/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
+++ /dev/null
@@ -1,702 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import multiprocessing
-import time
-from datetime import datetime
-from multiprocessing.managers import DictProxy
-from typing import Any, Mapping
-from uuid import UUID, uuid4
-
-from mobly import logger, signals
-
-from antlion import utils
-from antlion.controllers import iperf_client, iperf_server
-from antlion.controllers.access_point import AccessPoint
-from antlion.test_utils.abstract_devices.wlan_device import SupportsWLAN
-from antlion.validation import MapValidator
-
-AC_VO = "AC_VO"
-AC_VI = "AC_VI"
-AC_BE = "AC_BE"
-AC_BK = "AC_BK"
-
-# TODO(fxb/61421): Add tests to check all DSCP classes are mapped to the correct
-# AC (there are many that aren't included here). Requires implementation of
-# sniffer.
-DEFAULT_AC_TO_TOS_TAG_MAP = {
- AC_VO: "0xC0",
- AC_VI: "0x80",
- AC_BE: "0x0",
- AC_BK: "0x20",
-}
-UDP = "udp"
-TCP = "tcp"
-DEFAULT_IPERF_PORT = 5201
-DEFAULT_STREAM_TIME = 10
-DEFAULT_IP_ADDR_TIMEOUT = 15
-PROCESS_JOIN_TIMEOUT = 60
-AVAILABLE = True
-UNAVAILABLE = False
-
-
-class WmmTransceiverError(signals.ControllerError):
- pass
-
-
-def create(
- config: Mapping[str, Any],
- identifier: str | None = None,
- wlan_devices: list[SupportsWLAN] | None = None,
- access_points: list[AccessPoint] | None = None,
-):
- """Creates a WmmTransceiver from a config.
-
- Args:
- config: Config parameters for the transceiver. Contains:
- - iperf_config: dict, the config to use for creating IPerfClients and
- IPerfServers (excluding port).
- - port_range_start: int, the lower bound of the port range to use for
- creating IPerfServers. Defaults to 5201.
- - wlan_device: string, the identifier of the wlan_device used for this
- WmmTransceiver (optional)
-
- identifier: Identifier for the WmmTransceiver. Must be provided either as arg or
- in the config.
- wlan_devices: WLAN devices from which to get the wlan_device, if any, used as
- this transceiver
- access_points: Access points from which to get the access_point, if any, used as
- this transceiver
- """
- try:
- iperf_config = config["iperf_config"]
- except KeyError as err:
- raise WmmTransceiverError(
- f"Parameter not provided as func arg, nor found in config: {err}"
- )
-
- if not identifier:
- # If identifier is not provided as func arg, it must be provided via
- # config file.
- identifier = MapValidator(config).get(str, "identifier")
-
- if wlan_devices is None:
- wlan_devices = []
-
- if access_points is None:
- access_points = []
-
- port_range_start = config.get("port_range_start", DEFAULT_IPERF_PORT)
-
- wd = None
- ap = None
- if "wlan_device" in config:
- wd = _find_wlan_device(config["wlan_device"], wlan_devices)
- elif "access_point" in config:
- ap = _find_access_point(config["access_point"], access_points)
-
- return WmmTransceiver(
- iperf_config,
- identifier,
- wlan_device=wd,
- access_point=ap,
- port_range_start=port_range_start,
- )
-
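-# Illustrative config sketch for create() (editorial example; only keys used
-# elsewhere in this module are shown, and all values are hypothetical):
-#
-#   transceiver = create(
-#       config={
-#           "identifier": "staut",
-#           "iperf_config": {"test_interface": "wlan0"},  # plus IPerf settings
-#           "port_range_start": 5201,
-#           "wlan_device": "fuchsia-dut",
-#       },
-#       wlan_devices=wlan_devices,
-#   )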
-
-def _find_wlan_device(
- wlan_device_identifier: str, wlan_devices: list[SupportsWLAN]
-) -> SupportsWLAN:
- """Returns WLAN device based on string identifier (e.g. ip, serial, etc.)
-
- Args:
- wlan_device_identifier: Identifier for the desired WLAN device
- wlan_devices: WLAN devices to search through
-
- Returns:
- A WLAN device matching wlan_device_identifier
-
- Raises:
-        WmmTransceiverError, if no WLAN device matches wlan_device_identifier
- """
- for wd in wlan_devices:
- if wlan_device_identifier == wd.identifier:
- return wd
- raise WmmTransceiverError(
- f'No WLAN device with identifier "{wlan_device_identifier}"'
- )
-
-
-def _find_access_point(
- access_point_ip: str, access_points: list[AccessPoint]
-) -> AccessPoint:
- """Returns AccessPoint based on string ip address
-
- Args:
- access_point_ip: Control plane IP address of the desired AP
- access_points: Access points to search through
-
- Returns:
- Access point with hostname matching access_point_ip
-
- Raises:
-        WmmTransceiverError, if no access point matches access_point_ip
- """
- for ap in access_points:
- if ap.ssh_settings.hostname == access_point_ip:
- return ap
- raise WmmTransceiverError(f"No AccessPoint with ip: {access_point_ip}")
-
-
-class WmmTransceiver(object):
- """Object for handling WMM tagged streams between devices"""
-
- def __init__(
- self,
- iperf_config,
- identifier,
- wlan_device=None,
- access_point=None,
- port_range_start=5201,
- ):
- self.identifier = identifier
- self.log = logger.PrefixLoggerAdapter(
- logging.getLogger(),
- {
- logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: (
- f"[WmmTransceiver | {self.identifier}]"
- if self.identifier
- else "[WmmTransceiver]"
- ),
- },
- )
-        # WLAN device or AccessPoint that is used as the transceiver. Only one
-        # will be set. This helps consolidate association, setup, teardown, etc.
- self.wlan_device = wlan_device
- self.access_point = access_point
-
- # Parameters used to create IPerfClient and IPerfServer objects on
- # device
- self._iperf_config = iperf_config
- self._test_interface = self._iperf_config.get("test_interface")
- self._port_range_start = port_range_start
- self._next_server_port = port_range_start
-
- # Maps IPerfClients, used for streams from this device, to True if
- # available, False if reserved
- self._iperf_clients = {}
-
- # Maps IPerfServers, used to receive streams from other devices, to True
- # if available, False if reserved
- self._iperf_servers = {}
-
- # Maps ports of servers, which are provided to other transceivers, to
- # the actual IPerfServer objects
- self._iperf_server_ports = {}
-
-        # Maps stream UUIDs to IPerfClients reserved for that stream's use
- self._reserved_clients = {}
-
- # Maps stream UUIDs to (WmmTransceiver, IPerfServer) tuples, where the
-        # server is reserved on the transceiver for that stream's use
- self._reserved_servers = {}
-
- # Maps with shared memory functionality to be used across the parallel
- # streams. active_streams holds UUIDs of streams that are currently
- # running on this device (mapped to True, since there is no
- # multiprocessing set). stream_results maps UUIDs of streams completed
- # on this device to IPerfResult results for that stream.
- self._manager = multiprocessing.Manager()
- self._active_streams = self._manager.dict()
- self._stream_results = self._manager.dict()
-
- # Holds parameters for streams that are prepared to run asynchronously
- # (i.e. resources have been allocated). Maps UUIDs of the future streams
- # to a dict, containing the stream parameters.
- self._pending_async_streams = {}
-
- # Set of UUIDs of asynchronous streams that have at least started, but
- # have not had their resources reclaimed yet
- self._ran_async_streams = set()
-
-        # Set of parallel stream processes, which can be joined if completed
- # successfully, or terminated and joined in the event of an error
- self._running_processes = set()
-
- def run_synchronous_traffic_stream(self, stream_parameters, subnet):
- """Runs a traffic stream with IPerf3 between two WmmTransceivers and
- saves the results.
-
- Args:
-            stream_parameters: dict, containing parameters to use for the
- stream. See _parse_stream_parameters for details.
- subnet: string, the subnet of the network to use for the stream
-
- Returns:
- uuid: UUID object, identifier of the stream
- """
- (
- receiver,
- access_category,
- bandwidth,
- stream_time,
- ) = self._parse_stream_parameters(stream_parameters)
- uuid = uuid4()
-
- (client, server_ip, server_port) = self._get_stream_resources(
- uuid, receiver, subnet
- )
-
- self._validate_server_address(server_ip, uuid)
-
- self.log.info(
- f"Running synchronous stream to {receiver.identifier} WmmTransceiver"
- )
- self._run_traffic(
- uuid,
- client,
- server_ip,
- server_port,
- self._active_streams,
- self._stream_results,
- access_category=access_category,
- bandwidth=bandwidth,
- stream_time=stream_time,
- )
-
- self._return_stream_resources(uuid)
- return uuid
-
- def prepare_asynchronous_stream(self, stream_parameters, subnet):
- """Reserves resources and saves configs for upcoming asynchronous
-        traffic streams, so they can be started nearly simultaneously.
-
- Args:
-            stream_parameters: dict, containing parameters to use for the
- stream. See _parse_stream_parameters for details.
- subnet: string, the subnet of the network to use for the stream
-
- Returns:
- uuid: UUID object, identifier of the stream
- """
- (
- receiver,
- access_category,
- bandwidth,
- time,
- ) = self._parse_stream_parameters(stream_parameters)
- uuid = uuid4()
-
- (client, server_ip, server_port) = self._get_stream_resources(
- uuid, receiver, subnet
- )
-
- self._validate_server_address(server_ip, uuid)
-
- pending_stream_config = {
- "client": client,
- "server_ip": server_ip,
- "server_port": server_port,
- "access_category": access_category,
- "bandwidth": bandwidth,
- "time": time,
- }
-
- self._pending_async_streams[uuid] = pending_stream_config
- self.log.info(
- f"Stream to {receiver.identifier} WmmTransceiver prepared."
- )
- return uuid
-
- def start_asynchronous_streams(self, start_time=None):
- """Starts pending asynchronous streams between two WmmTransceivers as
- parallel processes.
-
- Args:
- start_time: float, time, seconds since epoch, at which to start the
- stream (for better synchronicity). If None, start immediately.
- """
- for uuid in self._pending_async_streams:
- pending_stream_config = self._pending_async_streams[uuid]
- client = pending_stream_config["client"]
- server_ip = pending_stream_config["server_ip"]
- server_port = pending_stream_config["server_port"]
- access_category = pending_stream_config["access_category"]
- bandwidth = pending_stream_config["bandwidth"]
- time = pending_stream_config["time"]
-
- process = multiprocessing.Process(
- target=self._run_traffic,
- args=[
- uuid,
- client,
- server_ip,
- server_port,
- self._active_streams,
- self._stream_results,
- ],
- kwargs={
- "access_category": access_category,
- "bandwidth": bandwidth,
- "stream_time": time,
- "start_time": start_time,
- },
- )
-
-            # This needs to be set here to ensure it's marked active before
- # it even starts.
- self._active_streams[uuid] = True
- process.start()
- self._ran_async_streams.add(uuid)
- self._running_processes.add(process)
-
- self._pending_async_streams.clear()
-
- def cleanup_asynchronous_streams(self, timeout=PROCESS_JOIN_TIMEOUT):
- """Releases reservations on resources (IPerfClients and IPerfServers)
- that were held for asynchronous streams, both pending and finished.
- Attempts to join any running processes, logging an error if timeout is
- exceeded.
-
- Args:
- timeout: time, in seconds, to wait for each running process, if any,
- to join
- """
- self.log.info("Cleaning up any asynchronous streams.")
-
-        # Releases resources for any streams that were prepared but never run
- for uuid in self._pending_async_streams:
- self.log.error(
- f"Pending asynchronous stream {uuid} never ran. Cleaning."
- )
- self._return_stream_resources(uuid)
- self._pending_async_streams.clear()
-
- # Attempts to join any running streams, terminating them after timeout
- # if necessary.
- while self._running_processes:
- process = self._running_processes.pop()
- process.join(timeout)
- if process.is_alive():
- self.log.error(
- f"Stream process failed to join in {timeout} seconds. Terminating."
- )
- process.terminate()
- process.join()
- self._active_streams.clear()
-
- # Release resources for any finished streams
- while self._ran_async_streams:
- uuid = self._ran_async_streams.pop()
- self._return_stream_resources(uuid)
-
- def get_results(self, uuid):
- """Retrieves a streams IPerfResults from stream_results
-
- Args:
- uuid: UUID object, identifier of the stream
- """
- return self._stream_results.get(uuid, None)
-
- def destroy_resources(self):
- for server in self._iperf_servers:
- server.stop()
- self._iperf_servers.clear()
- self._iperf_server_ports.clear()
- self._iperf_clients.clear()
- self._next_server_port = self._port_range_start
- self._stream_results.clear()
-
- @property
- def has_active_streams(self):
- return bool(self._active_streams)
-
- # Helper Functions
-
- def _run_traffic(
- self,
- uuid: UUID,
- client: iperf_client.IPerfClientBase,
- server_ip: str,
- server_port: int,
- active_streams: DictProxy[Any, Any],
- stream_results: DictProxy[Any, Any],
- access_category: str | None = None,
- bandwidth: int | None = None,
- stream_time: int = DEFAULT_STREAM_TIME,
- start_time: float | None = None,
- ):
- """Runs an iperf3 stream.
-
- 1. Adds stream UUID to active_streams
- 2. Runs stream
- 3. Saves results to stream_results
- 4. Removes stream UUID from active_streams
-
- Args:
- uuid: Identifier for stream
- client: IPerfClient object on device
- server_ip: IP address of IPerfServer for stream
- server_port: port of the IPerfServer for stream
- active_streams: holds stream UUIDs of active streams on the device
- stream_results: maps stream UUIDs of streams to IPerfResult objects
- access_category: WMM access category to use with iperf (AC_BK, AC_BE, AC_VI,
- AC_VO). Unset if None.
- bandwidth: Bandwidth in mbps to use with iperf. Implies UDP. Unlimited if
- None.
- stream_time: Time in seconds, to run iperf stream
- start_time: Time, seconds since epoch, at which to start the stream (for
- better synchronicity). If None, start immediately.
- """
- active_streams[uuid] = True
-
- ac_flag = ""
- bandwidth_flag = ""
- time_flag = f"-t {stream_time}"
-
- if access_category:
- ac_flag = f" -S {DEFAULT_AC_TO_TOS_TAG_MAP[access_category]}"
-
- if bandwidth:
- bandwidth_flag = f" -u -b {bandwidth}M"
-
- iperf_flags = (
- f"-p {server_port} -i 1 {time_flag}{ac_flag}{bandwidth_flag} -J"
- )
- if not start_time:
- start_time = time.time()
- time_str = datetime.fromtimestamp(start_time).strftime("%H:%M:%S.%f")
- self.log.info(
- "At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)"
- % (
- time_str,
- stream_time,
- server_ip,
- server_port,
- access_category,
- bandwidth if bandwidth else "Unlimited",
- )
- )
-
- # If present, wait for stream start time
- if start_time:
- current_time = time.time()
- while current_time < start_time:
- current_time = time.time()
- path = client.start(server_ip, iperf_flags, f"{uuid}")
- stream_results[uuid] = iperf_server.IPerfResult(
- path, reporting_speed_units="mbps"
- )
-
- active_streams.pop(uuid)
-
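-    # Editorial note: example of the iperf3 flag string assembled in
-    # _run_traffic. For access_category="AC_VI", bandwidth=20, stream_time=10,
-    # and server_port=5202, the client is started with:
-    #
-    #   -p 5202 -i 1 -t 10 -S 0x80 -u -b 20M -J
-    #
-    # where -S sets the IP ToS byte from DEFAULT_AC_TO_TOS_TAG_MAP so traffic
-    # is queued in the intended WMM access category.
-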
- def _get_stream_resources(self, uuid, receiver, subnet):
- """Reserves an IPerfClient and IPerfServer for a stream.
-
- Args:
- uuid: UUID object, identifier of the stream
-            receiver: WmmTransceiver object, which will be the stream's receiver
- subnet: string, subnet of test network, to retrieve the appropriate
- server address
-
- Returns:
- (IPerfClient, string, int) representing the client, server address,
- and server port to use for the stream
- """
- client = self._get_client(uuid)
- server_ip, server_port = self._get_server(receiver, uuid, subnet)
- return (client, server_ip, server_port)
-
- def _return_stream_resources(self, uuid):
- """Releases reservations on a streams IPerfClient and IPerfServer, so
- they can be used by a future stream.
-
- Args:
- uuid: UUID object, identifier of the stream
- """
- if uuid in self._active_streams:
- raise EnvironmentError(
- f"Resource still being used by stream {uuid}"
- )
- (receiver, server_port) = self._reserved_servers.pop(uuid)
- receiver._release_server(server_port)
- client = self._reserved_clients.pop(uuid)
- self._iperf_clients[client] = AVAILABLE
-
- def _get_client(self, uuid):
- """Retrieves and reserves IPerfClient for use in a stream. If none are
- available, a new one is created.
-
- Args:
- uuid: UUID object, identifier for stream, used to link client to
- stream for teardown
-
- Returns:
- IPerfClient on device
- """
- reserved_client = None
- for client in self._iperf_clients:
- if self._iperf_clients[client] == AVAILABLE:
- reserved_client = client
- break
- else:
- reserved_client = iperf_client.create([self._iperf_config])[0]
-
- self._iperf_clients[reserved_client] = UNAVAILABLE
- self._reserved_clients[uuid] = reserved_client
- return reserved_client
-
- def _get_server(self, receiver, uuid, subnet):
- """Retrieves the address and port of a reserved IPerfServer object from
- the receiver object for use in a stream.
-
- Args:
- receiver: WmmTransceiver, to get an IPerfServer from
- uuid: UUID, identifier for stream, used to link server to stream
- for teardown
- subnet: string, subnet of test network, to retrieve the appropriate
- server address
-
- Returns:
- (string, int) representing the IPerfServer address and port
- """
- (server_ip, server_port) = receiver._reserve_server(subnet)
- self._reserved_servers[uuid] = (receiver, server_port)
- return (server_ip, server_port)
-
- def _reserve_server(self, subnet):
- """Reserves an available IPerfServer for use in a stream from another
- WmmTransceiver. If none are available, a new one is created.
-
- Args:
- subnet: string, subnet of test network, to retrieve the appropriate
- server address
-
- Returns:
- (string, int) representing the IPerfServer address and port
- """
- reserved_server = None
- for server in self._iperf_servers:
- if self._iperf_servers[server] == AVAILABLE:
- reserved_server = server
- break
- else:
- iperf_server_config = self._iperf_config
- iperf_server_config.update({"port": self._next_server_port})
- self._next_server_port += 1
- reserved_server = iperf_server.create([iperf_server_config])[0]
- self._iperf_server_ports[reserved_server.port] = reserved_server
-
- self._iperf_servers[reserved_server] = UNAVAILABLE
- reserved_server.start()
- end_time = time.time() + DEFAULT_IP_ADDR_TIMEOUT
- while time.time() < end_time:
- if self.wlan_device:
- addresses = utils.get_interface_ip_addresses(
- self.wlan_device.device, self._test_interface
- )
- else:
- addresses = reserved_server.get_interface_ip_addresses(
- self._test_interface
- )
- for addr in addresses["ipv4_private"]:
- if utils.ip_in_subnet(addr, subnet):
- return (addr, reserved_server.port)
- raise AttributeError(
- f"Reserved server has no ipv4 address in the {subnet} subnet"
- )
-
- def _release_server(self, server_port):
- """Releases reservation on IPerfServer, which was held for a stream
- from another WmmTransceiver.
-
- Args:
-            server_port: int, the port of the IPerfServer being returned (since
-                it is the identifying characteristic)
- """
- server = self._iperf_server_ports[server_port]
- server.stop()
- self._iperf_servers[server] = AVAILABLE
-
- def _validate_server_address(self, server_ip, uuid, timeout=60):
- """Verifies server address can be pinged before attempting to run
- traffic, since iperf is unforgiving when the server is unreachable.
-
- Args:
- server_ip: string, ip address of the iperf server
- uuid: string, uuid of the stream to use this server
- timeout: int, time in seconds to wait for server to respond to pings
-
- Raises:
- WmmTransceiverError, if, after timeout, server ip is unreachable.
- """
- self.log.info(f"Verifying server address ({server_ip}) is reachable.")
- end_time = time.time() + timeout
- while time.time() < end_time:
- if self.can_ping(server_ip):
- break
- else:
- self.log.debug(
- "Could not ping server address (%s). Retrying in 1 second."
- % (server_ip)
- )
- time.sleep(1)
- else:
- self._return_stream_resources(uuid)
- raise WmmTransceiverError(
- f"IPerfServer address ({server_ip}) unreachable."
- )
-
- def can_ping(self, dest_ip):
- """Utilizes can_ping function in wlan_device or access_point device to
- ping dest_ip
-
- Args:
- dest_ip: string, ip address to ping
-
- Returns:
- True, if dest address is reachable
- False, otherwise
- """
- if self.wlan_device:
- return self.wlan_device.can_ping(dest_ip)
- else:
- return self.access_point.can_ping(dest_ip)
-
- def _parse_stream_parameters(self, stream_parameters):
- """Parses stream_parameters from dictionary.
-
- Args:
- stream_parameters: dict of stream parameters
- 'receiver': WmmTransceiver, the receiver for the stream
- 'access_category': String, the access category to use for the
- stream. Unset if None.
- 'bandwidth': int, bandwidth in mbps for the stream. If set,
- implies UDP. If unset, implies TCP and unlimited bandwidth.
- 'time': int, time in seconds to run stream.
-
- Returns:
- (receiver, access_category, bandwidth, time) as
- (WmmTransceiver, String, int, int)
- """
- receiver = stream_parameters["receiver"]
- access_category = stream_parameters.get("access_category", None)
- bandwidth = stream_parameters.get("bandwidth", None)
- time = stream_parameters.get("time", DEFAULT_STREAM_TIME)
- return (receiver, access_category, bandwidth, time)
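-
-
-# Illustrative end-to-end sketch (editorial example; `sender` and `receiver`
-# stand in for two configured WmmTransceiver instances on the same test
-# subnet):
-#
-#   uuid = sender.run_synchronous_traffic_stream(
-#       {"receiver": receiver, "access_category": "AC_VO", "time": 10},
-#       subnet="192.168.1.0/24",
-#   )
-#   result = sender.get_results(uuid)
-#   sender.destroy_resources()
-#   receiver.destroy_resources()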
diff --git a/packages/antlion/test_utils/dhcp/__init__.py b/packages/antlion/test_utils/dhcp/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/test_utils/dhcp/__init__.py
+++ /dev/null
diff --git a/packages/antlion/test_utils/dhcp/base_test.py b/packages/antlion/test_utils/dhcp/base_test.py
deleted file mode 100644
index 4be8a0e..0000000
--- a/packages/antlion/test_utils/dhcp/base_test.py
+++ /dev/null
@@ -1,321 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from dataclasses import dataclass
-from ipaddress import IPv4Address, IPv4Network
-from pathlib import Path
-
-from mobly import asserts, signals
-from mobly.config_parser import TestRunConfig
-
-from antlion import utils
-from antlion.controllers.access_point import AccessPoint, setup_ap
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.ap_lib import dhcp_config, hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-@dataclass
-class APParams:
- id: str
- ssid: str
- security: Security
- ip: IPv4Address
- network: IPv4Network
-
-
-class Dhcpv4InteropFixture(base_test.WifiBaseTest):
- """Test helpers for validating DHCPv4 Interop
-
- Test Bed Requirement:
- * One Android device or Fuchsia device
- * One Access Point
- """
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.fuchsia_device: FuchsiaDevice | None = None
- self.access_point: AccessPoint = self.access_points[0]
-
- device_type = self.user_params.get("dut", "fuchsia_devices")
- if device_type == "fuchsia_devices":
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
- elif device_type == "android_devices":
- _, self.dut = self.get_dut_type(
- AndroidDevice, AssociationMode.POLICY
- )
- else:
- raise ValueError(
- f'Invalid "dut" type specified in config: "{device_type}".'
- 'Expected "fuchsia_devices" or "android_devices".'
- )
-
- def setup_class(self) -> None:
- super().setup_class()
- self.access_point.stop_all_aps()
-
- def setup_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
-
- def teardown_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.access_point.stop_all_aps()
-
- def connect(self, ap_params: APParams) -> None:
- asserts.assert_true(
- self.dut.associate(
- ap_params.ssid,
- target_pwd=ap_params.security.password,
- target_security=ap_params.security.security_mode,
- ),
- "Failed to connect.",
- )
-
- def setup_ap(self) -> APParams:
- """Generates a hostapd config and sets up the AP with that config.
-
- Does not run a DHCP server.
-
- Returns:
-            APParams for the newly set up AP.
- """
- ssid = utils.rand_ascii_str(20)
- security = Security(
- security_mode=SecurityMode.WPA2,
- password=generate_random_password(length=20),
- wpa_cipher="CCMP",
- wpa2_cipher="CCMP",
- )
-
- ap_ids = setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- mode=hostapd_constants.Mode.MODE_11N_MIXED,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- n_capabilities=[],
- ac_capabilities=[],
- force_wmm=True,
- ssid=ssid,
- security=security,
- )
-
- if len(ap_ids) > 1:
- raise Exception("Expected only one SSID on AP")
-
- configured_subnets = self.access_point.get_configured_subnets()
- if len(configured_subnets) > 1:
- raise Exception("Expected only one subnet on AP")
- router_ip = configured_subnets[0].router
- network = configured_subnets[0].network
-
- self.access_point.stop_dhcp()
-
- return APParams(
- id=ap_ids[0],
- ssid=ssid,
- security=security,
- ip=router_ip,
- network=network,
- )
-
- def get_device_ipv4_addr(
- self, interface: str | None = None, timeout_sec: float = 20.0
- ) -> IPv4Address:
- """Checks if device has an ipv4 private address.
-
- Only supported on Fuchsia.
-
- Args:
- interface: name of interface from which to get ipv4 address.
-            timeout_sec: seconds to wait until raising ConnectionError
-
- Raises:
-            ConnectionError, if DUT does not have an ipv4 address after the
-            timeout.
-
- Returns:
- The device's IP address
- """
- if self.fuchsia_device is None:
- # TODO(http://b/292289291): Add get_(ipv4|ipv6)_addr to SupportsIP.
- raise TypeError(
- "TODO(http://b/292289291): get_device_ipv4_addr only supports "
- "FuchsiaDevice"
- )
-
- self.log.debug("Fetching updated WLAN interface list")
- if interface is None:
- interface = self.dut.get_default_wlan_test_interface()
- self.log.info(
- "Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s "
- "seconds." % (interface, timeout_sec)
- )
- timeout_sec = time.time() + timeout_sec
- while time.time() < timeout_sec:
- ip_addrs = self.fuchsia_device.get_interface_ip_addresses(interface)
-
- if len(ip_addrs["ipv4_private"]) > 0:
- ip = ip_addrs["ipv4_private"][0]
- self.log.info(f"DUT has an ipv4 address: {ip}")
- return IPv4Address(ip)
- else:
- self.log.debug(
- "DUT does not yet have an ipv4 address...retrying in 1 "
- "second."
- )
- time.sleep(1)
- else:
- raise ConnectionError("DUT failed to get an ipv4 address.")
-
- def run_test_case_expect_dhcp_success(
- self,
- dhcp_parameters: dict[str, str],
- dhcp_options: dict[str, int | str],
- ) -> None:
- """Starts the AP and DHCP server, and validates that the client
- connects and obtains an address.
-
- Args:
- dhcp_parameters: a dictionary of DHCP parameters
- dhcp_options: a dictionary of DHCP options
- """
- ap_params = self.setup_ap()
- subnet_conf = dhcp_config.Subnet(
- subnet=ap_params.network,
- router=ap_params.ip,
- additional_parameters=dhcp_parameters,
- additional_options=dhcp_options,
- )
- dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-
- self.log.debug(
- "DHCP Configuration:\n%s\n", dhcp_conf.render_config_file()
- )
-
- with self.access_point.tcpdump.start(
- self.access_point.wlan_5g, Path(self.log_path)
- ):
- self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
- self.connect(ap_params=ap_params)
-
- # Typical log lines look like:
- #
- # dhcpd[26695]: DHCPDISCOVER from 01:23:45:67:89:ab via wlan1
- # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
- # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from 01:23:45:67:89:ab via wlan1
- # dhcpd[26695]: DHCPACK on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
-
- # Due to b/384790032, logs can also show duplicate DISCOVER and
- # OFFER packets due to the Fuchsia DHCP client queuing packets while
- # EAPOL is in progress:
- #
- # DHCPDISCOVER from 01:23:45:67:89:ab via wlan1
- # DHCPOFFER on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
- # DHCPDISCOVER from 01:23:45:67:89:ab via wlan1
- # DHCPOFFER on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
- # DHCPREQUEST for 192.168.9.2 (192.168.9.1) from 01:23:45:67:89:ab via wlan1
- # DHCPACK on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
-
- try:
- ip = self.get_device_ipv4_addr()
- except ConnectionError:
- self.log.warning(
- "DHCP logs: %s", self.access_point.get_dhcp_logs()
- )
- raise signals.TestFailure("DUT failed to get an IP address")
-
- # Get updates to DHCP logs
- dhcp_logs = self.access_point.get_dhcp_logs()
- if dhcp_logs is None:
- raise signals.TestFailure("No DHCP logs")
-
-            # TODO(http://b/384790032): Replace the logic below with this
- # comment once DHCP is started after EAPOL finishes. Or remove this
- # comment if queueing is determined expected and acceptable
- # behavior.
- #
- # expected_string = f"DHCPDISCOVER from"
- # asserts.assert_equal(
- # dhcp_logs.count(expected_string),
- # 1,
- # f'Incorrect count of DHCP Discovers ("{expected_string}") in logs',
- # dhcp_logs,
- # )
- #
- # expected_string = f"DHCPOFFER on {ip}"
- # asserts.assert_equal(
- # dhcp_logs.count(expected_string),
- # 1,
- # f'Incorrect count of DHCP Offers ("{expected_string}") in logs',
- # dhcp_logs,
- # )
-
- discover_count = dhcp_logs.count("DHCPDISCOVER from")
- offer_count = dhcp_logs.count(f"DHCPOFFER on {ip}")
- asserts.assert_greater(
- discover_count,
- 0,
- "Expected one or more DHCP Discovers",
- dhcp_logs,
- )
- asserts.assert_equal(
- discover_count,
- offer_count,
- "Expected an equal amount of DHCP Discovers and Offers",
- dhcp_logs,
- )
-
- expected_string = f"DHCPREQUEST for {ip}"
- asserts.assert_true(
- dhcp_logs.count(expected_string) >= 1,
- f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
- + dhcp_logs
- + "\n",
- )
-
- expected_string = f"DHCPACK on {ip}"
- asserts.assert_true(
- dhcp_logs.count(expected_string) >= 1,
- f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
- + dhcp_logs
- + "\n",
- )
-
- self.log.info(f"Attempting to ping {ap_params.ip}...")
- ping_result = self.dut.ping(str(ap_params.ip), count=2)
- asserts.assert_true(
- ping_result.success,
- f"DUT failed to ping router at {ap_params.ip}: {ping_result}",
- )
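-
-
-# Illustrative sketch of a test built on this fixture (editorial example; the
-# dhcpd parameter/option names and values are hypothetical):
-#
-#   class Dhcpv4InteropBasicTest(Dhcpv4InteropFixture):
-#       def test_default_lease(self) -> None:
-#           self.run_test_case_expect_dhcp_success(
-#               dhcp_parameters={"default-lease-time": "3600"},
-#               dhcp_options={"domain-name": '"example.test"'},
-#           )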
diff --git a/packages/antlion/test_utils/fuchsia/__init__.py b/packages/antlion/test_utils/fuchsia/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/test_utils/fuchsia/__init__.py
+++ /dev/null
diff --git a/packages/antlion/test_utils/fuchsia/wmm_test_cases.py b/packages/antlion/test_utils/fuchsia/wmm_test_cases.py
deleted file mode 100644
index 5a05cbf..0000000
--- a/packages/antlion/test_utils/fuchsia/wmm_test_cases.py
+++ /dev/null
@@ -1,1434 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Internal Traffic Differentiation
-test_internal_traffic_diff_VO_VI = {
- "phase_1": {
- "stream_VO": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03
- )
- ],
- ),
- "stream_VI": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(operator="<", phase="phase_1", stream="stream_VO")
- ],
- ),
- }
-}
-
-test_internal_traffic_diff_VO_BE = {
- "phase_1": {
- "stream_VO": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BE": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(operator="<", phase="phase_1", stream="stream_VO")
- ],
- ),
- }
-}
-
-test_internal_traffic_diff_VO_BK = {
- "phase_1": {
- "stream_VO": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(operator="<", phase="phase_1", stream="stream_VO")
- ],
- ),
- }
-}
-
-test_internal_traffic_diff_VI_BE = {
- "phase_1": {
- "stream_VI": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BE": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(operator="<", phase="phase_1", stream="stream_VI")
- ],
- ),
- }
-}
-
-test_internal_traffic_diff_VI_BK = {
- "phase_1": {
- "stream_VI": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(operator="<", phase="phase_1", stream="stream_VI")
- ],
- ),
- }
-}
-
-test_internal_traffic_diff_BE_BK = {
- "phase_1": {
- "stream_BE": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(operator="<", phase="phase_1", stream="stream_BE")
- ],
- ),
- }
-}
-# External Traffic Differentiation
-
-# Single station, STAUT transmits high priority
-test_external_traffic_diff_staut_VO_ap_VI = {
- "phase_1": {
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03
- )
- ],
- ),
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VO_staut_to_ap",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_VO_ap_BE = {
- "phase_1": {
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VO_staut_to_ap",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_VO_ap_BK = {
- "phase_1": {
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VO_staut_to_ap",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_VI_ap_BE = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VI_staut_to_ap",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_VI_ap_BK = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VI_staut_to_ap",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_BE_ap_BK = {
- "phase_1": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap",
- )
- ],
- ),
- }
-}
-
-# Single station, STAUT transmits low priority
-test_external_traffic_diff_staut_VI_ap_VO = {
- "phase_1": {
- "stream_VO_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03
- )
- ],
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VO_ap_to_staut",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_BE_ap_VO = {
- "phase_1": {
- "stream_VO_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VO_ap_to_staut",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_BK_ap_VO = {
- "phase_1": {
- "stream_VO_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VO_ap_to_staut",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_BE_ap_VI = {
- "phase_1": {
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VI_ap_to_staut",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_BK_ap_VI = {
- "phase_1": {
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_VI_ap_to_staut",
- )
- ],
- ),
- }
-}
-
-test_external_traffic_diff_staut_BK_ap_BE = {
- "phase_1": {
- "stream_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03
- )
- ],
- ),
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=1.0,
- validation=[
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_ap_to_staut",
- )
- ],
- ),
- }
-}
-
-# Dual Internal/External Traffic Differentiation
-
-test_dual_traffic_diff_staut_VO_VI_ap_VI = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.5,
- validation=[
- dict(
- operator="==",
- phase="phase_1",
- stream="stream_VI_ap_to_staut",
- max_bw_rel_tolerance=0.15,
- )
- ],
- ),
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.01
- )
- ],
- ),
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=0.5,
- ),
- }
-}
-
-test_dual_traffic_diff_staut_VO_BE_ap_BE = {
- "phase_1": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.5,
- validation=[
- dict(
- operator="==",
- phase="phase_1",
- stream="stream_BE_ap_to_staut",
- max_bw_rel_tolerance=0.15,
- )
- ],
- ),
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01
- )
- ],
- ),
- "stream_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=0.5,
- ),
- }
-}
-
-test_dual_traffic_diff_staut_VO_BK_ap_BK = {
- "phase_1": {
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.5,
- validation=[
- dict(
- operator="==",
- phase="phase_1",
- stream="stream_BK_ap_to_staut",
- max_bw_rel_tolerance=0.15,
- )
- ],
- ),
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=0.5,
- ),
- }
-}
-
-test_dual_traffic_diff_staut_VI_BE_ap_BE = {
- "phase_1": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.5,
- validation=[
- dict(
- operator="==",
- phase="phase_1",
- stream="stream_BE_ap_to_staut",
- max_bw_rel_tolerance=0.15,
- )
- ],
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01
- )
- ],
- ),
- "stream_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=0.5,
- ),
- }
-}
-
-test_dual_traffic_diff_staut_VI_BK_ap_BK = {
- "phase_1": {
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.5,
- validation=[
- dict(
- operator="==",
- phase="phase_1",
- stream="stream_BK_ap_to_staut",
- max_bw_rel_tolerance=0.15,
- )
- ],
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=0.5,
- ),
- }
-}
-
-test_dual_traffic_diff_staut_BE_BK_ap_BK = {
- "phase_1": {
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.5,
- validation=[
- dict(
- operator="==",
- phase="phase_1",
- stream="stream_BK_ap_to_staut",
- max_bw_rel_tolerance=0.15,
- )
- ],
- ),
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.85,
- validation=[
- dict(
- operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.01
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=0.5,
- ),
- }
-}
-
-# ACM Bit Conformance Tests (Single station, as WFA test below uses two)
-test_acm_bit_on_VI = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.6,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap_1",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap_2",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_BE_staut_to_ap_1": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- ),
- "stream_BE_staut_to_ap_2": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- ),
- }
-}
-
-# AC Parameter Modification Tests (Single station, as WFA test below uses two)
-test_ac_param_degrade_VI = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.6,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap_1",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap_2",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_BE_staut_to_ap_1": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- ),
- "stream_BE_staut_to_ap_2": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- ),
- }
-}
-
-test_ac_param_degrade_VO = {
- "phase_1": {
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.6,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap_1",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_staut_to_ap_2",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_BE_staut_to_ap_1": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- ),
- "stream_BE_staut_to_ap_2": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- ),
- }
-}
-
-test_ac_param_improve_BE = {
- "phase_1": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.6,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator=">",
- phase="phase_1",
- stream="stream_VI_staut_to_ap_1",
- bandwidth_percentage=0.869,
- rel_tolerance=0.05,
- ),
- dict(
- operator=">",
- phase="phase_1",
- stream="stream_VI_staut_to_ap_2",
- bandwidth_percentage=0.869,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_VI_staut_to_ap_1": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.6,
- ),
- "stream_VI_staut_to_ap_2": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.6,
- ),
- }
-}
-
-test_ac_param_improve_BK = {
- "phase_1": {
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.6,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator=">",
- phase="phase_1",
- stream="stream_VI_staut_to_ap_1",
- bandwidth_percentage=0.869,
- rel_tolerance=0.05,
- ),
- dict(
- operator=">",
- phase="phase_1",
- stream="stream_VI_staut_to_ap_2",
- bandwidth_percentage=0.869,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_VI_staut_to_ap_1": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.6,
- ),
- "stream_VI_staut_to_ap_2": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.6,
- ),
- }
-}
-# WFA Test Plan Cases
-
-# Traffic Differentiation in Single BSS (Single Station)
-test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE = {
- "phase_1": {
- "steam_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
- "steam_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_VI_ap_to_staut",
- bandwidth_percentage=0.85,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_single_station_staut_VI_BE = {
- "phase_1": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
- "stream_BE_staut_to_ap_1": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_VI_staut_to_ap",
- bandwidth_percentage=0.89,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BE_staut_to_ap_2": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE = {
- "phase_1": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_VI_staut_to_ap",
- bandwidth_percentage=0.87,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BE_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK = {
- "phase_1": {
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.45,
- ),
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
- "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.45,
- ),
- "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_BE_staut_to_ap",
- bandwidth_percentage=0.81,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BK_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_BK",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VO_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_VO_staut_to_ap",
- bandwidth_percentage=0.81,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_VI_ap_to_staut": dict(
- transmitter_str="access_point",
- receiver_str="staut",
- access_category="AC_VI",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-# Traffic Differentiation in Single BSS (Two Stations)
-test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE = {
- "phase_1": {
-        "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
-        "stream_BE_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VI_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_VI_secondary_to_ap",
- bandwidth_percentage=0.90,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BE_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE = {
- "phase_1": {
-        "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- "stream_BE_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
-        "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
-            access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
-                    stream="stream_VI_staut_to_ap",
- bandwidth_percentage=0.88,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BE_secondary_to_ap_1": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- "stream_BE_secondary_to_ap_2": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK = {
- "phase_1": {
-        "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.45,
- ),
- "stream_BE_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
-        "stream_BK_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.45,
- ),
- "stream_BE_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_BE_secondary_to_ap",
- bandwidth_percentage=0.90,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_BK_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BK",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI = {
- "phase_1": {
-        "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VO_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.45,
- ),
- },
- "phase_2": {
-        "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.45,
- ),
- "stream_VO_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_VO",
- max_bandwidth_percentage=0.45,
- validation=[
- dict(
- operator=">=",
- phase="phase_1",
- stream="stream_VO_secondary_to_ap",
- bandwidth_percentage=0.90,
- rel_tolerance=0.01,
- )
- ],
- ),
- "stream_VI_secondary_to_ap": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.65,
- ),
- },
-}
-
-test_wfa_acm_bit_on_VI = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.65,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_secondary_to_ap_1",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_secondary_to_ap_2",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_BE_secondary_to_ap_1": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- "stream_BE_secondary_to_ap_2": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- }
-}
-
-test_wfa_ac_param_degrade_VI = {
- "phase_1": {
- "stream_VI_staut_to_ap": dict(
- transmitter_str="staut",
- receiver_str="access_point",
- access_category="AC_VI",
- max_bandwidth_percentage=0.65,
- validation=[
- # TODO(): This should technically be an "or"
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_secondary_to_ap_1",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- dict(
- operator="<",
- phase="phase_1",
- stream="stream_BE_secondary_to_ap_2",
- bandwidth_percentage=1.15,
- rel_tolerance=0.05,
- ),
- ],
- ),
- "stream_BE_secondary_to_ap_1": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- "stream_BE_secondary_to_ap_2": dict(
- transmitter_str="secondary_sta",
- receiver_str="access_point",
- access_category="AC_BE",
- max_bandwidth_percentage=0.65,
- ),
- }
-}
diff --git a/packages/antlion/test_utils/net/__init__.py b/packages/antlion/test_utils/net/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/test_utils/net/__init__.py
+++ /dev/null
diff --git a/packages/antlion/test_utils/net/connectivity_const.py b/packages/antlion/test_utils/net/connectivity_const.py
deleted file mode 100644
index 05495f0..0000000
--- a/packages/antlion/test_utils/net/connectivity_const.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-
-######################################################
-# ConnectivityManager.NetworkCallback events
-######################################################
-EVENT_NETWORK_CALLBACK = "NetworkCallback"
-
-# event types
-NETWORK_CB_PRE_CHECK = "PreCheck"
-NETWORK_CB_AVAILABLE = "Available"
-NETWORK_CB_LOSING = "Losing"
-NETWORK_CB_LOST = "Lost"
-NETWORK_CB_UNAVAILABLE = "Unavailable"
-NETWORK_CB_CAPABILITIES_CHANGED = "CapabilitiesChanged"
-NETWORK_CB_SUSPENDED = "Suspended"
-NETWORK_CB_RESUMED = "Resumed"
-NETWORK_CB_LINK_PROPERTIES_CHANGED = "LinkPropertiesChanged"
-NETWORK_CB_INVALID = "Invalid"
-
-# event data keys
-NETWORK_CB_KEY_ID = "id"
-NETWORK_CB_KEY_EVENT = "networkCallbackEvent"
-NETWORK_CB_KEY_MAX_MS_TO_LIVE = "maxMsToLive"
-NETWORK_CB_KEY_RSSI = "rssi"
-NETWORK_CB_KEY_INTERFACE_NAME = "interfaceName"
-NETWORK_CB_KEY_CREATE_TS = "creation_timestamp"
-NETWORK_CB_KEY_CURRENT_TS = "current_timestamp"
-NETWORK_CB_KEY_NETWORK_SPECIFIER = "network_specifier"
-NETWORK_CB_KEY_TRANSPORT_INFO = "transport_info"
-
-# Constants for VPN connection status
-VPN_STATE_DISCONNECTED = 0
-VPN_STATE_INITIALIZING = 1
-VPN_STATE_CONNECTING = 2
-VPN_STATE_CONNECTED = 3
-VPN_STATE_TIMEOUT = 4
-VPN_STATE_FAILED = 5
-# TODO gmoturu: determine the exact timeout value
-# This is an arbitrary placeholder value for now
-VPN_TIMEOUT = 30
-
-# Connectivity Manager constants
-TYPE_MOBILE = 0
-TYPE_WIFI = 1
-
-# Network request related constants.
-NETWORK_CAP_TRANSPORT_WIFI = TYPE_WIFI
-NETWORK_CAP_CAPABILITY_INTERNET = 12
-
-# Network request related keys.
-NETWORK_CAP_TRANSPORT_TYPE_KEY = "TransportType"
-NETWORK_CAP_CAPABILITY_KEY = "Capability"
-
-# Multipath preference constants
-MULTIPATH_PREFERENCE_NONE = 0
-MULTIPATH_PREFERENCE_HANDOVER = 1 << 0
-MULTIPATH_PREFERENCE_RELIABILITY = 1 << 1
-MULTIPATH_PREFERENCE_PERFORMANCE = 1 << 2
-
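-# Editor's note (illustrative, not part of the original file): these values are
-# bit flags, so multiple preferences can be combined with bitwise OR, e.g.
-#   preference = MULTIPATH_PREFERENCE_HANDOVER | MULTIPATH_PREFERENCE_RELIABILITY
-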
-# Private DNS constants
-DNS_GOOGLE_HOSTNAME = "dns.google"
-DNS_QUAD9_HOSTNAME = "dns.quad9.net"
-DNS_CLOUDFLARE_HOSTNAME = "1dot1dot1dot1.cloudflare-dns.com"
-DOH_CLOUDFLARE_HOSTNAME = "cloudflare-dns.com"
-PRIVATE_DNS_MODE_OFF = "off"
-PRIVATE_DNS_MODE_OPPORTUNISTIC = "opportunistic"
-PRIVATE_DNS_MODE_STRICT = "hostname"
-
-DNS_SUPPORT_TYPE = {
- DNS_GOOGLE_HOSTNAME: ["Do53", "DoT", "DoH"],
- DNS_CLOUDFLARE_HOSTNAME: ["Do53", "DoT"],
- DOH_CLOUDFLARE_HOSTNAME: ["DoH"],
-}
-
-DNS_GOOGLE_ADDR_V4 = ["8.8.4.4", "8.8.8.8"]
-DNS_GOOGLE_ADDR_V6 = ["2001:4860:4860::8888", "2001:4860:4860::8844"]
-DNS_CLOUDFLARE_ADDR_V4 = ["1.1.1.1", "1.0.0.1"]
-DOH_CLOUDFLARE_ADDR_V4 = ["104.16.248.249", "104.16.249.249"]
-DOH_CLOUDFLARE_ADDR_V6 = ["2606:4700::6810:f8f9", "2606:4700::6810:f9f9"]
-
-# IpSec constants
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-AF_INET = 2
-AF_INET6 = 10
-DIRECTION_IN = 0
-DIRECTION_OUT = 1
-MODE_TRANSPORT = 0
-MODE_TUNNEL = 1
-CRYPT_NULL = "ecb(cipher_null)"
-CRYPT_AES_CBC = "cbc(aes)"
-AUTH_HMAC_MD5 = "hmac(md5)"
-AUTH_HMAC_SHA1 = "hmac(sha1)"
-AUTH_HMAC_SHA256 = "hmac(sha256)"
-AUTH_HMAC_SHA384 = "hmac(sha384)"
-AUTH_HMAC_SHA512 = "hmac(sha512)"
-AUTH_CRYPT_AES_GCM = "rfc4106(gcm(aes))"
-
-
-# Constants for VpnProfile
-class VpnProfile(object):
- """This class contains all the possible
-    """This class contains all the possible
-    parameters required for a VPN connection.
-
- NAME = "name"
- TYPE = "type"
- SERVER = "server"
- USER = "username"
- PWD = "password"
- DNS = "dnsServers"
- SEARCH_DOMAINS = "searchDomains"
- ROUTES = "routes"
- MPPE = "mppe"
- L2TP_SECRET = "l2tpSecret"
- IPSEC_ID = "ipsecIdentifier"
- IPSEC_SECRET = "ipsecSecret"
- IPSEC_USER_CERT = "ipsecUserCert"
- IPSEC_CA_CERT = "ipsecCaCert"
- IPSEC_SERVER_CERT = "ipsecServerCert"
-
-
-# Enums for VPN profile types
-class VpnProfileType(enum.Enum):
- """Integer constant for each type of VPN"""
-
- PPTP = 0
- L2TP_IPSEC_PSK = 1
- L2TP_IPSEC_RSA = 2
- IPSEC_XAUTH_PSK = 3
- IPSEC_XAUTH_RSA = 4
- IPSEC_HYBRID_RSA = 5
- IKEV2_IPSEC_USER_PASS = 6
- IKEV2_IPSEC_PSK = 7
- IKEV2_IPSEC_RSA = 8
-
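-# Editor's note: illustrative sketch, not part of the original file. The
-# VpnProfile keys and VpnProfileType values above are typically combined into a
-# plain dict when describing a profile, e.g.
-#   profile = {
-#       VpnProfile.NAME: "test_vpn",
-#       VpnProfile.TYPE: VpnProfileType.IKEV2_IPSEC_PSK.value,
-#       VpnProfile.SERVER: "vpn.example.com",
-#   }
-# (the exact set of required keys depends on the VPN type under test).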
-
-# Constants for config file
-class VpnReqParams(object):
- """Config file parameters required for
-    a VPN connection.
- """
-
- vpn_server_addresses = "vpn_server_addresses"
- vpn_verify_addresses = "vpn_verify_addresses"
- vpn_username = "vpn_username"
- vpn_password = "vpn_password"
- psk_secret = "psk_secret"
- client_pkcs_file_name = "client_pkcs_file_name"
- cert_path_vpnserver = "cert_path_vpnserver"
- cert_password = "cert_password"
- pptp_mppe = "pptp_mppe"
- ipsec_server_type = "ipsec_server_type"
- wifi_network = "wifi_network"
- vpn_identity = "vpn_identity"
- vpn_server_hostname = "vpn_server_hostname"
diff --git a/packages/antlion/test_utils/net/net_test_utils.py b/packages/antlion/test_utils/net/net_test_utils.py
deleted file mode 100644
index 74c48d3..0000000
--- a/packages/antlion/test_utils/net/net_test_utils.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import os
-
-from antlion.controllers import adb
-from antlion.test_utils.net import connectivity_const as cconst
-from antlion.utils import start_standing_subprocess, stop_standing_subprocess
-
-VPN_CONST = cconst.VpnProfile
-VPN_TYPE = cconst.VpnProfileType
-VPN_PARAMS = cconst.VpnReqParams
-TCPDUMP_PATH = "/data/local/tmp/"
-USB_CHARGE_MODE = "svc usb setFunctions"
-USB_TETHERING_MODE = "svc usb setFunctions rndis"
-ENABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 0"
-DISABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 1"
-DEVICE_IP_ADDRESS = "ip address"
-LOCALHOST = "192.168.1.1"
-
-# Time to wait for the radio to be up and running after a reboot
-WAIT_TIME_AFTER_REBOOT = 10
-
-GCE_SSH = "gcloud compute ssh "
-GCE_SCP = "gcloud compute scp "
-
-
-def start_tcpdump(ad, test_name, interface="any"):
-    """Start tcpdump (by default on all interfaces).
-
-    Args:
-        ad: android device object.
-        test_name: test name used to name the tcpdump capture file.
-        interface: interface to capture on; defaults to "any" (all interfaces).
-    """
- ad.log.info("Starting tcpdump on all interfaces")
- ad.adb.shell("killall -9 tcpdump", ignore_status=True)
- ad.adb.shell(f"mkdir {TCPDUMP_PATH}", ignore_status=True)
- ad.adb.shell(f"rm -rf {TCPDUMP_PATH}/*", ignore_status=True)
-
- file_name = f"{TCPDUMP_PATH}/tcpdump_{ad.serial}_{test_name}.pcap"
- ad.log.info("tcpdump file is %s", file_name)
- cmd = f"adb -s {ad.serial} shell tcpdump -i {interface} -s0 -w {file_name}"
- try:
- return start_standing_subprocess(cmd, 5)
- except Exception:
- ad.log.exception(f"Could not start standing process {repr(cmd)}")
-
- return None
-
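-# Editor's note: illustrative usage, not part of the original file. A test
-# would normally pair start_tcpdump with stop_tcpdump, e.g.
-#   proc = start_tcpdump(ad, "my_test")
-#   ...  # exercise the network under test
-#   pcap_path = stop_tcpdump(ad, proc, "my_test")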
-
-def stop_tcpdump(
- ad,
- proc,
- test_name,
- pull_dump=True,
- adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT,
-):
- """Stops tcpdump on any iface.
-
- Pulls the tcpdump file in the tcpdump dir if necessary.
-
- Args:
- ad: android device object.
-        proc: tcpdump process to stop.
-        test_name: test name used to name the saved tcpdump file.
-        pull_dump: whether to pull the tcpdump file.
-        adb_pull_timeout: timeout for the adb pull.
-
- Returns:
- log_path of the tcpdump file
- """
- ad.log.info("Stopping and pulling tcpdump if any")
- if proc is None:
- return None
- try:
- stop_standing_subprocess(proc)
- except Exception as e:
- ad.log.warning(e)
- if pull_dump:
- log_path = os.path.join(ad.device_log_path, f"TCPDUMP_{ad.serial}")
- os.makedirs(log_path, exist_ok=True)
- ad.adb.pull(f"{TCPDUMP_PATH}/. {log_path}", timeout=adb_pull_timeout)
- ad.adb.shell(f"rm -rf {TCPDUMP_PATH}/*", ignore_status=True)
- file_name = f"tcpdump_{ad.serial}_{test_name}.pcap"
- return f"{log_path}/{file_name}"
- return None
diff --git a/packages/antlion/test_utils/wifi/OWNERS b/packages/antlion/test_utils/wifi/OWNERS
deleted file mode 100644
index 10e4214..0000000
--- a/packages/antlion/test_utils/wifi/OWNERS
+++ /dev/null
@@ -1,5 +0,0 @@
-bkleung@google.com
-gmoturu@google.com
-hsiuchangchen@google.com
-
-include platform/packages/modules/Wifi:/WIFI_OWNERS
diff --git a/packages/antlion/test_utils/wifi/__init__.py b/packages/antlion/test_utils/wifi/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/test_utils/wifi/__init__.py
+++ /dev/null
diff --git a/packages/antlion/test_utils/wifi/base_test.py b/packages/antlion/test_utils/wifi/base_test.py
deleted file mode 100644
index fbac57d..0000000
--- a/packages/antlion/test_utils/wifi/base_test.py
+++ /dev/null
@@ -1,904 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Base Class for Defining Common WiFi Test Functionality
-"""
-
-import copy
-import os
-from typing import Any, TypedDict, TypeVar
-
-from mobly import signals
-from mobly.base_test import BaseTestClass
-from mobly.config_parser import TestRunConfig
-from mobly.records import TestResultRecord
-
-from antlion import context, controllers, utils
-from antlion.controllers.access_point import AccessPoint
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import (
- OpenWRTEncryptionMode,
- SecurityMode,
-)
-from antlion.controllers.attenuator import Attenuator
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.iperf_client import IPerfClientBase
-from antlion.controllers.iperf_server import IPerfServer, IPerfServerOverSsh
-from antlion.controllers.openwrt_ap import PMF_ENABLED, BSSIDMap, OpenWrtAP
-from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig
-from antlion.controllers.packet_capture import PacketCapture
-from antlion.controllers.pdu import PduDevice
-from antlion.keys import Config
-from antlion.test_utils.abstract_devices.wlan_device import (
- AndroidWlanDevice,
- AssociationMode,
- FuchsiaWlanDevice,
- SupportsWLAN,
-)
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.types import Controller
-from antlion.validation import MapValidator
-
-WifiEnums = wutils.WifiEnums
-MAX_AP_COUNT = 2
-
-
-class Network(TypedDict):
- SSID: str
- security: SecurityMode
- password: str | None
- hiddenSSID: bool
- wepKeys: list[str] | None
- ieee80211w: str | None
-
-
-class NetworkUpdate(TypedDict, total=False):
- SSID: str
- security: SecurityMode
- password: str | None
- hiddenSSID: bool
- wepKeys: list[str] | None
- ieee80211w: str | None
-
-
-NetworkList = dict[str, Network]
-
-_T = TypeVar("_T")
-
-
-class WifiBaseTest(BaseTestClass):
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.enable_packet_log = False
- self.packet_log_2g = hostapd_constants.AP_DEFAULT_CHANNEL_2G
- self.packet_log_5g = hostapd_constants.AP_DEFAULT_CHANNEL_5G
- self.tcpdump_proc: list[Any] = []
- self.packet_log_pid: dict[str, Any] = {}
-
- T = TypeVar("T")
-
- def register_controller(module: Controller[T]) -> list[T]:
- registered_controllers: list[T] | None = self.register_controller(
- module, required=False
- )
- if registered_controllers is None:
- return []
- return registered_controllers
-
- self.access_points: list[AccessPoint] = register_controller(
- controllers.access_point
- )
- self.openwrt_aps: list[OpenWrtAP] = register_controller(
- controllers.openwrt_ap
- )
- self.android_devices: list[AndroidDevice] = register_controller(
- controllers.android_device
- )
- self.attenuators: list[Attenuator] = register_controller(
- controllers.attenuator
- )
- self.fuchsia_devices: list[FuchsiaDevice] = register_controller(
- controllers.fuchsia_device
- )
- self.iperf_clients: list[IPerfClientBase] = register_controller(
- controllers.iperf_client
- )
- iperf_servers: list[
- IPerfServer | IPerfServerOverSsh
- ] = register_controller(controllers.iperf_server)
- self.iperf_servers = [
- iperf_server
- for iperf_server in iperf_servers
- if isinstance(iperf_server, IPerfServerOverSsh)
- ]
- self.pdu_devices: list[PduDevice] = register_controller(controllers.pdu)
- self.packet_capture: list[PacketCapture] = register_controller(
- controllers.packet_capture
- )
-
- for attenuator in self.attenuators:
- attenuator.set_atten(0)
-
- self.pixel_models: list[str] | None = self.user_params.get(
- "pixel_models"
- )
- self.cnss_diag_file: str | list[str] | None = self.user_params.get(
- "cnss_diag_file"
- )
- self.country_code_file: str | list[str] | None = self.user_params.get(
- "country_code_file"
- )
-
- if self.cnss_diag_file:
- if isinstance(self.cnss_diag_file, list):
- self.cnss_diag_file = self.cnss_diag_file[0]
- if not os.path.isfile(self.cnss_diag_file):
- self.cnss_diag_file = os.path.join(
- self.user_params[Config.key_config_path.value],
- self.cnss_diag_file,
- )
-
- self.packet_logger: PacketCapture | None = None
- if self.enable_packet_log and self.packet_capture:
- self.packet_logger = self.packet_capture[0]
- self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g)
- self.packet_logger.configure_monitor_mode("5G", self.packet_log_5g)
-
- for ad in self.android_devices:
- wutils.wifi_test_device_init(ad)
- if self.country_code_file:
- if isinstance(self.country_code_file, list):
- self.country_code_file = self.country_code_file[0]
- if not os.path.isfile(self.country_code_file):
- self.country_code_file = os.path.join(
- self.user_params[Config.key_config_path.value],
- self.country_code_file,
- )
- self.country_code = utils.load_config(self.country_code_file)[
- "country"
- ]
- else:
- self.country_code = WifiEnums.CountryCode.US
- wutils.set_wifi_country_code(ad, self.country_code)
-
- def setup_test(self) -> None:
- if self.android_devices and self.cnss_diag_file and self.pixel_models:
- wutils.start_cnss_diags(
- self.android_devices, self.cnss_diag_file, self.pixel_models
- )
- self.tcpdump_proc = []
- for ad in self.android_devices:
- proc = nutils.start_tcpdump(ad, self.current_test_info.name)
- self.tcpdump_proc.append((ad, proc))
- if self.packet_logger:
- self.packet_log_pid = wutils.start_pcap(
- self.packet_logger, "dual", self.current_test_info.name
- )
-
- def teardown_test(self) -> None:
- if self.android_devices and self.cnss_diag_file and self.pixel_models:
- wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
- for proc in self.tcpdump_proc:
- nutils.stop_tcpdump(
- proc[0],
- proc[1],
- self.current_test_info.name,
- pull_dump=False,
- )
- self.tcpdump_proc = []
- if self.packet_logger and self.packet_log_pid:
- wutils.stop_pcap(
- self.packet_logger, self.packet_log_pid, test_status=True
- )
- self.packet_log_pid = {}
-
- def teardown_class(self) -> None:
- super().teardown_class()
- if hasattr(self, "fuchsia_devices"):
- for device in self.fuchsia_devices:
- device.take_bug_report()
- self.download_logs()
-
- def on_fail(self, record: TestResultRecord) -> None:
- """A function that is executed upon a test failure.
-
- Args:
- record: A copy of the test record for this test, containing all information of
- the test execution including exception objects.
- """
- if self.android_devices:
- for ad in self.android_devices:
- ad.take_bug_report(record.test_name, record.begin_time)
- ad.cat_adb_log(record.test_name, record.begin_time)
- wutils.get_ssrdumps(ad)
- if self.cnss_diag_file and self.pixel_models:
- wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
- for ad in self.android_devices:
- wutils.get_cnss_diag_log(ad)
- for proc in self.tcpdump_proc:
- nutils.stop_tcpdump(proc[0], proc[1], record.test_name)
- self.tcpdump_proc = []
- if self.packet_logger and self.packet_log_pid:
- wutils.stop_pcap(
- self.packet_logger, self.packet_log_pid, test_status=False
- )
- self.packet_log_pid = {}
-
-        # Collect logs by running the generic device failure handler on each Fuchsia device.
- for fd in self.fuchsia_devices:
- self.on_device_fail(fd, record)
-
- def on_device_fail(
- self, device: FuchsiaDevice, _: TestResultRecord
- ) -> None:
- """Gets a generic device DUT bug report.
-
- This method takes a bug report if the device has the
- 'take_bug_report_on_fail' config value, and if the flag is true. This
- method also power cycles if 'hard_reboot_on_fail' is True.
-
- Args:
- device: Generic device to gather logs from.
- record: More information about the test.
- """
- if (
- not hasattr(device, "take_bug_report_on_fail")
- or device.take_bug_report_on_fail
- ):
- device.take_bug_report()
-
- if (
- hasattr(device, "hard_reboot_on_fail")
- and device.hard_reboot_on_fail
- ):
- device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
-
- def get_dut(self, association_mode: AssociationMode) -> SupportsWLAN:
- """Get the DUT based on user_params, default to Fuchsia."""
- device_type = self.user_params.get("dut", "fuchsia_devices")
- if device_type == "fuchsia_devices":
- return self.get_dut_type(FuchsiaDevice, association_mode)[1]
- elif device_type == "android_devices":
-            return self.get_dut_type(AndroidDevice, association_mode)[1]
- else:
- raise signals.TestAbortClass(
- f'Invalid "dut" type specified in config: "{device_type}". '
- 'Expected "fuchsia_devices" or "android_devices".'
- )
-
- def get_dut_type(
- self, device_type: type[_T], association_mode: AssociationMode
- ) -> tuple[_T, SupportsWLAN]:
- if device_type is FuchsiaDevice:
- if len(self.fuchsia_devices) == 0:
- raise signals.TestAbortClass(
- "Requires at least one Fuchsia device"
- )
- fd = self.fuchsia_devices[0]
- assert isinstance(fd, device_type)
- return fd, FuchsiaWlanDevice(fd, association_mode)
-
- if device_type is AndroidDevice:
- if len(self.android_devices) == 0:
- raise signals.TestAbortClass(
- "Requires at least one Android device"
- )
- ad = self.android_devices[0]
- assert isinstance(ad, device_type)
- return ad, AndroidWlanDevice(ad)
-
- raise signals.TestAbortClass(
- f"Invalid device_type specified: {device_type.__name__}. "
- "Expected FuchsiaDevice or AndroidDevice."
- )
-
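-    # Editor's note: illustrative usage, not part of the original file. A test
-    # subclass would typically acquire its DUT during setup, e.g.
-    #   self.dut = self.get_dut(AssociationMode.POLICY)
-    # where AssociationMode.POLICY is an assumed member of the imported
-    # AssociationMode enum.
-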
- def download_logs(self) -> None:
-        """Downloads the DHCP and hostapd logs from the access_point.
-
- Using the current TestClassContext and TestCaseContext this method pulls
- the DHCP and hostapd logs and outputs them to the correct path.
- """
- current_path = context.get_current_context().get_full_output_path()
- if hasattr(self, "access_points"):
- for access_point in self.access_points:
- access_point.download_ap_logs(current_path)
- if hasattr(self, "iperf_servers"):
- for iperf_server in self.iperf_servers:
- iperf_server.download_logs(current_path)
-
- def get_psk_network(
- self,
- mirror_ap: bool,
- reference_networks: list[NetworkList],
- hidden: bool = False,
- same_ssid: bool = False,
- security_mode: SecurityMode = SecurityMode.WPA2,
- ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
- ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
- passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
- passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
- ) -> NetworkList:
- """Generates SSID and passphrase for a WPA2 network using random
- generator.
-
- Args:
- mirror_ap: Determines if both APs use the same hostapd config or
- different configs.
-            reference_networks: List to which the generated PSK networks
-                are appended.
-            hidden: Determines if the SSIDs are hidden.
-            same_ssid: Determines if both bands on AP use the same SSID.
-            security_mode: Security mode for the generated networks.
- ssid_length_2g: Number of characters to use for 2G SSID.
- ssid_length_5g: Number of characters to use for 5G SSID.
- passphrase_length_2g: Length of password for 2G network.
- passphrase_length_5g: Length of password for 5G network.
-
- Returns: A dict of 2G and 5G network lists for hostapd configuration.
- """
- if same_ssid:
- ref_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
- ref_5g_ssid = ref_2g_ssid
-
- ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
- ref_5g_passphrase = ref_2g_passphrase
-
- else:
- ref_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
- ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
-
- ref_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
- ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
-
- network_dict_2g = Network(
- SSID=ref_2g_ssid,
- security=security_mode,
- password=ref_2g_passphrase,
- hiddenSSID=hidden,
- wepKeys=None,
- ieee80211w=None,
- )
-
- network_dict_5g = Network(
- SSID=ref_5g_ssid,
- security=security_mode,
- password=ref_5g_passphrase,
- hiddenSSID=hidden,
- wepKeys=None,
- ieee80211w=None,
- )
-
- for _ in range(MAX_AP_COUNT):
- reference_networks.append(
- {
- "2g": copy.copy(network_dict_2g),
- "5g": copy.copy(network_dict_5g),
- }
- )
- if not mirror_ap:
- break
- return {"2g": network_dict_2g, "5g": network_dict_5g}
-
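-    # Editor's note: illustrative usage, not part of the original file. Callers
-    # typically pass in a list that receives the generated networks, e.g.
-    #   reference_networks: list[NetworkList] = []
-    #   networks = self.get_psk_network(mirror_ap=True, reference_networks=reference_networks)
-    # after which reference_networks holds a 2G/5G entry per AP (two entries
-    # when mirror_ap is True, otherwise one).
-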
- def get_open_network(
- self,
- mirror_ap: bool,
- open_network: list[NetworkList],
- hidden: bool = False,
- same_ssid: bool = False,
- ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
- ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
- security_mode: SecurityMode = SecurityMode.OPEN,
- ) -> NetworkList:
-        """Generates SSIDs for an open network using a random generator.
-
- Args:
- mirror_ap: Boolean, determines if both APs use the same hostapd
- config or different configs.
-            open_network: List to which the generated open networks are appended.
- same_ssid: Boolean, determines if both bands on AP use the same
- SSID.
- ssid_length_2g: Int, number of characters to use for 2G SSID.
- ssid_length_5g: Int, number of characters to use for 5G SSID.
- security_mode: 'none' for open and 'OWE' for WPA3 OWE.
-
- Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
- """
- if same_ssid:
- open_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
- open_5g_ssid = open_2g_ssid
- else:
- open_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
- open_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
-
- network_dict_2g = Network(
- SSID=open_2g_ssid,
- security=security_mode,
- password=None,
- hiddenSSID=hidden,
- wepKeys=None,
- ieee80211w=None,
- )
-
- network_dict_5g = Network(
- SSID=open_5g_ssid,
- security=security_mode,
- password=None,
- hiddenSSID=hidden,
- wepKeys=None,
- ieee80211w=None,
- )
-
- for _ in range(MAX_AP_COUNT):
- open_network.append(
- {
- "2g": copy.copy(network_dict_2g),
- "5g": copy.copy(network_dict_5g),
- }
- )
- if not mirror_ap:
- break
- return {"2g": network_dict_2g, "5g": network_dict_5g}
-
- def get_wep_network(
- self,
- mirror_ap: bool,
- networks: list[NetworkList],
- hidden: bool = False,
- same_ssid: bool = False,
- ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
- ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
- passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
- passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
- ) -> NetworkList:
- """Generates SSID and passphrase for a WEP network using random
- generator.
-
- Args:
- mirror_ap: Determines if both APs use the same hostapd config or
- different configs.
-            networks: List to which the generated WEP networks are appended.
- same_ssid: Determines if both bands on AP use the same SSID.
- ssid_length_2g: Number of characters to use for 2G SSID.
- ssid_length_5g: Number of characters to use for 5G SSID.
- passphrase_length_2g: Length of password for 2G network.
- passphrase_length_5g: Length of password for 5G network.
-
- Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
- """
- if same_ssid:
- ref_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
- ref_5g_ssid = ref_2g_ssid
-
- ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
- ref_5g_passphrase = ref_2g_passphrase
-
- else:
- ref_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
- ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
-
- ref_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
- ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
-
- network_dict_2g = Network(
- SSID=ref_2g_ssid,
- security=SecurityMode.WEP,
- password=None,
- hiddenSSID=hidden,
- wepKeys=[ref_2g_passphrase] * 4,
- ieee80211w=None,
- )
-
- network_dict_5g = Network(
- SSID=ref_5g_ssid,
- security=SecurityMode.WEP,
- password=None,
- hiddenSSID=hidden,
- wepKeys=[ref_5g_passphrase] * 4,
- ieee80211w=None,
- )
-
- for _ in range(MAX_AP_COUNT):
- networks.append(
- {
- "2g": copy.copy(network_dict_2g),
- "5g": copy.copy(network_dict_5g),
- }
- )
- if not mirror_ap:
- break
- return {"2g": network_dict_2g, "5g": network_dict_5g}
-
- def configure_openwrt_ap_and_start(
- self,
- channel_5g: int = hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- channel_2g: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- channel_5g_ap2: int | None = None,
- channel_2g_ap2: int | None = None,
- ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
- passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
- ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
- passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
- mirror_ap: bool = False,
- hidden: bool = False,
- same_ssid: bool = False,
- open_network: bool = False,
- wpa1_network: bool = False,
- wpa_network: bool = False,
- wep_network: bool = False,
- ent_network: bool = False,
- ent_network_pwd: bool = False,
- owe_network: bool = False,
- sae_network: bool = False,
- saemixed_network: bool = False,
- radius_conf_2g: dict[str, Any] | None = None,
- radius_conf_5g: dict[str, Any] | None = None,
- radius_conf_pwd: dict[str, Any] | None = None,
- ap_count: int = 1,
- ieee80211w: int | None = None,
- ) -> None:
- """Create, configure and start OpenWrt AP.
-
- Args:
- channel_5g: 5G channel to configure.
- channel_2g: 2G channel to configure.
- channel_5g_ap2: 5G channel to configure on AP2.
- channel_2g_ap2: 2G channel to configure on AP2.
- ssid_length_2g: Int, number of characters to use for 2G SSID.
- passphrase_length_2g: Int, length of password for 2G network.
- ssid_length_5g: Int, number of characters to use for 5G SSID.
- passphrase_length_5g: Int, length of password for 5G network.
-            mirror_ap: Boolean, determines if both APs use the same hostapd config.
-            hidden: Boolean, determines if the SSIDs are hidden.
-            same_ssid: Boolean, determines if both bands on AP use the same SSID.
-            open_network: Boolean, whether an open network should be configured.
-            wpa1_network: Boolean, whether a WPA network should be configured.
-            wpa_network: Boolean, whether a WPA2 network should be configured.
-            wep_network: Boolean, whether a WEP network should be configured.
-            ent_network: Boolean, whether an enterprise network should be configured.
-            ent_network_pwd: Boolean, whether an enterprise pwd network should be configured.
-            owe_network: Boolean, whether an OWE network should be configured.
-            sae_network: Boolean, whether an SAE network should be configured.
-            saemixed_network: Boolean, whether an SAE-mixed network should be configured.
-            radius_conf_2g: dictionary with enterprise RADIUS server details.
-            radius_conf_5g: dictionary with enterprise RADIUS server details.
-            radius_conf_pwd: dictionary with enterprise RADIUS server details.
-            ap_count: Number of APs to configure.
-            ieee80211w: PMF setting to configure.
- """
- if mirror_ap and ap_count == 1:
- raise ValueError("ap_count cannot be 1 if mirror_ap is True.")
- if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1:
- raise ValueError(
- "ap_count cannot be 1 if channels of AP2 are provided."
- )
-        # We create channel lists for the 2G and 5G bands. Each list has size
-        # 2, based on the assumption that each testbed has at most 2 APs.
- if not channel_5g_ap2:
- channel_5g_ap2 = channel_5g
- if not channel_2g_ap2:
- channel_2g_ap2 = channel_2g
- channels_2g = [channel_2g, channel_2g_ap2]
- channels_5g = [channel_5g, channel_5g_ap2]
-
- if radius_conf_2g is None:
- radius_conf_2g = {}
- if radius_conf_5g is None:
- radius_conf_5g = {}
- if radius_conf_pwd is None:
- radius_conf_pwd = {}
-
- self.bssid_map: list[BSSIDMap] = []
- for i in range(ap_count):
- configs: list[WirelessConfig] = []
-
- num_2g: int = 1
- num_5g: int = 1
-
- if wpa1_network:
- networks = self.get_psk_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- SecurityMode.WPA,
- ssid_length_2g,
- ssid_length_5g,
- passphrase_length_2g,
- passphrase_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.PSK,
- band=band,
- password=networks[band]["password"],
- hidden=networks[band]["hiddenSSID"],
- ieee80211w=ieee80211w,
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if wpa_network:
- networks = self.get_psk_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- SecurityMode.WPA2,
- ssid_length_2g,
- ssid_length_5g,
- passphrase_length_2g,
- passphrase_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.PSK2,
- band=band,
- password=networks[band]["password"],
- hidden=networks[band]["hiddenSSID"],
- ieee80211w=ieee80211w,
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if wep_network:
- networks = self.get_wep_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g,
- ssid_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.WEP,
- band=band,
- wep_key=networks[band]["wepKeys"],
- hidden=networks[band]["hiddenSSID"],
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if ent_network:
- networks = self.get_open_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g,
- ssid_length_5g,
- SecurityMode.WPA2,
- )
-
- def add_config_with_radius(
- name: str,
- band: str,
- radius_conf: dict[str, str | int | None],
- ) -> None:
- conf = MapValidator(radius_conf)
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.WPA2,
- band=band,
- radius_server_ip=conf.get(
- str, "radius_server_ip", None
- ),
- radius_server_port=conf.get(
- int, "radius_server_port", None
- ),
- radius_server_secret=conf.get(
- str, "radius_server_secret", None
- ),
- hidden=networks[band]["hiddenSSID"],
- )
- )
-
- add_config_with_radius(
- f"wifi_2g_{num_2g}",
- hostapd_constants.BAND_2G,
- radius_conf_2g,
- )
- add_config_with_radius(
- f"wifi_5g_{num_5g}",
- hostapd_constants.BAND_5G,
- radius_conf_5g,
- )
- num_2g += 1
- num_5g += 1
- if ent_network_pwd:
- networks = self.get_open_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g,
- ssid_length_5g,
- SecurityMode.WPA2,
- )
-
- radius_conf = {} if radius_conf_pwd is None else radius_conf_pwd
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.WPA2,
- band=band,
- radius_server_ip=radius_conf.get(
- "radius_server_ip"
- ),
- radius_server_port=radius_conf.get(
- "radius_server_port"
- ),
- radius_server_secret=radius_conf.get(
- "radius_server_secret"
- ),
- hidden=networks[band]["hiddenSSID"],
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if open_network:
- networks = self.get_open_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g,
- ssid_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.NONE,
- band=band,
- hidden=networks[band]["hiddenSSID"],
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if owe_network:
- networks = self.get_open_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g,
- ssid_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.OWE,
- band=band,
- hidden=networks[band]["hiddenSSID"],
- ieee80211w=PMF_ENABLED,
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if sae_network:
- networks = self.get_psk_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g=ssid_length_2g,
- ssid_length_5g=ssid_length_5g,
- passphrase_length_2g=passphrase_length_2g,
- passphrase_length_5g=passphrase_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.SAE,
- band=band,
- password=networks[band]["password"],
- hidden=networks[band]["hiddenSSID"],
- ieee80211w=PMF_ENABLED,
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
- if saemixed_network:
- networks = self.get_psk_network(
- mirror_ap,
- [],
- hidden,
- same_ssid,
- ssid_length_2g=ssid_length_2g,
- ssid_length_5g=ssid_length_5g,
- passphrase_length_2g=passphrase_length_2g,
- passphrase_length_5g=passphrase_length_5g,
- )
-
- def add_config(name: str, band: str) -> None:
- configs.append(
- WirelessConfig(
- name=name,
- ssid=networks[band]["SSID"],
- security=OpenWRTEncryptionMode.SAE_MIXED,
- band=band,
- password=networks[band]["password"],
- hidden=networks[band]["hiddenSSID"],
- ieee80211w=ieee80211w,
- )
- )
-
- add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
- add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
- num_2g += 1
- num_5g += 1
-
- openwrt_ap = self.openwrt_aps[i]
- openwrt_ap.configure_ap(configs, channels_2g[i], channels_5g[i])
- openwrt_ap.start_ap()
- self.bssid_map.append(openwrt_ap.get_bssids_for_wifi_networks())
-
- if mirror_ap:
- openwrt_ap_mirror = self.openwrt_aps[i + 1]
- openwrt_ap_mirror.configure_ap(
- configs, channels_2g[i + 1], channels_5g[i + 1]
- )
- openwrt_ap_mirror.start_ap()
- self.bssid_map.append(
- openwrt_ap_mirror.get_bssids_for_wifi_networks()
- )
- break
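-    # Editor's note: illustrative usage, not part of the original file. A single
-    # OpenWrt AP with WPA2 networks on both bands could be brought up roughly as
-    #   self.configure_openwrt_ap_and_start(wpa_network=True, ap_count=1)
-    # after which self.bssid_map[0] holds the BSSIDs reported for the configured
-    # wifi networks.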
diff --git a/packages/antlion/test_utils/wifi/wifi_constants.py b/packages/antlion/test_utils/wifi/wifi_constants.py
deleted file mode 100644
index 0783586..0000000
--- a/packages/antlion/test_utils/wifi/wifi_constants.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Constants for Wifi related events.
-WIFI_CONNECTED = "WifiNetworkConnected"
-WIFI_DISCONNECTED = "WifiNetworkDisconnected"
-SUPPLICANT_CON_CHANGED = "SupplicantConnectionChanged"
-WIFI_STATE_CHANGED = "WifiStateChanged"
-WIFI_FORGET_NW_SUCCESS = "WifiManagerForgetNetworkOnSuccess"
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH = (
- "WifiManagerNetworkRequestMatchCallbackOnMatch"
-)
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_SUCCESS = (
- "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectSuccess"
-)
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_FAILURE = (
- "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectFailure"
-)
-WIFI_NETWORK_CB_ON_AVAILABLE = "WifiManagerNetworkCallbackOnAvailable"
-WIFI_NETWORK_CB_ON_UNAVAILABLE = "WifiManagerNetworkCallbackOnUnavailable"
-WIFI_NETWORK_CB_ON_LOST = "WifiManagerNetworkCallbackOnLost"
-WIFI_NETWORK_SUGGESTION_POST_CONNECTION = "WifiNetworkSuggestionPostConnection"
-WIFI_SUBSYSTEM_RESTARTING = "WifiSubsystemRestarting"
-WIFI_SUBSYSTEM_RESTARTED = "WifiSubsystemRestarted"
-
-# These constants will be used by the ACTS wifi tests.
-CONNECT_BY_CONFIG_SUCCESS = "WifiManagerConnectByConfigOnSuccess"
-CONNECT_BY_NETID_SUCCESS = "WifiManagerConnectByNetIdOnSuccess"
-
-# Softap related constants
-SOFTAP_CALLBACK_EVENT = "WifiManagerSoftApCallback-"
-# Callback Event for softap state change
-# WifiManagerSoftApCallback-[callbackId]-OnStateChanged
-SOFTAP_STATE_CHANGED = "-OnStateChanged"
-SOFTAP_STATE_CHANGE_CALLBACK_KEY = "State"
-WIFI_AP_DISABLING_STATE = 10
-WIFI_AP_DISABLED_STATE = 11
-WIFI_AP_ENABLING_STATE = 12
-WIFI_AP_ENABLED_STATE = 13
-WIFI_AP_FAILED_STATE = 14
-
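-# Editor's note (illustrative, not part of the original file): the full softap
-# callback event name is composed from the prefix, the callback id, and an
-# event suffix, e.g.
-#   event = f"{SOFTAP_CALLBACK_EVENT}{callback_id}{SOFTAP_STATE_CHANGED}"
-# which yields "WifiManagerSoftApCallback-<callbackId>-OnStateChanged".
-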
-SOFTAP_RANDOMIZATION_NONE = 0
-SOFTAP_RANDOMIZATION_PERSISTENT = 1
-
-# Callback Event for client number change:
-# WifiManagerSoftApCallback-[callbackId]-OnNumClientsChanged
-SOFTAP_NUMBER_CLIENTS_CHANGED_WITH_INFO = "-OnConnectedClientsChangedWithInfo"
-SOFTAP_NUMBER_CLIENTS_CHANGED = "-OnNumClientsChanged"
-SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY = "NumClients"
-SOFTAP_CLIENTS_MACS_CALLBACK_KEY = "MacAddresses"
-# Callback Event for softap info change
-SOFTAP_INFO_CHANGED = "-OnInfoChanged"
-SOFTAP_INFOLIST_CHANGED = "-OnInfoListChanged"
-SOFTAP_INFO_FREQUENCY_CALLBACK_KEY = "frequency"
-SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY = "bandwidth"
-SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY = "wifiStandard"
-SOFTAP_INFO_AUTO_SHUTDOWN_CALLBACK_KEY = "autoShutdownTimeoutMillis"
-SOFTAP_INFO_BSSID_CALLBACK_KEY = "bssid"
-# Callback Event for softap client blocking
-SOFTAP_BLOCKING_CLIENT_CONNECTING = "-OnBlockedClientConnecting"
-SOFTAP_BLOCKING_CLIENT_REASON_KEY = "BlockedReason"
-SOFTAP_BLOCKING_CLIENT_WIFICLIENT_KEY = "WifiClient"
-SAP_CLIENT_BLOCK_REASON_CODE_BLOCKED_BY_USER = 0
-SAP_CLIENT_BLOCK_REASON_CODE_NO_MORE_STAS = 1
-
-# Callback Event for softap capability
-SOFTAP_CAPABILITY_CHANGED = "-OnCapabilityChanged"
-SOFTAP_CAPABILITY_MAX_SUPPORTED_CLIENTS = "maxSupportedClients"
-SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST = "supported2GHzChannellist"
-SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST = "supported5GHzChannellist"
-SOFTAP_CAPABILITY_6GHZ_SUPPORTED_CHANNEL_LIST = "supported6GHzChannellist"
-SOFTAP_CAPABILITY_60GHZ_SUPPORTED_CHANNEL_LIST = "supported60GHzChannellist"
-SOFTAP_CAPABILITY_FEATURE_ACS = "acsOffloadSupported"
-SOFTAP_CAPABILITY_FEATURE_CLIENT_CONTROL = "clientForceDisconnectSupported"
-SOFTAP_CAPABILITY_FEATURE_WPA3_SAE = "wpa3SaeSupported"
-SOFTAP_CAPABILITY_FEATURE_IEEE80211AX = "ieee80211axSupported"
-SOFTAP_CAPABILITY_FEATURE_24GHZ = "24gSupported"
-SOFTAP_CAPABILITY_FEATURE_5GHZ = "5gSupported"
-SOFTAP_CAPABILITY_FEATURE_6GHZ = "6gSupported"
-SOFTAP_CAPABILITY_FEATURE_60GHZ = "60gSupported"
-
-DEFAULT_SOFTAP_TIMEOUT_S = 600 # 10 minutes
-
-# AP related constants
-AP_MAIN = "main_AP"
-AP_AUX = "aux_AP"
-SSID = "SSID"
-
-# cnss_diag property related constants
-DEVICES_USING_LEGACY_PROP = [
- "sailfish",
- "marlin",
- "walleye",
- "taimen",
- "muskie",
-]
-CNSS_DIAG_PROP = "persist.vendor.sys.cnss.diag_txt"
-LEGACY_CNSS_DIAG_PROP = "persist.sys.cnss.diag_txt"
-
-# Delay before registering the match callback.
-NETWORK_REQUEST_CB_REGISTER_DELAY_SEC = 2
-
-# Constants for JSONObject representation of CoexUnsafeChannel
-COEX_BAND = "band"
-COEX_BAND_24_GHZ = "24_GHZ"
-COEX_BAND_5_GHZ = "5_GHZ"
-COEX_CHANNEL = "channel"
-COEX_POWER_CAP_DBM = "powerCapDbm"
-
-# Constants for bundle keys for CoexCallback#onCoexUnsafeChannelsChanged
-KEY_COEX_UNSAFE_CHANNELS = "KEY_COEX_UNSAFE_CHANNELS"
-KEY_COEX_RESTRICTIONS = "KEY_COEX_RESTRICTIONS"
-
-# WiFi standards
-WIFI_STANDARD_11AX = 6
diff --git a/packages/antlion/test_utils/wifi/wifi_test_utils.py b/packages/antlion/test_utils/wifi/wifi_test_utils.py
deleted file mode 100755
index d53bdea..0000000
--- a/packages/antlion/test_utils/wifi/wifi_test_utils.py
+++ /dev/null
@@ -1,1110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Blanket ignores to enable mypy in Antlion
-# mypy: disable-error-code="no-untyped-def"
-import logging
-import os
-import shutil
-import time
-from enum import IntEnum
-from queue import Empty
-
-from mobly import asserts, signals
-
-from antlion import context, utils
-from antlion.controllers.ap_lib.hostapd_constants import BAND_2G, BAND_5G
-from antlion.test_utils.wifi import wifi_constants
-
-# Default timeout used for reboot and for toggling WiFi and Airplane mode,
-# allowing the system to settle down after the operation.
-DEFAULT_TIMEOUT = 10
-# Number of seconds to wait for events that are supposed to happen quickly,
-# like onSuccess for starting a background scan and confirmation of a wifi
-# state change.
-SHORT_TIMEOUT = 30
-ROAMING_TIMEOUT = 30
-WIFI_CONNECTION_TIMEOUT_DEFAULT = 30
-DEFAULT_SCAN_TRIES = 3
-DEFAULT_CONNECT_TRIES = 3
-# Speed of light in m/s.
-SPEED_OF_LIGHT = 299792458
-
-DEFAULT_PING_ADDR = "https://www.google.com/robots.txt"
-
-CNSS_DIAG_CONFIG_PATH = "/data/vendor/wifi/cnss_diag/"
-CNSS_DIAG_CONFIG_FILE = "cnss_diag.conf"
-
-ROAMING_ATTN = {
- "AP1_on_AP2_off": [0, 0, 95, 95],
- "AP1_off_AP2_on": [95, 95, 0, 0],
- "default": [0, 0, 0, 0],
-}
-
-
-class WifiEnums:
- SSID_KEY = "SSID" # Used for Wifi & SoftAp
- SSID_PATTERN_KEY = "ssidPattern"
- NETID_KEY = "network_id"
- BSSID_KEY = "BSSID" # Used for Wifi & SoftAp
- BSSID_PATTERN_KEY = "bssidPattern"
- PWD_KEY = "password" # Used for Wifi & SoftAp
- frequency_key = "frequency"
- HIDDEN_KEY = "hiddenSSID" # Used for Wifi & SoftAp
- IS_APP_INTERACTION_REQUIRED = "isAppInteractionRequired"
- IS_USER_INTERACTION_REQUIRED = "isUserInteractionRequired"
- IS_SUGGESTION_METERED = "isMetered"
- PRIORITY = "priority"
- SECURITY = "security" # Used for Wifi & SoftAp
-
- # Used for SoftAp
- AP_BAND_KEY = "apBand"
- AP_CHANNEL_KEY = "apChannel"
- AP_BANDS_KEY = "apBands"
- AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies"
- AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting"
- AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = (
- "BridgedModeOpportunisticShutdownEnabled"
- )
- AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled"
- AP_MAXCLIENTS_KEY = "MaxNumberOfClients"
- AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis"
- AP_SHUTDOWNTIMEOUTENABLE_KEY = "AutoShutdownEnabled"
- AP_CLIENTCONTROL_KEY = "ClientControlByUserEnabled"
- AP_ALLOWEDLIST_KEY = "AllowedClientList"
- AP_BLOCKEDLIST_KEY = "BlockedClientList"
-
- WIFI_CONFIG_SOFTAP_BAND_2G = 1
- WIFI_CONFIG_SOFTAP_BAND_5G = 2
- WIFI_CONFIG_SOFTAP_BAND_2G_5G = 3
- WIFI_CONFIG_SOFTAP_BAND_6G = 4
- WIFI_CONFIG_SOFTAP_BAND_2G_6G = 5
- WIFI_CONFIG_SOFTAP_BAND_5G_6G = 6
- WIFI_CONFIG_SOFTAP_BAND_ANY = 7
-
-    # DO NOT USE for new test cases! Replaced by the WIFI_CONFIG_SOFTAP_BAND_* constants.
- WIFI_CONFIG_APBAND_2G = WIFI_CONFIG_SOFTAP_BAND_2G
- WIFI_CONFIG_APBAND_5G = WIFI_CONFIG_SOFTAP_BAND_5G
- WIFI_CONFIG_APBAND_AUTO = WIFI_CONFIG_SOFTAP_BAND_2G_5G
-
- WIFI_CONFIG_APBAND_2G_OLD = 0
- WIFI_CONFIG_APBAND_5G_OLD = 1
- WIFI_CONFIG_APBAND_AUTO_OLD = -1
-
- WIFI_WPS_INFO_PBC = 0
- WIFI_WPS_INFO_DISPLAY = 1
- WIFI_WPS_INFO_KEYPAD = 2
- WIFI_WPS_INFO_LABEL = 3
- WIFI_WPS_INFO_INVALID = 4
-
- class CountryCode:
- AUSTRALIA = "AU"
- CHINA = "CN"
- GERMANY = "DE"
- JAPAN = "JP"
- UK = "GB"
- US = "US"
- UNKNOWN = "UNKNOWN"
-
- # Start of Macros for EAP
- # EAP types
- class Eap(IntEnum):
- NONE = -1
- PEAP = 0
- TLS = 1
- TTLS = 2
- PWD = 3
- SIM = 4
- AKA = 5
- AKA_PRIME = 6
- UNAUTH_TLS = 7
-
- # EAP Phase2 types
- class EapPhase2(IntEnum):
- NONE = 0
- PAP = 1
- MSCHAP = 2
- MSCHAPV2 = 3
- GTC = 4
-
- class Enterprise:
- # Enterprise Config Macros
- EMPTY_VALUE = "NULL"
- EAP = "eap"
- PHASE2 = "phase2"
- IDENTITY = "identity"
- ANON_IDENTITY = "anonymous_identity"
- PASSWORD = "password"
- SUBJECT_MATCH = "subject_match"
- ALTSUBJECT_MATCH = "altsubject_match"
- DOM_SUFFIX_MATCH = "domain_suffix_match"
- CLIENT_CERT = "client_cert"
- CA_CERT = "ca_cert"
- ENGINE = "engine"
- ENGINE_ID = "engine_id"
- PRIVATE_KEY_ID = "key_id"
- REALM = "realm"
- PLMN = "plmn"
- FQDN = "FQDN"
- FRIENDLY_NAME = "providerFriendlyName"
- ROAMING_IDS = "roamingConsortiumIds"
- OCSP = "ocsp"
-
- # End of Macros for EAP
-
- # Macros as specified in the WifiScanner code.
- WIFI_BAND_UNSPECIFIED = 0 # not specified
- WIFI_BAND_24_GHZ = 1 # 2.4 GHz band
- WIFI_BAND_5_GHZ = 2 # 5 GHz band without DFS channels
- WIFI_BAND_5_GHZ_DFS_ONLY = 4 # 5 GHz band with DFS channels
- WIFI_BAND_5_GHZ_WITH_DFS = 6 # 5 GHz band with DFS channels
- WIFI_BAND_BOTH = 3 # both bands without DFS channels
- WIFI_BAND_BOTH_WITH_DFS = 7 # both bands with DFS channels
-
- SCAN_TYPE_LOW_LATENCY = 0
- SCAN_TYPE_LOW_POWER = 1
- SCAN_TYPE_HIGH_ACCURACY = 2
-
- # US Wifi frequencies
- ALL_2G_FREQUENCIES = [
- 2412,
- 2417,
- 2422,
- 2427,
- 2432,
- 2437,
- 2442,
- 2447,
- 2452,
- 2457,
- 2462,
- ]
- DFS_5G_FREQUENCIES = [
- 5260,
- 5280,
- 5300,
- 5320,
- 5500,
- 5520,
- 5540,
- 5560,
- 5580,
- 5600,
- 5620,
- 5640,
- 5660,
- 5680,
- 5700,
- 5720,
- ]
- NONE_DFS_5G_FREQUENCIES = [
- 5180,
- 5200,
- 5220,
- 5240,
- 5745,
- 5765,
- 5785,
- 5805,
- 5825,
- ]
- ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
-
- band_to_frequencies = {
- WIFI_BAND_24_GHZ: ALL_2G_FREQUENCIES,
- WIFI_BAND_5_GHZ: NONE_DFS_5G_FREQUENCIES,
- WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES,
- WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES,
- WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES,
- WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES,
- }
-
- # TODO: add all of the band mapping.
- softap_band_frequencies = {
- WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES,
- WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES,
- }
-
- # All Wifi frequencies to channels lookup.
- freq_to_channel = {
- 2412: 1,
- 2417: 2,
- 2422: 3,
- 2427: 4,
- 2432: 5,
- 2437: 6,
- 2442: 7,
- 2447: 8,
- 2452: 9,
- 2457: 10,
- 2462: 11,
- 2467: 12,
- 2472: 13,
- 2484: 14,
- 4915: 183,
- 4920: 184,
- 4925: 185,
- 4935: 187,
- 4940: 188,
- 4945: 189,
- 4960: 192,
- 4980: 196,
- 5035: 7,
- 5040: 8,
- 5045: 9,
- 5055: 11,
- 5060: 12,
- 5080: 16,
- 5170: 34,
- 5180: 36,
- 5190: 38,
- 5200: 40,
- 5210: 42,
- 5220: 44,
- 5230: 46,
- 5240: 48,
- 5260: 52,
- 5280: 56,
- 5300: 60,
- 5320: 64,
- 5500: 100,
- 5520: 104,
- 5540: 108,
- 5560: 112,
- 5580: 116,
- 5600: 120,
- 5620: 124,
- 5640: 128,
- 5660: 132,
- 5680: 136,
- 5700: 140,
- 5745: 149,
- 5765: 153,
- 5785: 157,
- 5795: 159,
- 5805: 161,
- 5825: 165,
- }
-
- # All Wifi channels to frequencies lookup.
- channel_2G_to_freq = {
- 1: 2412,
- 2: 2417,
- 3: 2422,
- 4: 2427,
- 5: 2432,
- 6: 2437,
- 7: 2442,
- 8: 2447,
- 9: 2452,
- 10: 2457,
- 11: 2462,
- 12: 2467,
- 13: 2472,
- 14: 2484,
- }
-
- channel_5G_to_freq = {
- 183: 4915,
- 184: 4920,
- 185: 4925,
- 187: 4935,
- 188: 4940,
- 189: 4945,
- 192: 4960,
- 196: 4980,
- 7: 5035,
- 8: 5040,
- 9: 5045,
- 11: 5055,
- 12: 5060,
- 16: 5080,
- 34: 5170,
- 36: 5180,
- 38: 5190,
- 40: 5200,
- 42: 5210,
- 44: 5220,
- 46: 5230,
- 48: 5240,
- 50: 5250,
- 52: 5260,
- 56: 5280,
- 60: 5300,
- 64: 5320,
- 100: 5500,
- 104: 5520,
- 108: 5540,
- 112: 5560,
- 116: 5580,
- 120: 5600,
- 124: 5620,
- 128: 5640,
- 132: 5660,
- 136: 5680,
- 140: 5700,
- 149: 5745,
- 151: 5755,
- 153: 5765,
- 155: 5775,
- 157: 5785,
- 159: 5795,
- 161: 5805,
- 165: 5825,
- }
-
- channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
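-    # A worked example of the mapping above: 6 GHz channels are spaced 4 apart
-    # (20 MHz wide), so x=0 gives channel 1 -> 5955 MHz, x=9 gives channel
-    # 37 -> 6135 MHz, and x=58 gives channel 233 -> 7115 MHz.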
-
- channel_to_freq = {
- "2G": channel_2G_to_freq,
- "5G": channel_5G_to_freq,
- "6G": channel_6G_to_freq,
- }
-
-
-def _assert_on_fail_handler(func, assert_on_fail, *args, **kwargs):
-    """Wrapper function that handles the behavior of assert_on_fail.
-
- When assert_on_fail is True, let all test signals through, which can
- terminate test cases directly. When assert_on_fail is False, the wrapper
- raises no test signals and reports operation status by returning True or
- False.
-
- Args:
- func: The function to wrap. This function reports operation status by
- raising test signals.
- assert_on_fail: A boolean that specifies if the output of the wrapper
- is test signal based or return value based.
- args: Positional args for func.
- kwargs: Name args for func.
-
- Returns:
-        If assert_on_fail is False, returns True/False to signal operation
-        status; otherwise, returns nothing.
- """
- try:
- func(*args, **kwargs)
- if not assert_on_fail:
- return True
- except signals.TestSignal:
- if assert_on_fail:
- raise
- return False
-
-
-def match_networks(target_params, networks):
- """Finds the WiFi networks that match a given set of parameters in a list
- of WiFi networks.
-
- To be considered a match, the network should contain every key-value pair
- of target_params
-
- Args:
- target_params: A dict with 1 or more key-value pairs representing a Wi-Fi network.
- E.g { 'SSID': 'wh_ap1_5g', 'BSSID': '30:b5:c2:33:e4:47' }
- networks: A list of dict objects representing WiFi networks.
-
- Returns:
- The networks that match the target parameters.
- """
- results = []
- asserts.assert_true(
- target_params, "Expected networks object 'target_params' is empty"
- )
-    for n in networks:
-        add_network = True
-        for k, v in target_params.items():
-            if k not in n or n[k] != v:
-                add_network = False
-                break
-        if add_network:
-            results.append(n)
- return results
-
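-
-# Illustrative sketch of how match_networks is typically paired with a device
-# scan. The SSID and BSSID values below are hypothetical placeholders taken
-# from the docstring example, not real test-bed values.
-def _example_find_network_by_bssid(ad, ssid="wh_ap1_5g", bssid="30:b5:c2:33:e4:47"):
-    """Returns scan results matching the given SSID and BSSID, if any."""
-    scan_results = ad.droid.wifiGetScanResults()
-    return match_networks({WifiEnums.SSID_KEY: ssid, "BSSID": bssid}, scan_results)
-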
-
-def wifi_toggle_state(ad, new_state=None, assert_on_fail=True):
- """Toggles the state of wifi.
-
- Args:
- ad: An AndroidDevice object.
- new_state: Wifi state to set to. If None, opposite of the current state.
- assert_on_fail: If True, error checks in this function will raise test
- failure signals.
-
- Returns:
- If assert_on_fail is False, function returns True if the toggle was
- successful, False otherwise. If assert_on_fail is True, no return value.
- """
- return _assert_on_fail_handler(
- _wifi_toggle_state, assert_on_fail, ad, new_state=new_state
- )
-
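-
-# Illustrative sketch of the two calling conventions of wifi_toggle_state: with
-# the default assert_on_fail=True a failure raises a test signal, while with
-# assert_on_fail=False the caller branches on the returned bool instead. The
-# helper name below is hypothetical.
-def _example_enable_wifi_without_asserting(ad):
-    """Returns True if Wi-Fi was enabled, False if the toggle failed."""
-    if not wifi_toggle_state(ad, True, assert_on_fail=False):
-        ad.log.warning("Could not enable Wi-Fi; continuing without it.")
-        return False
-    return True
-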
-
-def _wifi_toggle_state(ad, new_state=None):
- """Toggles the state of wifi.
-
- TestFailure signals are raised when something goes wrong.
-
- Args:
- ad: An AndroidDevice object.
- new_state: The state to set Wi-Fi to. If None, opposite of the current
- state will be set.
- """
- if new_state is None:
- new_state = not ad.droid.wifiCheckState()
- elif new_state == ad.droid.wifiCheckState():
- # Check if the new_state is already achieved, so we don't wait for the
- # state change event by mistake.
- return
- ad.droid.wifiStartTrackingStateChange()
- ad.log.info("Setting Wi-Fi state to %s.", new_state)
- ad.ed.clear_all_events()
- # Setting wifi state.
- ad.droid.wifiToggleState(new_state)
- time.sleep(2)
- fail_msg = f"Failed to set Wi-Fi state to {new_state} on {ad.serial}."
- try:
- ad.ed.wait_for_event(
- wifi_constants.WIFI_STATE_CHANGED,
- lambda x: x["data"]["enabled"] == new_state,
- SHORT_TIMEOUT,
- )
- except Empty:
- asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg)
- finally:
- ad.droid.wifiStopTrackingStateChange()
-
-
-def reset_wifi(ad):
- """Clears all saved Wi-Fi networks on a device.
-
- This will turn Wi-Fi on.
-
- Args:
- ad: An AndroidDevice object.
-
- """
- networks = ad.droid.wifiGetConfiguredNetworks()
- if not networks:
- return
- removed = []
- for n in networks:
- if n["networkId"] not in removed:
- ad.droid.wifiForgetNetwork(n["networkId"])
- removed.append(n["networkId"])
- else:
- continue
- try:
- event = ad.ed.pop_event(
- wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT
- )
- except Empty:
- logging.warning("Could not confirm the removal of network %s.", n)
- # Check again to see if there's any network left.
- asserts.assert_true(
- not ad.droid.wifiGetConfiguredNetworks(),
- f"Failed to remove these configured Wi-Fi networks: {networks}",
- )
-
-
-def wifi_test_device_init(ad, country_code=WifiEnums.CountryCode.US):
- """Initializes an android device for wifi testing.
-
- 0. Make sure SL4A connection is established on the android device.
- 1. Disable location service's WiFi scan.
- 2. Turn WiFi on.
- 3. Clear all saved networks.
-    4. Set the country code (US by default).
- 5. Enable WiFi verbose logging.
- 6. Sync device time with computer time.
- 7. Turn off cellular data.
- 8. Turn off ambient display.
- """
- utils.require_sl4a([ad])
- ad.droid.wifiScannerToggleAlwaysAvailable(False)
- msg = "Failed to turn off location service's scan."
- asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
- wifi_toggle_state(ad, True)
- reset_wifi(ad)
- ad.droid.wifiEnableVerboseLogging(1)
- msg = "Failed to enable WiFi verbose logging."
- asserts.assert_equal(ad.droid.wifiGetVerboseLoggingLevel(), 1, msg)
- # We don't verify the following settings since they are not critical.
- # Set wpa_supplicant log level to EXCESSIVE.
- output = ad.adb.shell(
- "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME="
- "wlan0 log_level EXCESSIVE",
- ignore_status=True,
- )
- ad.log.info("wpa_supplicant log change status: %s", output)
- utils.sync_device_time(ad)
- ad.droid.telephonyToggleDataConnection(False)
- set_wifi_country_code(ad, country_code)
- utils.set_ambient_display(ad, False)
-
-
-def set_wifi_country_code(ad, country_code):
- """Sets the wifi country code on the device.
-
- Args:
- ad: An AndroidDevice object.
- country_code: 2 letter ISO country code
-
- Raises:
- An RpcException if unable to set the country code.
- """
- try:
- ad.adb.shell(f"cmd wifi force-country-code enabled {country_code}")
- except Exception as e:
-        ad.log.warning(
- f"Failed to set country code to {country_code}; defaulting to US. Error: {e}"
- )
- ad.droid.wifiSetCountryCode(WifiEnums.CountryCode.US)
-
-
-def start_wifi_connection_scan_and_return_status(ad):
- """
-    Starts a wifi connection scan and waits for results to become available
-    or a scan failure to be reported.
-
- Args:
- ad: An AndroidDevice object.
- Returns:
- True: if scan succeeded & results are available
- False: if scan failed
- """
- ad.ed.clear_all_events()
- ad.droid.wifiStartScan()
- try:
- events = ad.ed.pop_events(
- "WifiManagerScan(ResultsAvailable|Failure)", 60
- )
- except Empty:
- asserts.fail(
- "Wi-Fi scan results/failure did not become available within 60s."
- )
-    # If there are multiple matches, we check for at least one success.
- for event in events:
- if event["name"] == "WifiManagerScanResultsAvailable":
- return True
- elif event["name"] == "WifiManagerScanFailure":
- ad.log.debug("Scan failure received")
- return False
-
-
-def start_wifi_connection_scan_and_check_for_network(
- ad, network_ssid, max_tries=3
-):
- """
-    Starts connectivity scans and checks whether |network_ssid| appears in
-    the scan results. The method performs at most |max_tries| connectivity
-    scans to find the network.
-
- Args:
- ad: An AndroidDevice object.
- network_ssid: SSID of the network we are looking for.
- max_tries: Number of scans to try.
- Returns:
- True: if network_ssid is found in scan results.
- False: if network_ssid is not found in scan results.
- """
- start_time = time.time()
- for num_tries in range(max_tries):
- if start_wifi_connection_scan_and_return_status(ad):
- scan_results = ad.droid.wifiGetScanResults()
- match_results = match_networks(
- {WifiEnums.SSID_KEY: network_ssid}, scan_results
- )
- if len(match_results) > 0:
- ad.log.debug(
- f"Found network in {time.time() - start_time} seconds."
- )
- return True
- ad.log.debug(f"Did not find network in {time.time() - start_time} seconds.")
- return False
-
-
-def start_wifi_connection_scan_and_ensure_network_found(
- ad, network_ssid, max_tries=3
-):
- """
-    Starts connectivity scans and ensures |network_ssid| appears in the scan
-    results. The method performs at most |max_tries| connectivity scans to
-    find the network.
- This method asserts on failure!
-
- Args:
- ad: An AndroidDevice object.
- network_ssid: SSID of the network we are looking for.
- max_tries: Number of scans to try.
- """
- ad.log.info("Starting scans to ensure %s is present", network_ssid)
-    assert_msg = f"Failed to find {network_ssid} in scan results after {max_tries} tries"
- asserts.assert_true(
- start_wifi_connection_scan_and_check_for_network(
- ad, network_ssid, max_tries
- ),
- assert_msg,
- )
-
-
-def start_wifi_connection_scan_and_ensure_network_not_found(
- ad, network_ssid, max_tries=3
-):
- """
-    Starts connectivity scans and ensures |network_ssid| does not appear in
-    the scan results. The method performs at most |max_tries| connectivity
-    scans to look for the network.
- This method asserts on failure!
-
- Args:
- ad: An AndroidDevice object.
- network_ssid: SSID of the network we are looking for.
- max_tries: Number of scans to try.
- """
- ad.log.info("Starting scans to ensure %s is not present", network_ssid)
- assert_msg = (
-        f"Found {network_ssid} in scan results after {max_tries} tries"
- )
- asserts.assert_false(
- start_wifi_connection_scan_and_check_for_network(
- ad, network_ssid, max_tries
- ),
- assert_msg,
- )
-
-
-def _wait_for_connect_event(ad, ssid=None, id=None, tries=1):
- """Wait for a connect event on queue and pop when available.
-
- Args:
- ad: An Android device object.
- ssid: SSID of the network to connect to.
- id: Network Id of the network to connect to.
- tries: An integer that is the number of times to try before failing.
-
- Returns:
- A dict with details of the connection data, which looks like this:
- {
- 'time': 1485460337798,
- 'name': 'WifiNetworkConnected',
- 'data': {
- 'rssi': -27,
- 'is_24ghz': True,
- 'mac_address': '02:00:00:00:00:00',
- 'network_id': 1,
- 'BSSID': '30:b5:c2:33:d3:fc',
- 'ip_address': 117483712,
- 'link_speed': 54,
- 'supplicant_state': 'completed',
- 'hidden_ssid': False,
- 'SSID': 'wh_ap1_2g',
- 'is_5ghz': False}
- }
-
- """
- conn_result = None
-
-    # If both ssid and network id are None, just wait for any connect event.
- if id is None and ssid is None:
- for i in range(tries):
- try:
- conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
- break
- except Empty:
- pass
- else:
-        # If ssid or network id is specified, wait for a matching connect event.
- for i in range(tries):
- try:
- conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
- if id and conn_result["data"][WifiEnums.NETID_KEY] == id:
- break
- elif ssid and conn_result["data"][WifiEnums.SSID_KEY] == ssid:
- break
- except Empty:
- pass
-
- return conn_result
-
-
-def connect_to_wifi_network(
- ad,
- network,
- assert_on_fail=True,
- check_connectivity=True,
- hidden=False,
- num_of_scan_tries=DEFAULT_SCAN_TRIES,
- num_of_connect_tries=DEFAULT_CONNECT_TRIES,
-):
- """Connection logic for open and psk wifi networks.
-
- Args:
- ad: AndroidDevice to use for connection
- network: network info of the network to connect to
- assert_on_fail: If true, errors from wifi_connect will raise
- test failure signals.
-        check_connectivity: If True, validate internet connectivity after
-            connecting.
-        hidden: Whether the Wi-Fi network is hidden.
- num_of_scan_tries: The number of times to try scan
- interface before declaring failure.
- num_of_connect_tries: The number of times to try
- connect wifi before declaring failure.
- """
- if hidden:
- start_wifi_connection_scan_and_ensure_network_not_found(
- ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
- )
- else:
- start_wifi_connection_scan_and_ensure_network_found(
- ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
- )
- wifi_connect(
- ad,
- network,
- num_of_tries=num_of_connect_tries,
- assert_on_fail=assert_on_fail,
- check_connectivity=check_connectivity,
- )
-
-
-def wifi_connect(
- ad, network, num_of_tries=1, assert_on_fail=True, check_connectivity=True
-):
- """Connect an Android device to a wifi network.
-
- Initiate connection to a wifi network, wait for the "connected" event, then
- confirm the connected ssid is the one requested.
-
- This will directly fail a test if anything goes wrong.
-
- Args:
- ad: android_device object to initiate connection on.
- network: A dictionary representing the network to connect to. The
- dictionary must have the key "SSID".
- num_of_tries: An integer that is the number of times to try before
-            declaring failure. Default is 1.
- assert_on_fail: If True, error checks in this function will raise test
- failure signals.
-
- Returns:
- Returns a value only if assert_on_fail is false.
- Returns True if the connection was successful, False otherwise.
- """
- return _assert_on_fail_handler(
- _wifi_connect,
- assert_on_fail,
- ad,
- network,
- num_of_tries=num_of_tries,
- check_connectivity=check_connectivity,
- )
-
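-
-# Illustrative sketch of a network definition passed to wifi_connect. The SSID
-# and password values are hypothetical; the dict must contain at least the
-# "SSID" key, plus "password" for protected networks.
-def _example_connect_to_psk_network(ad):
-    network = {
-        WifiEnums.SSID_KEY: "example_ssid",
-        "password": "example_passphrase",
-    }
-    wifi_connect(ad, network, num_of_tries=3, check_connectivity=True)
-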
-
-def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True):
- """Connect an Android device to a wifi network.
-
- Initiate connection to a wifi network, wait for the "connected" event, then
- confirm the connected ssid is the one requested.
-
- This will directly fail a test if anything goes wrong.
-
- Args:
- ad: android_device object to initiate connection on.
- network: A dictionary representing the network to connect to. The
- dictionary must have the key "SSID".
- num_of_tries: An integer that is the number of times to try before
-            declaring failure. Default is 1.
- """
- asserts.assert_true(
- WifiEnums.SSID_KEY in network,
- f"Key '{WifiEnums.SSID_KEY}' must be present in network definition.",
- )
- ad.droid.wifiStartTrackingStateChange()
- expected_ssid = network[WifiEnums.SSID_KEY]
- ad.droid.wifiConnectByConfig(network)
- ad.log.info("Starting connection process to %s", expected_ssid)
- try:
- ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30)
- connect_result = _wait_for_connect_event(
- ad, ssid=expected_ssid, tries=num_of_tries
- )
- asserts.assert_true(
- connect_result,
- f"Failed to connect to Wi-Fi network {network} on {ad.serial}",
- )
- ad.log.debug("Wi-Fi connection result: %s.", connect_result)
- actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
- asserts.assert_equal(
- actual_ssid,
- expected_ssid,
- f"Connected to the wrong network on {ad.serial}.",
- )
- ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
-
- if check_connectivity:
- internet = validate_connection(ad, DEFAULT_PING_ADDR)
- if not internet:
- raise signals.TestFailure(
- f"Failed to connect to internet on {expected_ssid}"
- )
- except Empty:
- asserts.fail(
- f"Failed to start connection process to {network} on {ad.serial}"
- )
- except Exception as error:
- ad.log.error(
- "Failed to connect to %s with error %s", expected_ssid, error
- )
- raise signals.TestFailure(f"Failed to connect to {network} network")
-
- finally:
- ad.droid.wifiStopTrackingStateChange()
-
-
-def validate_connection(
- ad, ping_addr=DEFAULT_PING_ADDR, wait_time=15, ping_gateway=True
-):
- """Validate internet connection by pinging the address provided.
-
- Args:
- ad: android_device object.
- ping_addr: address on internet for pinging.
-        wait_time: seconds to wait for connectivity (DHCP) before pinging.
-        ping_gateway: if True, also ping the IPv4 default gateway when the
-            HTTP ping fails (Android 11 and above).
-
- Returns:
-        The ping result (truthy) if successful, False otherwise.
- """
- android_version = int(
- ad.adb.shell("getprop ro.vendor.build.version.release")
- )
- # wait_time to allow for DHCP to complete.
- for i in range(wait_time):
- if ad.droid.connectivityNetworkIsConnected():
- if (
- android_version > 10
- and ad.droid.connectivityGetIPv4DefaultGateway()
- ) or android_version < 11:
- break
- time.sleep(1)
- ping = False
- try:
- ping = ad.droid.httpPing(ping_addr)
- ad.log.info("Http ping result: %s.", ping)
-    except Exception:
- pass
- if android_version > 10 and not ping and ping_gateway:
- ad.log.info("Http ping failed. Pinging default gateway")
- gw = ad.droid.connectivityGetIPv4DefaultGateway()
- result = ad.adb.shell(f"ping -c 6 {gw}")
- ad.log.info(f"Default gateway ping result: {result}")
-        ping = "100% packet loss" not in result
- return ping
-
-
-# TODO(angli): This can only verify if an actual value is exactly the same.
-# Would be nice to be able to verify an actual value is one of several.
-def verify_wifi_connection_info(ad, expected_con):
- """Verifies that the information of the currently connected wifi network is
- as expected.
-
-    Args:
-        ad: An AndroidDevice object.
-        expected_con: A dict representing expected key-value pairs for the
-            wifi connection, e.g. {"SSID": "test_wifi"}.
- """
- current_con = ad.droid.wifiGetConnectionInfo()
- case_insensitive = ["BSSID", "supplicant_state"]
- ad.log.debug("Current connection: %s", current_con)
- for k, expected_v in expected_con.items():
- # Do not verify authentication related fields.
- if k == "password":
- continue
- msg = f"Field {k} does not exist in wifi connection info {current_con}."
- if k not in current_con:
- raise signals.TestFailure(msg)
- actual_v = current_con[k]
- if k in case_insensitive:
- actual_v = actual_v.lower()
- expected_v = expected_v.lower()
- msg = f"Expected {k} to be {expected_v}, actual {k} is {actual_v}."
- if actual_v != expected_v:
- raise signals.TestFailure(msg)
-
-
-def get_current_softap_capability(ad, callbackId, need_to_wait):
-    """Pops all SoftAP capability-changed events from the queue.
-
-    Args:
-        ad: An AndroidDevice object.
-        callbackId: Id of the callback registered for SoftAP events.
-        need_to_wait: If True, wait for a capability callback event before
-            popping all queued events.
-    Returns:
-        The most recently reported SoftAP capability.
- """
- eventStr = (
- wifi_constants.SOFTAP_CALLBACK_EVENT
- + str(callbackId)
- + wifi_constants.SOFTAP_CAPABILITY_CHANGED
- )
- ad.log.debug("softap capability dump from eventStr %s", eventStr)
-    capability = None
-    if need_to_wait:
- event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
- capability = event["data"]
-
- events = ad.ed.pop_all(eventStr)
- for event in events:
- capability = event["data"]
-
- return capability
-
-
-def get_ssrdumps(ad):
-    """Pulls dumps from the ssrdump directory.
-
- Args:
- ad: android device object.
- """
- logs = ad.get_file_names("/data/vendor/ssrdump/")
- if logs:
- ad.log.info("Pulling ssrdumps %s", logs)
- log_path = os.path.join(ad.device_log_path, f"SSRDUMPS_{ad.serial}")
- os.makedirs(log_path, exist_ok=True)
- ad.pull_files(logs, log_path)
- ad.adb.shell(
- "find /data/vendor/ssrdump/ -type f -delete", ignore_status=True
- )
-
-
-def start_pcap(pcap, wifi_band, test_name):
- """Start packet capture in monitor mode.
-
- Args:
- pcap: packet capture object
- wifi_band: '2g' or '5g' or 'dual'
- test_name: test name to be used for pcap file name
-
- Returns:
- Dictionary with wifi band as key and the tuple
- (pcap Process object, log directory) as the value
- """
- log_dir = os.path.join(
- context.get_current_context().get_full_output_path(), "PacketCapture"
- )
- os.makedirs(log_dir, exist_ok=True)
- if wifi_band == "dual":
- bands = [BAND_2G, BAND_5G]
- else:
- bands = [wifi_band]
- procs = {}
- for band in bands:
- proc = pcap.start_packet_capture(band, log_dir, test_name)
- procs[band] = (proc, os.path.join(log_dir, test_name))
- return procs
-
-
-def stop_pcap(pcap, procs, test_status=None):
- """Stop packet capture in monitor mode.
-
-    Since pcap logs in monitor mode can be very large, they are deleted when
-    not required: if 'test_status' is True the pcap files are deleted,
-    otherwise they are kept.
-
- Args:
- pcap: packet capture object
- procs: dictionary returned by start_pcap
- test_status: status of the test case
- """
- for proc, fname in procs.values():
- pcap.stop_packet_capture(proc)
-
- if test_status:
- shutil.rmtree(os.path.dirname(fname))
-
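-
-# Illustrative sketch of wrapping a test body with packet capture. `pcap` is
-# assumed to be a packet capture controller object and `run_test_body` a
-# hypothetical callable returning the test outcome.
-def _example_capture_during_test(pcap, run_test_body):
-    procs = start_pcap(pcap, "dual", "example_test")
-    passed = False
-    try:
-        passed = run_test_body()
-    finally:
-        # Deletes the capture files when the test passed, keeps them otherwise.
-        stop_pcap(pcap, procs, test_status=passed)
-    return passed
-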
-
-def start_cnss_diags(ads, cnss_diag_file, pixel_models):
- for ad in ads:
- start_cnss_diag(ad, cnss_diag_file, pixel_models)
-
-
-def start_cnss_diag(ad, cnss_diag_file, pixel_models):
- """Start cnss_diag to record extra wifi logs
-
- Args:
- ad: android device object.
- cnss_diag_file: cnss diag config file to push to device.
- pixel_models: pixel devices.
- """
- if ad.model not in pixel_models:
-        ad.log.info("Device does not support pixel logger collection")
- return
- if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
- prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
- else:
- prop = wifi_constants.CNSS_DIAG_PROP
- if ad.adb.getprop(prop) != "true":
- if not int(
- ad.adb.shell(
- f"ls -l {CNSS_DIAG_CONFIG_PATH}{CNSS_DIAG_CONFIG_FILE} | wc -l"
- )
- ):
- ad.adb.push(f"{cnss_diag_file} {CNSS_DIAG_CONFIG_PATH}")
- ad.adb.shell(
- "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
- ignore_status=True,
- )
- ad.adb.shell(f"setprop {prop} true", ignore_status=True)
-
-
-def stop_cnss_diags(ads, pixel_models):
- for ad in ads:
- stop_cnss_diag(ad, pixel_models)
-
-
-def stop_cnss_diag(ad, pixel_models):
- """Stops cnss_diag
-
- Args:
- ad: android device object.
- pixel_models: pixel devices.
- """
- if ad.model not in pixel_models:
-        ad.log.info("Device does not support pixel logger collection")
- return
- if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
- prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
- else:
- prop = wifi_constants.CNSS_DIAG_PROP
- ad.adb.shell(f"setprop {prop} false", ignore_status=True)
-
-
-def get_cnss_diag_log(ad):
-    """Pulls the cnss_diag logs from the wlan_logs directory.
-
- Args:
- ad: android device object.
- """
- logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
- if logs:
- ad.log.info("Pulling cnss_diag logs %s", logs)
- log_path = os.path.join(ad.device_log_path, f"CNSS_DIAG_{ad.serial}")
- os.makedirs(log_path, exist_ok=True)
- ad.pull_files(logs, log_path)
-
-
-def turn_location_off_and_scan_toggle_off(ad):
- """Turns off wifi location scans."""
- utils.set_location_service(ad, False)
- ad.droid.wifiScannerToggleAlwaysAvailable(False)
- msg = "Failed to turn off location service's scan."
- asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
diff --git a/packages/antlion/types.py b/packages/antlion/types.py
deleted file mode 100644
index 2a61e63..0000000
--- a/packages/antlion/types.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import Protocol, TypeAlias, TypeVar
-
-Json: TypeAlias = (
- dict[str, "Json"] | list["Json"] | str | int | float | bool | None
-)
-"""JSON serializable data."""
-
-ControllerConfig: TypeAlias = dict[str, Json]
-"""Mobly configuration specific to a controller.
-
-Defined in the Mobly config under TestBeds -> Controllers ->
-<MOBLY_CONTROLLER_CONFIG_NAME>.
-"""
-
-_T = TypeVar("_T")
-
-
-class Controller(Protocol[_T]):
- MOBLY_CONTROLLER_CONFIG_NAME: str
- """Key used to get this controller's config from the Mobly config."""
-
- def create(self, configs: list[ControllerConfig]) -> list[_T]:
- """Create controller objects from configurations.
-
- Args:
- configs: A list of serialized data like string/dict. Each element of
- the list is a configuration for a controller object.
-
- Returns:
- A list of controller objects.
- """
-
- def destroy(self, objects: list[_T]) -> None:
- """Destroys controller objects.
-
- Each controller object shall be properly cleaned up and all the
- resources held should be released, e.g. memory allocation, sockets, file
- handlers etc.
-
- Args:
- objects: A list of controller objects created by the create
- function.
- """
-
- def get_info(self, objects: list[_T]) -> list[Json]:
- """Gets info from the controller objects.
-
- The info will be included in test_summary.yaml under the key
- 'ControllerInfo'. Such information could include unique ID, version, or
- anything that could be useful for describing the test bed and debugging.
-
- Args:
- objects: A list of controller objects created by the create
- function.
-
- Returns:
- A list of json serializable objects: each represents the info of a
- controller object. The order of the info object should follow that
- of the input objects.
- """
- return []
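-
-
-# Illustrative sketch of a class satisfying Controller[str], where each
-# "device" is just its serial string. The class name and the "serial" config
-# key are hypothetical.
-class _ExampleSerialController:
-    MOBLY_CONTROLLER_CONFIG_NAME = "ExampleSerialDevice"
-
-    def create(self, configs: list[ControllerConfig]) -> list[str]:
-        return [str(config.get("serial", "")) for config in configs]
-
-    def destroy(self, objects: list[str]) -> None:
-        # Plain strings hold no resources to release.
-        pass
-
-    def get_info(self, objects: list[str]) -> list[Json]:
-        return [{"serial": serial} for serial in objects]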
diff --git a/packages/antlion/unit_tests/__init__.py b/packages/antlion/unit_tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/acts_adb_test.py b/packages/antlion/unit_tests/acts_adb_test.py
deleted file mode 100755
index bda6125..0000000
--- a/packages/antlion/unit_tests/acts_adb_test.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers import adb
-from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
-
-
-class MockJob(object):
- def __init__(self, exit_status=0, stderr="", stdout=""):
- self.exit_status = exit_status
- self.stderr = stderr
- self.stdout = stdout
-
-
-class MockAdbProxy(adb.AdbProxy):
- def __init__(self):
- pass
-
-
-class ADBTest(unittest.TestCase):
- """A class for testing antlion/controllers/adb.py"""
-
- def test__exec_cmd_failure_old_adb(self):
- mock_job = MockJob(exit_status=1, stderr="error: device not found")
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- with self.assertRaises(AdbError):
- MockAdbProxy()._exec_cmd(cmd)
-
- def test__exec_cmd_failure_new_adb(self):
- mock_job = MockJob(
- exit_status=1, stderr="error: device 'DEADBEEF' not found"
- )
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- with self.assertRaises(AdbError):
- MockAdbProxy()._exec_cmd(cmd)
-
- def test__exec_cmd_pass_basic(self):
- mock_job = MockJob(exit_status=0, stderr="DEADBEEF", stdout="FEEDACAB")
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- result = MockAdbProxy()._exec_cmd(cmd)
- self.assertEqual(result, "FEEDACAB")
-
- def test__exec_cmd_ignore_status(self):
- mock_job = MockJob(exit_status=0, stderr="DEADBEEF", stdout="")
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- result = MockAdbProxy()._exec_cmd(cmd, ignore_status=True)
- self.assertEqual(result, "DEADBEEF")
-
- def test__exec_cmd_pass_grep(self):
- mock_job = MockJob(exit_status=1, stderr="", stdout="foo")
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"grep foo"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- result = MockAdbProxy()._exec_cmd(cmd)
- self.assertEqual(result, "foo")
-
- def test__exec_cmd_failure_ret_nonzero(self):
- mock_job = MockJob(exit_status=1, stderr="error not related to adb")
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- with self.assertRaises(AdbCommandError):
- MockAdbProxy()._exec_cmd(cmd)
-
- def test__exec_cmd_raises_on_bind_error(self):
- """Tests _exec_cmd raises an AdbError on port forwarding failure."""
- mock_job = MockJob(
- exit_status=1,
- stderr="error: cannot bind listener: " "Address already in use",
- stdout="",
- )
- cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
- with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
- with self.assertRaises(AdbError):
- MockAdbProxy()._exec_cmd(cmd)
-
- def test__get_version_number_gets_version_number(self):
- """Tests the positive case for AdbProxy.get_version_number()."""
- proxy = MockAdbProxy()
- expected_version_number = 39
- proxy.version = lambda: (
- f"Android Debug Bridge version 1.0.{expected_version_number}\nblah"
- )
- self.assertEqual(expected_version_number, proxy.get_version_number())
-
- def test__get_version_number_raises_upon_parse_failure(self):
- """Tests the failure case for AdbProxy.get_version_number()."""
- proxy = MockAdbProxy()
- proxy.version = lambda: "Bad format"
- with self.assertRaises(AdbError):
- proxy.get_version_number()
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_android_device_test.py b/packages/antlion/unit_tests/acts_android_device_test.py
deleted file mode 100755
index a324640..0000000
--- a/packages/antlion/unit_tests/acts_android_device_test.py
+++ /dev/null
@@ -1,808 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import shutil
-import tempfile
-import unittest
-
-import mock
-from mobly import logger
-
-from antlion.controllers import android_device
-from antlion.controllers.android_lib import errors
-
-# Mock log path for a test run.
-MOCK_LOG_PATH = "/tmp/logs/MockTest/xx-xx-xx_xx-xx-xx/"
-
-# Mock start and end time of the adb cat.
-MOCK_ADB_EPOCH_BEGIN_TIME = 191000123
-MOCK_ADB_LOGCAT_BEGIN_TIME = logger.sanitize_filename(
- logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME)
-)
-MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
-
-MOCK_SERIAL = 1
-MOCK_RELEASE_BUILD_ID = "ABC1.123456.007"
-MOCK_DEV_BUILD_ID = "ABC-MR1"
-MOCK_NYC_BUILD_ID = "N4F27P"
-
-
-def get_mock_ads(num):
- """Generates a list of mock AndroidDevice objects.
-
- The serial number of each device will be integer 0 through num - 1.
-
- Args:
- num: An integer that is the number of mock AndroidDevice objects to
- create.
- """
- ads = []
- for i in range(num):
- ad = mock.MagicMock(name="AndroidDevice", serial=i, h_port=None)
- ad.ensure_screen_on = mock.MagicMock(return_value=True)
- ads.append(ad)
- return ads
-
-
-def mock_get_all_instances():
- return get_mock_ads(5)
-
-
-def mock_list_adb_devices():
- return [ad.serial for ad in get_mock_ads(5)]
-
-
-class MockAdbProxy(object):
- """Mock class that swaps out calls to adb with mock calls."""
-
- def __init__(
- self,
- serial,
- fail_br=False,
- fail_br_before_N=False,
- build_id=MOCK_RELEASE_BUILD_ID,
- return_value=None,
- ):
- self.serial = serial
- self.fail_br = fail_br
- self.fail_br_before_N = fail_br_before_N
- self.return_value = return_value
- self.return_multiple = False
- self.build_id = build_id
-
- def shell(self, params, ignore_status=False, timeout=60):
- if params == "id -u":
- return "root"
- elif params == "bugreportz":
- if self.fail_br:
- return "OMG I died!\n"
- return "OK:/path/bugreport.zip\n"
- elif params == "bugreportz -v":
- if self.fail_br_before_N:
- return "/system/bin/sh: bugreportz: not found"
- return "1.1"
- else:
- if self.return_multiple:
- return self.return_value.pop(0)
- else:
- return self.return_value
-
- def getprop(self, params):
- if params == "ro.build.id":
- return self.build_id
- elif params == "ro.build.version.incremental":
- return "123456789"
- elif params == "ro.build.type":
- return "userdebug"
- elif params == "ro.build.product" or params == "ro.product.name":
- return "FakeModel"
- elif params == "sys.boot_completed":
- return "1"
-
- def devices(self):
- return f"{str(self.serial)}\tdevice"
-
- def bugreport(self, params, timeout=android_device.BUG_REPORT_TIMEOUT):
- expected = os.path.join(
- logging.log_path,
- f"AndroidDevice{self.serial}",
- "AndroidDevice%s_%s.txt"
- % (
- self.serial,
- logger.normalize_log_line_timestamp(MOCK_ADB_LOGCAT_BEGIN_TIME),
- ),
- )
- assert expected in params, f"Expected '{expected}', got '{params}'."
-
- def __getattr__(self, name):
-        """All calls to non-existent functions in the adb proxy simply
-        return the adb command string.
- """
-
- def adb_call(*args, **kwargs):
- arg_str = " ".join(str(elem) for elem in args)
- return arg_str
-
- return adb_call
-
-
-class MockFastbootProxy:
-    """Mock class that swaps out calls to fastboot with mock calls."""
-
- def __init__(self, serial):
- self.serial = serial
-
- def devices(self):
- return "xxxx\tdevice\nyyyy\tdevice"
-
- def __getattr__(self, name):
- def fastboot_call(*args):
- arg_str = " ".join(str(elem) for elem in args)
- return arg_str
-
- return fastboot_call
-
-
-class ActsAndroidDeviceTest(unittest.TestCase):
- """This test class has unit tests for the implementation of everything
- under antlion.controllers.android_device.
- """
-
- def setUp(self):
- # Set log_path to logging since acts logger setup is not called.
- if not hasattr(logging, "log_path"):
- setattr(logging, "log_path", "/tmp/logs")
- # Creates a temp dir to be used by tests in this test class.
- self.tmp_dir = tempfile.mkdtemp()
-
- def tearDown(self):
- """Removes the temp dir."""
- shutil.rmtree(self.tmp_dir)
-
- # Tests for android_device module functions.
- # These tests use mock AndroidDevice instances.
-
- @mock.patch.object(
- android_device, "get_all_instances", new=mock_get_all_instances
- )
- @mock.patch.object(
- android_device, "list_adb_devices", new=mock_list_adb_devices
- )
- def test_create_with_pickup_all(self):
- pick_all_token = android_device.ANDROID_DEVICE_PICK_ALL_TOKEN
- actual_ads = android_device.create(pick_all_token)
- for actual, expected in zip(actual_ads, get_mock_ads(5)):
- self.assertEqual(actual.serial, expected.serial)
-
- def test_create_with_empty_config(self):
- expected_msg = android_device.ANDROID_DEVICE_EMPTY_CONFIG_MSG
- with self.assertRaisesRegex(
- errors.AndroidDeviceConfigError, expected_msg
- ):
- android_device.create([])
-
- def test_create_with_not_list_config(self):
- expected_msg = android_device.ANDROID_DEVICE_NOT_LIST_CONFIG_MSG
- with self.assertRaisesRegex(
- errors.AndroidDeviceConfigError, expected_msg
- ):
- android_device.create("HAHA")
-
- def test_get_device_success_with_serial(self):
- ads = get_mock_ads(5)
- expected_serial = 0
- ad = android_device.get_device(ads, serial=expected_serial)
- self.assertEqual(ad.serial, expected_serial)
-
- def test_get_device_success_with_serial_and_extra_field(self):
- ads = get_mock_ads(5)
- expected_serial = 1
- expected_h_port = 5555
- ads[1].h_port = expected_h_port
- ad = android_device.get_device(
- ads, serial=expected_serial, h_port=expected_h_port
- )
- self.assertEqual(ad.serial, expected_serial)
- self.assertEqual(ad.h_port, expected_h_port)
-
- def test_get_device_no_match(self):
- ads = get_mock_ads(5)
- expected_msg = (
- "Could not find a target device that matches condition"
- ": {'serial': 5}."
- )
- with self.assertRaisesRegex(ValueError, expected_msg):
- ad = android_device.get_device(ads, serial=len(ads))
-
- def test_get_device_too_many_matches(self):
- ads = get_mock_ads(5)
- target_serial = ads[1].serial = ads[0].serial
-        expected_msg = r"More than one device matched: \[0, 0\]"
- with self.assertRaisesRegex(ValueError, expected_msg):
- ad = android_device.get_device(ads, serial=target_serial)
-
- def test_start_services_on_ads(self):
- """Makes sure when an AndroidDevice fails to start some services, all
- AndroidDevice objects get cleaned up.
- """
- msg = "Some error happened."
- ads = get_mock_ads(3)
- ads[0].start_services = mock.MagicMock()
- ads[0].clean_up = mock.MagicMock()
- ads[1].start_services = mock.MagicMock()
- ads[1].clean_up = mock.MagicMock()
- ads[2].start_services = mock.MagicMock(
- side_effect=errors.AndroidDeviceError(msg)
- )
- ads[2].clean_up = mock.MagicMock()
- with self.assertRaisesRegex(errors.AndroidDeviceError, msg):
- android_device._start_services_on_ads(ads)
- ads[0].clean_up.assert_called_once_with()
- ads[1].clean_up.assert_called_once_with()
- ads[2].clean_up.assert_called_once_with()
-
- # Tests for android_device.AndroidDevice class.
- # These tests mock out any interaction with the OS and real android device
-    # in AndroidDevice.
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_AndroidDevice_instantiation(self, MockFastboot, MockAdbProxy):
- """Verifies the AndroidDevice object's basic attributes are correctly
- set after instantiation.
- """
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- self.assertEqual(ad.serial, 1)
- self.assertEqual(ad.model, "fakemodel")
- self.assertIsNone(ad.adb_logcat_process)
- expected_lp = os.path.join(
- logging.log_path, f"AndroidDevice{MOCK_SERIAL}"
- )
- self.assertEqual(ad.log_path, expected_lp)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_AndroidDevice_build_info_release(self, MockFastboot, MockAdbProxy):
-        """Verifies the AndroidDevice object's build info is parsed correctly
-        for release builds.
-        """
- ad = android_device.AndroidDevice(serial=1)
- build_info = ad.build_info
- self.assertEqual(build_info["build_id"], "ABC1.123456.007")
- self.assertEqual(build_info["build_type"], "userdebug")
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_DEV_BUILD_ID),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_AndroidDevice_build_info_dev(self, MockFastboot, MockAdbProxy):
-        """Verifies the AndroidDevice object's build info falls back to the
-        incremental version for non-release (dev) builds.
-        """
- ad = android_device.AndroidDevice(serial=1)
- build_info = ad.build_info
- self.assertEqual(build_info["build_id"], "123456789")
- self.assertEqual(build_info["build_type"], "userdebug")
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_NYC_BUILD_ID),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_AndroidDevice_build_info_nyc(self, MockFastboot, MockAdbProxy):
- """Verifies the AndroidDevice object's build id is set correctly for
- NYC releases.
- """
- ad = android_device.AndroidDevice(serial=1)
- build_info = ad.build_info
- self.assertEqual(build_info["build_id"], MOCK_NYC_BUILD_ID)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- @mock.patch("os.makedirs")
- @mock.patch("antlion.utils.exe_cmd")
- @mock.patch(
- "antlion.controllers.android_device.AndroidDevice.device_log_path",
- new_callable=mock.PropertyMock,
- )
- def test_AndroidDevice_take_bug_report(
- self,
- mock_log_path,
- exe_mock,
- mock_makedirs,
- FastbootProxy,
- MockAdbProxy,
- ):
- """Verifies AndroidDevice.take_bug_report calls the correct adb command
- and writes the bugreport file to the correct path.
- """
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- mock_log_path.return_value = os.path.join(
- logging.log_path, f"AndroidDevice{ad.serial}"
- )
- ad.take_bug_report("test_something", 234325.32)
- mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL, fail_br=True),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- @mock.patch("os.makedirs")
- @mock.patch("antlion.utils.exe_cmd")
- @mock.patch(
- "antlion.controllers.android_device.AndroidDevice.device_log_path",
- new_callable=mock.PropertyMock,
- )
- def test_AndroidDevice_take_bug_report_fail(self, mock_log_path, *_):
- """Verifies AndroidDevice.take_bug_report writes out the correct message
- when taking bugreport fails.
- """
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- mock_log_path.return_value = os.path.join(
- logging.log_path, f"AndroidDevice{ad.serial}"
- )
- expected_msg = "Failed to take bugreport on 1: OMG I died!"
- with self.assertRaisesRegex(errors.AndroidDeviceError, expected_msg):
- ad.take_bug_report("test_something", 4346343.23)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL, fail_br_before_N=True),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- @mock.patch("os.makedirs")
- @mock.patch("antlion.utils.exe_cmd")
- @mock.patch(
- "antlion.controllers.android_device.AndroidDevice.device_log_path",
- new_callable=mock.PropertyMock,
- )
- def test_AndroidDevice_take_bug_report_fallback(
- self,
- mock_log_path,
- exe_mock,
- mock_makedirs,
- FastbootProxy,
- MockAdbProxy,
- ):
- """Verifies AndroidDevice.take_bug_report falls back to traditional
- bugreport on builds that do not have bugreportz.
- """
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- mock_log_path.return_value = os.path.join(
- logging.log_path, f"AndroidDevice{ad.serial}"
- )
- ad.take_bug_report("test_something", MOCK_ADB_EPOCH_BEGIN_TIME)
- mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- @mock.patch("antlion.libs.proc.process.Process")
- def test_AndroidDevice_start_adb_logcat(
- self, proc_mock, FastbootProxy, MockAdbProxy
- ):
- """Verifies the AndroidDevice method start_adb_logcat. Checks that the
- underlying logcat process is started properly and correct warning msgs
- are generated.
- """
- with mock.patch(
- (
- "antlion.controllers.android_lib.logcat."
- "create_logcat_keepalive_process"
- ),
- return_value=proc_mock,
- ) as create_proc_mock:
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.start_adb_logcat()
- # Verify start did the correct operations.
- self.assertTrue(ad.adb_logcat_process)
- log_dir = f"AndroidDevice{ad.serial}"
- create_proc_mock.assert_called_with(ad.serial, log_dir, "-b all")
- proc_mock.start.assert_called_with()
- # Expect warning msg if start is called back to back.
- expected_msg = "Android device .* already has a running adb logcat"
- proc_mock.is_running.return_value = True
- with self.assertLogs(level="WARNING") as log:
- ad.start_adb_logcat()
- self.assertRegex(log.output[0], expected_msg)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.android_lib.logcat."
- "create_logcat_keepalive_process"
- )
- def test_AndroidDevice_start_adb_logcat_with_user_param(
- self, create_proc_mock, FastbootProxy, MockAdbProxy
- ):
- """Verifies that start_adb_logcat generates the correct adb logcat
- command if adb_logcat_param is specified.
- """
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb_logcat_param = "-b radio"
- ad.start_adb_logcat()
- # Verify that create_logcat_keepalive_process is called with the
- # correct command.
- log_dir = f"AndroidDevice{ad.serial}"
- create_proc_mock.assert_called_with(ad.serial, log_dir, "-b radio")
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- @mock.patch("antlion.libs.proc.process.Process")
- def test_AndroidDevice_stop_adb_logcat(
- self, proc_mock, FastbootProxy, MockAdbProxy
- ):
- """Verifies the AndroidDevice method stop_adb_logcat. Checks that the
- underlying logcat process is stopped properly and correct warning msgs
- are generated.
- """
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb_logcat_process = proc_mock
- # Expect warning msg if stop is called before start.
- expected_msg = "Android device .* does not have an ongoing adb logcat"
- proc_mock.is_running.return_value = False
- with self.assertLogs(level="WARNING") as log:
- ad.stop_adb_logcat()
- self.assertRegex(log.output[0], expected_msg)
-
- # Verify the underlying process is stopped.
- proc_mock.is_running.return_value = True
- ad.stop_adb_logcat()
- proc_mock.stop.assert_called_with()
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_get_apk_process_id_process_cannot_find(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb.return_value = "does_not_contain_value"
- self.assertEqual(None, ad.get_package_pid("some_package"))
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_get_apk_process_id_process_exists_second_try(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb.return_multiple = True
- ad.adb.return_value = ["", "system 1 2 3 4 S com.some_package"]
- self.assertEqual(1, ad.get_package_pid("some_package"))
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_get_apk_process_id_bad_return(self, fastboot_proxy, adb_proxy):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb.return_value = "bad_return_index_error"
- self.assertEqual(None, ad.get_package_pid("some_package"))
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
-    def test_get_apk_process_id_bad_return_value(self, fastboot_proxy, adb_proxy):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb.return_value = "bad return value error"
- self.assertEqual(None, ad.get_package_pid("some_package"))
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_ensure_verity_enabled_only_system_enabled(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- root_user_id = "0"
-
- ad.adb.get_user_id = mock.MagicMock()
- ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(
-            side_effect=["", "2"]  # [system.verified, vendor.verified]
-        )
- ad.adb.ensure_user = mock.MagicMock()
- ad.reboot = mock.MagicMock()
- ad.ensure_verity_enabled()
- ad.reboot.assert_called_once()
-
- ad.adb.ensure_user.assert_called_with(root_user_id)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_ensure_verity_enabled_only_vendor_enabled(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- root_user_id = "0"
-
- ad.adb.get_user_id = mock.MagicMock()
- ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(
-            side_effect=["2", ""]  # [system.verified, vendor.verified]
-        )
- ad.adb.ensure_user = mock.MagicMock()
- ad.reboot = mock.MagicMock()
-
- ad.ensure_verity_enabled()
-
- ad.reboot.assert_called_once()
- ad.adb.ensure_user.assert_called_with(root_user_id)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_ensure_verity_enabled_both_enabled_at_start(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- root_user_id = "0"
-
- ad.adb.get_user_id = mock.MagicMock()
- ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(
-            side_effect=["2", "2"]  # [system.verified, vendor.verified]
-        )
- ad.adb.ensure_user = mock.MagicMock()
- ad.reboot = mock.MagicMock()
- ad.ensure_verity_enabled()
-
-        ad.reboot.assert_not_called()
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_ensure_verity_disabled_system_already_disabled(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- root_user_id = "0"
-
- ad.adb.get_user_id = mock.MagicMock()
- ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(
-            side_effect=["2", ""]  # [system.verified, vendor.verified]
-        )
- ad.adb.ensure_user = mock.MagicMock()
- ad.reboot = mock.MagicMock()
- ad.ensure_verity_disabled()
-
- ad.reboot.assert_called_once()
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_ensure_verity_disabled_vendor_already_disabled(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- root_user_id = "0"
-
- ad.adb.get_user_id = mock.MagicMock()
- ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(
-            side_effect=["", "2"]  # [system.verified, vendor.verified]
-        )
- ad.adb.ensure_user = mock.MagicMock()
- ad.reboot = mock.MagicMock()
-
- ad.ensure_verity_disabled()
-
- ad.reboot.assert_called_once()
- ad.adb.ensure_user.assert_called_with(root_user_id)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_ensure_verity_disabled_disabled_at_start(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- root_user_id = "0"
-
- ad.adb.get_user_id = mock.MagicMock()
- ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(
-            side_effect=["", ""]  # [system.verified, vendor.verified]
-        )
- ad.adb.ensure_user = mock.MagicMock()
- ad.reboot = mock.MagicMock()
-
- ad.ensure_verity_disabled()
-
-        ad.reboot.assert_not_called()
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_push_system_file(self, fastboot_proxy, adb_proxy):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.ensure_verity_disabled = mock.MagicMock()
- ad.adb.remount = mock.MagicMock()
- ad.adb.push = mock.MagicMock()
-
- ret = ad.push_system_file("asdf", "jkl")
- self.assertTrue(ret)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_push_system_file_returns_false_on_error(
- self, fastboot_proxy, adb_proxy
- ):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.ensure_verity_disabled = mock.MagicMock()
- ad.adb.remount = mock.MagicMock()
- ad.adb.push = mock.MagicMock(return_value="error")
-
- ret = ad.push_system_file("asdf", "jkl")
- self.assertFalse(ret)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_get_my_current_focus_window_return_empty_string(self, *_):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb.return_value = ""
-
- ret = ad.get_my_current_focus_window()
-
- self.assertEqual("", ret)
-
- @mock.patch(
- "antlion.controllers.adb.AdbProxy",
- return_value=MockAdbProxy(MOCK_SERIAL),
- )
- @mock.patch(
- "antlion.controllers.fastboot.FastbootProxy",
- return_value=MockFastbootProxy(MOCK_SERIAL),
- )
- def test_get_my_current_focus_window_return_current_window(self, *_):
- ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
- ad.adb.return_value = (
- "mCurrentFocus=Window{a247ded u0 NotificationShade}"
- )
-
- ret = ad.get_my_current_focus_window()
-
- self.assertEqual("NotificationShade", ret)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_asserts_test.py b/packages/antlion/unit_tests/acts_asserts_test.py
deleted file mode 100755
index 1d3281d..0000000
--- a/packages/antlion/unit_tests/acts_asserts_test.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from mobly import asserts, signals
-
-MSG_EXPECTED_EXCEPTION = "This is an expected exception."
-
-
-class ActsAssertsTest(unittest.TestCase):
- """Verifies that asserts.xxx functions raise the correct test signals."""
-
- def test_assert_false(self):
- asserts.assert_false(False, MSG_EXPECTED_EXCEPTION)
-        with self.assertRaisesRegex(
- signals.TestFailure, MSG_EXPECTED_EXCEPTION
- ):
- asserts.assert_false(True, MSG_EXPECTED_EXCEPTION)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_confidence_test_config.json b/packages/antlion/unit_tests/acts_confidence_test_config.json
deleted file mode 100644
index 6a64b7c..0000000
--- a/packages/antlion/unit_tests/acts_confidence_test_config.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "testbed": [
- {
- "_description": "ACTS confidence test bed, no device needed.",
- "name": "Confidence",
- "icecream": 42,
- "MagicDevice": [
- "Magic!"
- ]
- }
- ],
- "logpath": "/tmp/logs",
- "testpaths": [
- "./"
- ],
- "icecream": "mememe",
- "extra_param": "haha"
-}
diff --git a/packages/antlion/unit_tests/acts_context_test.py b/packages/antlion/unit_tests/acts_context_test.py
deleted file mode 100755
index 0634826..0000000
--- a/packages/antlion/unit_tests/acts_context_test.py
+++ /dev/null
@@ -1,225 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import TestCase
-
-from mock import Mock, patch
-
-from antlion import context
-from antlion.context import (
- RootContext,
- TestCaseContext,
- TestClassContext,
- TestContext,
- _update_test_case_context,
- _update_test_class_context,
- get_current_context,
-)
-from antlion.event.event import (
- TestCaseBeginEvent,
- TestCaseEndEvent,
- TestClassBeginEvent,
- TestClassEndEvent,
-)
-
-LOGGING = "antlion.context.logging"
-
-
-def reset_context():
- context._contexts = [RootContext()]
-
-
-TEST_CASE = "test_case_name"
-
-
-class TestClass:
- pass
-
-
-class ModuleTest(TestCase):
- """Unit tests for the context module."""
-
- def test_update_test_class_context_for_test_class_begin(self):
- event = Mock(spec=TestClassBeginEvent)
- event.test_class = Mock()
-
- _update_test_class_context(event)
- self.assertIsInstance(get_current_context(), TestClassContext)
- reset_context()
-
- def test_update_test_class_context_for_test_class_end(self):
- event = Mock(spec=TestClassBeginEvent)
- event.test_class = Mock()
- event2 = Mock(spec=TestClassEndEvent)
- event2.test_class = Mock()
-
- _update_test_class_context(event)
- _update_test_class_context(event2)
-
- self.assertIsInstance(get_current_context(), RootContext)
- reset_context()
-
- def test_update_test_case_context_for_test_case_begin(self):
- event = Mock(spec=TestClassBeginEvent)
- event.test_class = Mock()
- event2 = Mock(spec=TestCaseBeginEvent)
- event2.test_class = Mock()
- event2.test_case = Mock()
-
- _update_test_class_context(event)
- _update_test_case_context(event2)
-
- self.assertIsInstance(get_current_context(), TestCaseContext)
- reset_context()
-
- def test_update_test_case_context_for_test_case_end(self):
- event = Mock(spec=TestClassBeginEvent)
- event.test_class = Mock()
- event2 = Mock(spec=TestCaseBeginEvent)
- event2.test_class = Mock()
- event2.test_case = Mock()
- event3 = Mock(spec=TestCaseEndEvent)
- event3.test_class = Mock()
- event3.test_case = Mock()
-
- _update_test_class_context(event)
- _update_test_case_context(event2)
- _update_test_case_context(event3)
-
- self.assertIsInstance(get_current_context(), TestClassContext)
- reset_context()
-
-
-class TestContextTest(TestCase):
- """Unit tests for the TestContext class."""
-
- @patch(LOGGING)
- def test_get_base_output_path_uses_default(self, logging):
- context = TestContext()
-
- self.assertEqual(context.get_base_output_path(), logging.log_path)
-
- @patch(LOGGING)
- def test_add_base_path_overrides_default(self, _):
- context = TestContext()
- mock_path = Mock()
-
- context.add_base_output_path("basepath", mock_path)
-
- self.assertEqual(context.get_base_output_path("basepath"), mock_path)
-
- def test_get_subcontext_returns_empty_string_by_default(self):
- context = TestContext()
-
- self.assertEqual(context.get_subcontext(), "")
-
- def test_add_subcontext_sets_correct_path(self):
- context = TestContext()
- mock_path = Mock()
-
- context.add_subcontext("subcontext", mock_path)
-
- self.assertEqual(context.get_subcontext("subcontext"), mock_path)
-
- @patch(LOGGING)
- @patch("os.makedirs")
- def test_get_full_output_path_returns_correct_path(self, *_):
- context = TestClassContext(TestClass())
- context.add_base_output_path("foo", "base/path")
- context.add_subcontext("foo", "subcontext")
-
- full_path = "base/path/TestClass/subcontext"
- self.assertEqual(context.get_full_output_path("foo"), full_path)
-
- def test_identifier_not_implemented(self):
- context = TestContext()
-
- self.assertRaises(NotImplementedError, lambda: context.identifier)
-
-
-class TestClassContextTest(TestCase):
- """Unit tests for the TestClassContext class."""
-
- def test_init_attributes(self):
- test_class = Mock()
- context = TestClassContext(test_class)
-
- self.assertEqual(context.test_class, test_class)
-
- def test_get_class_name(self):
- class TestClass:
- pass
-
- test_class = TestClass()
- context = TestClassContext(test_class)
-
- self.assertEqual(context.test_class_name, TestClass.__name__)
-
- def test_context_dir_is_class_name(self):
- class TestClass:
- pass
-
- test_class = TestClass()
- context = TestClassContext(test_class)
-
- self.assertEqual(context._get_default_context_dir(), TestClass.__name__)
-
- def test_identifier_is_class_name(self):
- class TestClass:
- pass
-
- test_class = TestClass()
- context = TestClassContext(test_class)
-
- self.assertEqual(context.identifier, TestClass.__name__)
-
-
-class TestCaseContextTest(TestCase):
- """Unit tests for the TestCaseContext class."""
-
- def test_init_attributes(self):
- test_class = Mock()
- test_case = TEST_CASE
- context = TestCaseContext(test_class, test_case)
-
- self.assertEqual(context.test_class, test_class)
- self.assertEqual(context.test_case, test_case)
- self.assertEqual(context.test_case_name, test_case)
-
- def test_get_class_name(self):
- test_class = TestClass()
- context = TestCaseContext(test_class, TEST_CASE)
-
- self.assertEqual(context.test_class_name, TestClass.__name__)
-
- def test_context_dir_is_class_and_test_case_name(self):
- test_class = TestClass()
- context = TestCaseContext(test_class, TEST_CASE)
-
- context_dir = f"{TestClass.__name__}/{TEST_CASE}"
- self.assertEqual(context._get_default_context_dir(), context_dir)
-
- def test_identifier_is_class_and_test_case_name(self):
- test_class = TestClass()
- context = TestCaseContext(test_class, TEST_CASE)
-
- identifier = f"{TestClass.__name__}.{TEST_CASE}"
- self.assertEqual(context.identifier, identifier)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_error_test.py b/packages/antlion/unit_tests/acts_error_test.py
deleted file mode 100755
index 2431bd3..0000000
--- a/packages/antlion/unit_tests/acts_error_test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-
-from antlion import error
-
-
-class ActsErrorTest(unittest.TestCase):
- def test_assert_key_pulled_from_acts_error_code(self):
- e = error.ActsError()
- self.assertEqual(e.error_code, 100)
-
- def test_assert_description_pulled_from_docstring(self):
- e = error.ActsError()
- self.assertEqual(e.error_doc, "Base Acts Error")
-
- def test_error_without_args(self):
- e = error.ActsError()
- self.assertEqual(e.details, "")
-
- def test_error_with_args(self):
- args = ("hello",)
- e = error.ActsError(*args)
- self.assertEqual(e.details, "hello")
-
- def test_error_with_kwargs(self):
- e = error.ActsError(key="value")
- self.assertIn(("key", "value"), e.extras.items())
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_import_unit_test.py b/packages/antlion/unit_tests/acts_import_unit_test.py
deleted file mode 100755
index b7505bf..0000000
--- a/packages/antlion/unit_tests/acts_import_unit_test.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib.machinery
-import os
-import re
-import unittest
-import uuid
-
-
-def import_module(name, path):
- return importlib.machinery.SourceFileLoader(name, path).load_module()
-
-
-def import_acts():
- return importlib.import_module("antlion")
-
-
-PY_FILE_REGEX = re.compile(r".+\.py$")
-
-DENYLIST = []
-
-DENYLIST_DIRECTORIES = []
-
-
-class ActsImportUnitTest(unittest.TestCase):
- """Test that all acts framework imports work."""
-
- def test_import_acts_successful(self):
- """Test that importing ACTS works."""
- acts = import_acts()
- self.assertIsNotNone(acts)
-
- # TODO(b/190659975): Re-enable once permission issue is resolved.
- @unittest.skip("Permission error: b/190659975")
- def test_import_framework_successful(self):
- """Dynamically test all imports from the framework."""
- acts = import_acts()
-        if hasattr(acts, "__path__") and len(acts.__path__) > 0:
-            acts_path = acts.__path__[0]
-        else:
-            acts_path = os.path.dirname(acts.__file__)
-
- for root, _, files in os.walk(acts_path):
- for f in files:
- full_path = os.path.join(root, f)
- if any(full_path.endswith(e) for e in DENYLIST) or any(
- e in full_path for e in DENYLIST_DIRECTORIES
- ):
- continue
-
- path = os.path.relpath(os.path.join(root, f), os.getcwd())
-
- if PY_FILE_REGEX.match(full_path):
- with self.subTest(msg=f"import {path}"):
- fake_module_name = str(uuid.uuid4())
- module = import_module(fake_module_name, path)
- self.assertIsNotNone(module)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_job_test.py b/packages/antlion/unit_tests/acts_job_test.py
deleted file mode 100755
index decebcc..0000000
--- a/packages/antlion/unit_tests/acts_job_test.py
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-import unittest
-
-import mock
-
-from antlion.libs.proc import job
-from antlion.runner import CalledProcessError
-
-if os.name == "posix" and sys.version_info[0] < 3:
- import subprocess32 as subprocess
-else:
- import subprocess
-
-
-class FakePopen(object):
- """A fake version of the object returned from subprocess.Popen()."""
-
- def __init__(
- self, stdout=None, stderr=None, returncode=0, will_timeout=False
- ):
- self.returncode = returncode
- self._stdout = bytes(stdout, "utf-8") if stdout is not None else bytes()
- self._stderr = bytes(stderr, "utf-8") if stderr is not None else bytes()
- self._will_timeout = will_timeout
-
- def communicate(self, timeout=None):
- if self._will_timeout:
- raise subprocess.TimeoutExpired(
- -1, "Timed out according to test logic"
- )
- return self._stdout, self._stderr
-
- def kill(self):
- pass
-
- def wait(self):
- pass
-
-
-class JobTestCases(unittest.TestCase):
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(stdout="TEST\n"),
- )
- def test_run_success(self, popen):
- """Test running a simple shell command."""
- result = job.run("echo TEST")
- self.assertTrue(result.stdout.startswith("TEST"))
-
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(stderr="TEST\n"),
- )
- def test_run_stderr(self, popen):
- """Test that we can read process stderr."""
- result = job.run("echo TEST 1>&2")
- self.assertEqual(len(result.stdout), 0)
- self.assertTrue(result.stderr.startswith("TEST"))
- self.assertFalse(result.stdout)
-
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(returncode=1),
- )
- def test_run_error(self, popen):
- """Test that we raise on non-zero exit statuses."""
- self.assertRaises(CalledProcessError, job.run, "exit 1")
-
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(returncode=1),
- )
- def test_run_with_ignored_error(self, popen):
- """Test that we can ignore exit status on request."""
- result = job.run("exit 1", ignore_status=True)
- self.assertEqual(result.exit_status, 1)
-
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(will_timeout=True),
- )
- def test_run_timeout(self, popen):
- """Test that we correctly implement command timeouts."""
- self.assertRaises(
- CalledProcessError, job.run, "sleep 5", timeout_sec=0.1
- )
-
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(stdout="TEST\n"),
- )
- def test_run_no_shell(self, popen):
- """Test that we handle running without a wrapping shell."""
- result = job.run(["echo", "TEST"])
- self.assertTrue(result.stdout.startswith("TEST"))
-
- @mock.patch(
- "antlion.libs.proc.job.subprocess.Popen",
- return_value=FakePopen(stdout="TEST\n"),
- )
- def test_job_env(self, popen):
- """Test that we can set environment variables correctly."""
- test_env = {"MYTESTVAR": "20"}
- result = job.run("printenv", env=test_env.copy())
- popen.assert_called_once()
- _, kwargs = popen.call_args
- self.assertTrue("env" in kwargs)
- self.assertEqual(kwargs["env"], test_env)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_logger_test.py b/packages/antlion/unit_tests/acts_logger_test.py
deleted file mode 100755
index 61e1c35..0000000
--- a/packages/antlion/unit_tests/acts_logger_test.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import time
-import unittest
-
-from mobly import logger
-
-
-class ActsLoggerTest(unittest.TestCase):
- """Verifies code in antlion.logger module."""
-
- def test_epoch_to_log_line_timestamp(self):
- os.environ["TZ"] = "US/Pacific"
- time.tzset()
- actual_stamp = logger.epoch_to_log_line_timestamp(1469134262116)
- self.assertEqual("2016-07-21 13:51:02.116", actual_stamp)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/acts_sanity_test_config.json b/packages/antlion/unit_tests/acts_sanity_test_config.json
deleted file mode 100644
index e721333..0000000
--- a/packages/antlion/unit_tests/acts_sanity_test_config.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "testbed": [
- {
- "_description": "ACTS sanity test bed, no device needed.",
- "name": "Sanity",
- "icecream": 42,
- "MagicDevice": [
- "Magic!"
- ]
- }
- ],
- "logpath": "/tmp/logs",
- "testpaths": [
- "./"
- ],
- "icecream": "mememe",
- "extra_param": "haha"
-}
diff --git a/packages/antlion/unit_tests/acts_sniffer_test_config.json b/packages/antlion/unit_tests/acts_sniffer_test_config.json
deleted file mode 100644
index 9e04d34..0000000
--- a/packages/antlion/unit_tests/acts_sniffer_test_config.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
- "testbed": [
- {
- "_description": "ACTS sniffer sanity test bed, no device needed.",
- "name": "SnifferSanity",
- "Sniffer": [
- {
- "Type": "local",
- "SubType": "tcpdump",
- "Interface": "wlan0",
- "BaseConfigs": {
- "channel": 6
- }
- }
- ]
- }
- ],
- "logpath": "/tmp/logs",
- "testpaths": [
- "./"
- ]
-}
diff --git a/packages/antlion/unit_tests/acts_utils_test.py b/packages/antlion/unit_tests/acts_utils_test.py
deleted file mode 100755
index c1fc461..0000000
--- a/packages/antlion/unit_tests/acts_utils_test.py
+++ /dev/null
@@ -1,322 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import logging
-import subprocess
-import unittest
-
-import mock
-
-from antlion import utils
-from antlion.capabilities.ssh import SSHConfig, SSHResult
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
-from antlion.controllers.utils_lib.ssh.connection import SshConnection
-from antlion.libs.proc import job
-
-PROVISIONED_STATE_GOOD = 1
-
-MOCK_ENO1_IP_ADDRESSES = """100.127.110.79
-2401:fa00:480:7a00:8d4f:85ff:cc5c:787e
-2401:fa00:480:7a00:459:b993:fcbf:1419
-fe80::c66d:3c75:2cec:1d72"""
-
-MOCK_WLAN1_IP_ADDRESSES = ""
-
-FUCHSIA_INTERFACES = {
- "id": "1",
- "result": [
- {
- "id": 1,
- "name": "lo",
- "ipv4_addresses": [
- [127, 0, 0, 1],
- ],
- "ipv6_addresses": [
- [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
- ],
- "online": True,
- "mac": [0, 0, 0, 0, 0, 0],
- },
- {
- "id": 2,
- "name": "eno1",
- "ipv4_addresses": [
- [100, 127, 110, 79],
- ],
- "ipv6_addresses": [
- list(ipaddress.IPv6Address("fe80::c66d:3c75:2cec:1d72").packed),
- list(
- ipaddress.IPv6Address(
- "2401:fa00:480:7a00:8d4f:85ff:cc5c:787e"
- ).packed
- ),
- list(
- ipaddress.IPv6Address(
- "2401:fa00:480:7a00:459:b993:fcbf:1419"
- ).packed
- ),
- ],
- "online": True,
- "mac": [0, 224, 76, 5, 76, 229],
- },
- {
- "id": 3,
- "name": "wlanxc0",
- "ipv4_addresses": [],
- "ipv6_addresses": [
- list(ipaddress.IPv6Address("fe80::60ff:5d60:34fd:fdf3").packed),
- list(ipaddress.IPv6Address("fe80::4607:bff:fe76:7ec0").packed),
- ],
- "online": False,
- "mac": [68, 7, 11, 118, 126, 192],
- },
- ],
- "error": None,
-}
-
-CORRECT_FULL_IP_LIST = {
- "ipv4_private": [],
- "ipv4_public": ["100.127.110.79"],
- "ipv6_link_local": ["fe80::c66d:3c75:2cec:1d72"],
- "ipv6_private_local": [],
- "ipv6_public": [
- "2401:fa00:480:7a00:8d4f:85ff:cc5c:787e",
- "2401:fa00:480:7a00:459:b993:fcbf:1419",
- ],
-}
-
-CORRECT_EMPTY_IP_LIST = {
- "ipv4_private": [],
- "ipv4_public": [],
- "ipv6_link_local": [],
- "ipv6_private_local": [],
- "ipv6_public": [],
-}
-
-
-class IpAddressUtilTest(unittest.TestCase):
- def test_positive_ipv4_normal_address(self):
- ip_address = "192.168.1.123"
- self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
- def test_positive_ipv4_any_address(self):
- ip_address = "0.0.0.0"
- self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
- def test_positive_ipv4_broadcast(self):
- ip_address = "255.255.255.0"
- self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
- def test_negative_ipv4_with_ipv6_address(self):
- ip_address = "fe80::f693:9fff:fef4:1ac"
- self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
- def test_negative_ipv4_with_invalid_string(self):
- ip_address = "fdsafdsafdsafdsf"
- self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
- def test_negative_ipv4_with_invalid_number(self):
- ip_address = "192.168.500.123"
- self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
- def test_positive_ipv6(self):
- ip_address = "fe80::f693:9fff:fef4:1ac"
- self.assertTrue(utils.is_valid_ipv6_address(ip_address))
-
- def test_positive_ipv6_link_local(self):
- ip_address = "fe80::"
- self.assertTrue(utils.is_valid_ipv6_address(ip_address))
-
- def test_negative_ipv6_with_ipv4_address(self):
- ip_address = "192.168.1.123"
- self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
- def test_negative_ipv6_invalid_characters(self):
- ip_address = "fe80:jkyr:f693:9fff:fef4:1ac"
- self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
- def test_negative_ipv6_invalid_string(self):
- ip_address = "fdsafdsafdsafdsf"
- self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
- @mock.patch(
- "antlion.controllers.utils_lib.ssh.connection.SshConnection.run"
- )
- def test_ssh_get_interface_ip_addresses_full(self, ssh_mock):
- ssh_mock.side_effect = [
- job.Result(
- stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
- ),
- ]
- self.assertEqual(
- utils.get_interface_ip_addresses(
- SshConnection("mock_settings"), "eno1"
- ),
- CORRECT_FULL_IP_LIST,
- )
-
- @mock.patch(
- "antlion.controllers.utils_lib.ssh.connection.SshConnection.run"
- )
- def test_ssh_get_interface_ip_addresses_empty(self, ssh_mock):
- ssh_mock.side_effect = [
- job.Result(
- stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
- ),
- ]
- self.assertEqual(
- utils.get_interface_ip_addresses(
- SshConnection("mock_settings"), "wlan1"
- ),
- CORRECT_EMPTY_IP_LIST,
- )
-
- @mock.patch("antlion.controllers.adb.AdbProxy")
- @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
- def test_android_get_interface_ip_addresses_full(
- self, is_bootloader, adb_mock
- ):
- adb_mock().shell.side_effect = [
- MOCK_ENO1_IP_ADDRESSES,
- ]
- self.assertEqual(
- utils.get_interface_ip_addresses(AndroidDevice(), "eno1"),
- CORRECT_FULL_IP_LIST,
- )
-
- @mock.patch("antlion.controllers.adb.AdbProxy")
- @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
- def test_android_get_interface_ip_addresses_empty(
- self, is_bootloader, adb_mock
- ):
- adb_mock().shell.side_effect = [
- MOCK_WLAN1_IP_ADDRESSES,
- ]
- self.assertEqual(
- utils.get_interface_ip_addresses(AndroidDevice(), "wlan1"),
- CORRECT_EMPTY_IP_LIST,
- )
-
- @mock.patch(
- "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
- new_callable=mock.PropertyMock,
- )
- @mock.patch(
- "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
- new_callable=mock.PropertyMock,
- )
- @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
- @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
- @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
- @mock.patch(
- "antlion.controllers.fuchsia_device."
- "FuchsiaDevice._generate_ssh_config"
- )
- @mock.patch(
- "antlion.controllers."
- "fuchsia_lib.netstack.netstack_lib."
- "FuchsiaNetstackLib.netstackListInterfaces"
- )
- def test_fuchsia_get_interface_ip_addresses_full(
- self,
- list_interfaces_mock,
- generate_ssh_config_mock,
- ssh_wait_until_reachable_mock,
- ssh_run_mock,
- wait_for_port_mock,
- ffx_mock,
- sl4f_mock,
- ):
- # Configure the log path which is required by ACTS logger.
- logging.log_path = "/tmp/unit_test_garbage"
-
- ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
- ssh_run_mock.return_value = SSHResult(
- subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
- )
-
- # Don't try to wait for the SL4F server to start; it's not being used.
- wait_for_port_mock.return_value = None
-
- sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
- ssh_wait_until_reachable_mock.return_value = None
-
- list_interfaces_mock.return_value = FUCHSIA_INTERFACES
- self.assertEqual(
- utils.get_interface_ip_addresses(
- FuchsiaDevice({"ip": "192.168.1.1"}), "eno1"
- ),
- CORRECT_FULL_IP_LIST,
- )
-
- @mock.patch(
- "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
- new_callable=mock.PropertyMock,
- )
- @mock.patch(
- "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
- new_callable=mock.PropertyMock,
- )
- @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
- @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
- @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
- @mock.patch(
- "antlion.controllers.fuchsia_device."
- "FuchsiaDevice._generate_ssh_config"
- )
- @mock.patch(
- "antlion.controllers."
- "fuchsia_lib.netstack.netstack_lib."
- "FuchsiaNetstackLib.netstackListInterfaces"
- )
- def test_fuchsia_get_interface_ip_addresses_empty(
- self,
- list_interfaces_mock,
- generate_ssh_config_mock,
- ssh_wait_until_reachable_mock,
- ssh_run_mock,
- wait_for_port_mock,
- ffx_mock,
- sl4f_mock,
- ):
- # Configure the log path which is required by ACTS logger.
- logging.log_path = "/tmp/unit_test_garbage"
-
- ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
- ssh_run_mock.return_value = SSHResult(
- subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
- )
-
- # Don't try to wait for the SL4F server to start; it's not being used.
- wait_for_port_mock.return_value = None
- ssh_wait_until_reachable_mock.return_value = None
- sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
-
- list_interfaces_mock.return_value = FUCHSIA_INTERFACES
- self.assertEqual(
- utils.get_interface_ip_addresses(
- FuchsiaDevice({"ip": "192.168.1.1"}), "wlan1"
- ),
- CORRECT_EMPTY_IP_LIST,
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/__init__.py b/packages/antlion/unit_tests/controllers/__init__.py
deleted file mode 100644
index 7f1a899..0000000
--- a/packages/antlion/unit_tests/controllers/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/packages/antlion/unit_tests/controllers/android_lib/__init__.py b/packages/antlion/unit_tests/controllers/android_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/controllers/android_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py b/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
deleted file mode 100644
index 209a445..0000000
--- a/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-import mock
-
-from antlion.controllers.android_lib import logcat
-from antlion.controllers.android_lib.logcat import TimestampTracker
-
-BASE_TIMESTAMP = "2000-01-01 12:34:56.789 123 75348 "
-
-
-class LogcatTest(unittest.TestCase):
- """Tests antlion.controllers.android_lib.logcat"""
-
- @staticmethod
- def patch(patched):
- return mock.patch(f"antlion.controllers.android_lib.logcat.{patched}")
-
- def setUp(self):
- self._get_log_level = logcat._get_log_level
-
- def tearDown(self):
- logcat._get_log_level = self._get_log_level
-
- # TimestampTracker
-
- def test_read_output_sets_last_timestamp_if_found(self):
- tracker = TimestampTracker()
- tracker.read_output(f"{BASE_TIMESTAMP}D message")
-
- self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
-
- def test_read_output_keeps_last_timestamp_if_no_new_stamp_is_found(self):
- tracker = TimestampTracker()
- tracker.read_output(f"{BASE_TIMESTAMP}D message")
- tracker.read_output("--------- beginning of main")
-
- self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
-
- def test_read_output_updates_timestamp_to_first_in_results(self):
- tracker = TimestampTracker()
- tracker.read_output(f"{BASE_TIMESTAMP}D 9999-99-99 12:34:56.789")
-
- self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
-
- # _get_log_level
-
- def test_get_log_level_verbose(self):
- """Tests that Logcat's verbose logs make it to the debug level."""
- level = logcat._get_log_level(f"{BASE_TIMESTAMP}V")
-
- self.assertEqual(level, logging.DEBUG)
-
- def test_get_log_level_debug(self):
- """Tests that Logcat's debug logs make it to the debug level."""
- level = logcat._get_log_level(f"{BASE_TIMESTAMP}D")
-
- self.assertEqual(level, logging.DEBUG)
-
- def test_get_log_level_info(self):
- """Tests that Logcat's info logs make it to the info level."""
- level = logcat._get_log_level(f"{BASE_TIMESTAMP}I")
-
- self.assertEqual(level, logging.INFO)
-
- def test_get_log_level_warning(self):
- """Tests that Logcat's warning logs make it to the warning level."""
- level = logcat._get_log_level(f"{BASE_TIMESTAMP}W")
-
- self.assertEqual(level, logging.WARNING)
-
- def test_get_log_level_error(self):
- """Tests that Logcat's error logs make it to the error level."""
- level = logcat._get_log_level(f"{BASE_TIMESTAMP}E")
-
- self.assertEqual(level, logging.ERROR)
-
- def test_get_log_level_markers(self):
- """Tests that Logcat's marker logs make it to the error level."""
- level = logcat._get_log_level("--------- beginning of main")
-
- self.assertEqual(level, logging.ERROR)
-
- # _log_line_func
-
- def test_log_line_func_returns_func_that_logs_to_given_logger(self):
- logcat._get_log_level = lambda message: logging.INFO
- tracker = mock.Mock()
- log = mock.Mock()
- message = "MESSAGE"
-
- logcat._log_line_func(log, tracker)(message)
-
- self.assertEqual(log.log.called, True)
- log.log.assert_called_once_with(logging.INFO, message)
-
- def test_log_line_func_returns_func_that_updates_the_timestamp(self):
- logcat._get_log_level = lambda message: logging.INFO
- tracker = mock.Mock()
- log = mock.Mock()
- message = "MESSAGE"
-
- logcat._log_line_func(log, tracker)(message)
-
- self.assertEqual(tracker.read_output.called, True)
- tracker.read_output.assert_called_once_with(message)
-
- # _on_retry
-
- def test_on_retry_returns_func_that_formats_with_last_timestamp(self):
- tracker = TimestampTracker()
- tracker.read_output(BASE_TIMESTAMP)
- new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
-
- self.assertIn(f'-T "{tracker.last_timestamp}"', new_command)
-
- def test_on_retry_func_returns_string_that_contains_the_given_serial(self):
- tracker = TimestampTracker()
- tracker.read_output(BASE_TIMESTAMP)
- new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
-
- self.assertTrue("-s S3R14L" in new_command)
-
- def test_on_retry_func_returns_string_that_contains_any_extra_params(self):
- tracker = TimestampTracker()
- tracker.read_output(BASE_TIMESTAMP)
- new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
-
- self.assertTrue("extra_params" in new_command)
-
- # create_logcat_keepalive_process
-
- def test_create_logcat_keepalive_process_creates_a_new_logger(self):
- with self.patch("log_stream") as log_stream, self.patch("Process"):
- logcat.create_logcat_keepalive_process("S3R14L", "dir")
- self.assertEqual(
- log_stream.create_logger.call_args[0][0], "adblog_S3R14L"
- )
- self.assertEqual(
- log_stream.create_logger.call_args[1]["subcontext"], "dir"
- )
-
- def test_create_logcat_keepalive_process_creates_a_new_process(self):
- with self.patch("log_stream"), self.patch("Process") as process:
- logcat.create_logcat_keepalive_process("S3R14L", "dir")
-
- self.assertIn("S3R14L", process.call_args[0][0])
-
- def test_create_logcat_keepalive_process_sets_output_callback(self):
- with self.patch("log_stream"), self.patch("Process"):
- process = logcat.create_logcat_keepalive_process("S3R14L", "dir")
-
- self.assertEqual(process.set_on_output_callback.called, True)
-
- def test_create_logcat_keepalive_process_sets_on_terminate_callback(self):
- with self.patch("log_stream"), self.patch("Process"):
- process = logcat.create_logcat_keepalive_process("S3R14L", "dir")
-
- self.assertEqual(process.set_on_terminate_callback.called, True)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/android_lib/services_test.py b/packages/antlion/unit_tests/controllers/android_lib/services_test.py
deleted file mode 100644
index d0cd787..0000000
--- a/packages/antlion/unit_tests/controllers/android_lib/services_test.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-from antlion.controllers.android_lib import services
-from antlion.controllers.android_lib.events import (
- AndroidStartServicesEvent,
- AndroidStopServicesEvent,
-)
-from antlion.event import event_bus
-
-
-class ServicesTest(unittest.TestCase):
- """Tests antlion.controllers.android_lib.services"""
-
- # AndroidService
-
- def test_register_adds_both_start_and_stop_methods(self):
- """Test that both the _start and _stop methods are registered to
- their respective events upon calling register().
- """
- event_bus._event_bus = event_bus._EventBus()
- service = services.AndroidService(mock.Mock())
- service.register()
- subscriptions = event_bus._event_bus._subscriptions
- self.assertTrue(
- any(
- subscription._func == service._start
- for subscription in subscriptions[AndroidStartServicesEvent]
- )
- )
- self.assertTrue(
- any(
- subscription._func == service._stop
- for subscription in subscriptions[AndroidStopServicesEvent]
- )
- )
-
- @unittest.mock.patch.object(services.AndroidService, "_start")
- def test_event_deliver_only_to_matching_serial(self, start_fn):
- """Test that the service only responds to events that matches its
- device serial.
- """
- event_bus._event_bus = event_bus._EventBus()
- service = services.AndroidService(mock.Mock())
- service.ad.serial = "right_serial"
- service.register()
-
- wrong_ad = mock.Mock()
- wrong_ad.serial = "wrong_serial"
- wrong_event = AndroidStartServicesEvent(wrong_ad)
- event_bus.post(wrong_event)
- start_fn.assert_not_called()
-
- right_ad = mock.Mock()
- right_ad.serial = "right_serial"
- right_event = AndroidStartServicesEvent(right_ad)
- event_bus.post(right_event)
- start_fn.assert_called_with(right_event)
-
- def test_unregister_removes_both_start_and_stop_methods(self):
- """Test that both the _start and _stop methods are unregistered from
- their respective events upon calling unregister().
- """
- event_bus._event_bus = event_bus._EventBus()
- service = services.AndroidService(mock.Mock())
- service.register()
- service.unregister()
- subscriptions = event_bus._event_bus._subscriptions
- self.assertFalse(
- any(
- subscription._func == service._start
- for subscription in subscriptions[AndroidStartServicesEvent]
- )
- )
- self.assertFalse(
- any(
- subscription._func == service._stop
- for subscription in subscriptions[AndroidStopServicesEvent]
- )
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/__init__.py b/packages/antlion/unit_tests/controllers/ap_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/controllers/ap_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py b/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
deleted file mode 100644
index ddbd78b..0000000
--- a/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import unittest
-
-from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
-
-
-class DhcpConfigTest(unittest.TestCase):
- def setUp(self):
- super().setUp()
- # These config files may have long diffs, modify this setting to
- # ensure they're printed.
- self.maxDiff = None
-
- def test_basic_dhcp_config(self):
- dhcp_conf = DhcpConfig()
-
- expected_config = "default-lease-time 600;\n" "max-lease-time 7200;"
-
- self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
- def test_dhcp_config_with_lease_times(self):
- default_lease_time = 350
- max_lease_time = 5000
- dhcp_conf = DhcpConfig(
- default_lease_time=default_lease_time, max_lease_time=max_lease_time
- )
-
- expected_config = (
- f"default-lease-time {default_lease_time};\n"
- f"max-lease-time {max_lease_time};"
- )
-
- self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
- def test_dhcp_config_with_subnets(self):
- default_lease_time = 150
- max_lease_time = 3000
- subnets = [
- # addresses from 10.10.1.0 - 10.10.1.255
- Subnet(ipaddress.ip_network("10.10.1.0/24")),
- # 4 addresses from 10.10.3.0 - 10.10.3.3
- Subnet(ipaddress.ip_network("10.10.3.0/30")),
- # 6 addresses from 10.10.5.20 - 10.10.5.25
- Subnet(
- ipaddress.ip_network("10.10.5.0/24"),
- start=ipaddress.ip_address("10.10.5.20"),
- end=ipaddress.ip_address("10.10.5.25"),
- router=ipaddress.ip_address("10.10.5.255"),
- lease_time=60,
- ),
- ]
- dhcp_conf = DhcpConfig(
- subnets=subnets,
- default_lease_time=default_lease_time,
- max_lease_time=max_lease_time,
- )
-
- # Unless an explicit start/end address is provided, the second
- # address in the range is used for "start", and the second to
- # last address is used for "end".
- expected_config = (
- f"default-lease-time {default_lease_time};\n"
- f"max-lease-time {max_lease_time};\n"
- "subnet 10.10.1.0 netmask 255.255.255.0 {\n"
- "\tpool {\n"
- "\t\toption subnet-mask 255.255.255.0;\n"
- "\t\toption routers 10.10.1.1;\n"
- "\t\trange 10.10.1.2 10.10.1.254;\n"
- "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
- "\t}\n"
- "}\n"
- "subnet 10.10.3.0 netmask 255.255.255.252 {\n"
- "\tpool {\n"
- "\t\toption subnet-mask 255.255.255.252;\n"
- "\t\toption routers 10.10.3.1;\n"
- "\t\trange 10.10.3.2 10.10.3.2;\n"
- "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
- "\t}\n"
- "}\n"
- "subnet 10.10.5.0 netmask 255.255.255.0 {\n"
- "\tpool {\n"
- "\t\toption subnet-mask 255.255.255.0;\n"
- "\t\toption routers 10.10.5.255;\n"
- "\t\trange 10.10.5.20 10.10.5.25;\n"
- "\t\tdefault-lease-time 60;\n"
- "\t\tmax-lease-time 60;\n"
- "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
- "\t}\n"
- "}"
- )
-
- self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
- def test_additional_subnet_parameters_and_options(self):
- default_lease_time = 150
- max_lease_time = 3000
- subnets = [
- Subnet(
- ipaddress.ip_network("10.10.1.0/24"),
- additional_parameters={
- "allow": "unknown-clients",
- "foo": "bar",
- },
- additional_options={"my-option": "some-value"},
- ),
- ]
- dhcp_conf = DhcpConfig(
- subnets=subnets,
- default_lease_time=default_lease_time,
- max_lease_time=max_lease_time,
- )
-
- # Unless an explicit start/end address is provided, the second
- # address in the range is used for "start", and the second to
- # last address is used for "end".
- expected_config = (
- f"default-lease-time {default_lease_time};\n"
- f"max-lease-time {max_lease_time};\n"
- "subnet 10.10.1.0 netmask 255.255.255.0 {\n"
- "\tpool {\n"
- "\t\toption subnet-mask 255.255.255.0;\n"
- "\t\toption routers 10.10.1.1;\n"
- "\t\trange 10.10.1.2 10.10.1.254;\n"
- "\t\tallow unknown-clients;\n"
- "\t\tfoo bar;\n"
- "\t\toption my-option some-value;\n"
- "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
- "\t}\n"
- "}"
- )
-
- self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py b/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py
deleted file mode 100644
index 169c1ac..0000000
--- a/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest.mock import Mock
-
-from antlion.controllers.ap_lib import hostapd
-from antlion.libs.proc.job import Result
-
-# MAC address that will be used in these tests.
-STA_MAC = "aa:bb:cc:dd:ee:ff"
-
-# Abbreviated output of hostapd_cli STA commands, showing various AUTH/ASSOC/AUTHORIZED states.
-STA_OUTPUT_WITHOUT_STA_AUTHENTICATED = b"""aa:bb:cc:dd:ee:ff
-flags=[WMM][HT][VHT]"""
-
-STA_OUTPUT_WITH_STA_AUTHENTICATED = b"""aa:bb:cc:dd:ee:ff
-flags=[AUTH][WMM][HT][VHT]"""
-
-STA_OUTPUT_WITH_STA_ASSOCIATED = b"""aa:bb:cc:dd:ee:ff
-flags=[AUTH][ASSOC][WMM][HT][VHT]
-aid=42"""
-
-STA_OUTPUT_WITH_STA_AUTHORIZED = b"""aa:bb:cc:dd:ee:ff
-flags=[AUTH][ASSOC][AUTHORIZED][WMM][HT][VHT]
-aid=42"""
-
-
-class HostapdTest(unittest.TestCase):
- def test_sta_authenticated_true_for_authenticated_sta(self):
- hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
- hostapd_mock._run_hostapd_cli_cmd = Mock(
- return_value=Result(
- command=list(),
- stdout=STA_OUTPUT_WITH_STA_AUTHENTICATED,
- exit_status=0,
- )
- )
- self.assertTrue(hostapd_mock.sta_authenticated(STA_MAC))
-
- def test_sta_authenticated_false_for_unauthenticated_sta(self):
- hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
- hostapd_mock._run_hostapd_cli_cmd = Mock(
- return_value=Result(
- command=list(),
- stdout=STA_OUTPUT_WITHOUT_STA_AUTHENTICATED,
- exit_status=0,
- )
- )
- self.assertFalse(hostapd_mock.sta_authenticated(STA_MAC))
-
- def test_sta_associated_true_for_associated_sta(self):
- hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
- hostapd_mock._run_hostapd_cli_cmd = Mock(
- return_value=Result(
- command=list(),
- stdout=STA_OUTPUT_WITH_STA_ASSOCIATED,
- exit_status=0,
- )
- )
- self.assertTrue(hostapd_mock.sta_associated(STA_MAC))
-
- def test_sta_associated_false_for_unassociated_sta(self):
- hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
- # Uses the authenticated-only CLI output.
- hostapd_mock._run_hostapd_cli_cmd = Mock(
- return_value=Result(
- command=list(),
- stdout=STA_OUTPUT_WITH_STA_AUTHENTICATED,
- exit_status=0,
- )
- )
- self.assertFalse(hostapd_mock.sta_associated(STA_MAC))
-
- def test_sta_authorized_true_for_authorized_sta(self):
- hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
- hostapd_mock._run_hostapd_cli_cmd = Mock(
- return_value=Result(
- command=list(),
- stdout=STA_OUTPUT_WITH_STA_AUTHORIZED,
- exit_status=0,
- )
- )
- self.assertTrue(hostapd_mock.sta_authorized(STA_MAC))
-
-    def test_sta_authorized_false_for_unauthorized_sta(self):
- hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
- # Uses the associated-only CLI output.
- hostapd_mock._run_hostapd_cli_cmd = Mock(
- return_value=Result(
- command=list(),
- stdout=STA_OUTPUT_WITH_STA_ASSOCIATED,
- exit_status=0,
- )
- )
- self.assertFalse(hostapd_mock.sta_authorized(STA_MAC))
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py b/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
deleted file mode 100644
index 775939d..0000000
--- a/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.ap_lib.radio_measurement import (
- BssidInformation,
- BssidInformationCapabilities,
- NeighborReportElement,
- PhyType,
-)
-
-EXPECTED_BSSID = "01:23:45:ab:cd:ef"
-EXPECTED_BSSID_INFO_CAP = BssidInformationCapabilities(
- spectrum_management=True, qos=True, apsd=True, radio_measurement=True
-)
-EXPECTED_OP_CLASS = 81
-EXPECTED_CHAN = 11
-EXPECTED_PHY = PhyType.HT
-EXPECTED_BSSID_INFO = BssidInformation(
- capabilities=EXPECTED_BSSID_INFO_CAP, high_throughput=True
-)
-
-
-class RadioMeasurementTest(unittest.TestCase):
- def test_bssid_information_capabilities(self):
- self.assertTrue(EXPECTED_BSSID_INFO_CAP.spectrum_management)
- self.assertTrue(EXPECTED_BSSID_INFO_CAP.qos)
- self.assertTrue(EXPECTED_BSSID_INFO_CAP.apsd)
- self.assertTrue(EXPECTED_BSSID_INFO_CAP.radio_measurement)
- # Must also test the numeric representation.
- self.assertEqual(int(EXPECTED_BSSID_INFO_CAP), 0b111100)
-
- def test_bssid_information(self):
- self.assertEqual(
- EXPECTED_BSSID_INFO.capabilities, EXPECTED_BSSID_INFO_CAP
- )
- self.assertEqual(EXPECTED_BSSID_INFO.high_throughput, True)
- # Must also test the numeric representation.
- self.assertEqual(
- int(EXPECTED_BSSID_INFO), 0b10001111000100000000000000000000
- )
-
- def test_neighbor_report_element(self):
- element = NeighborReportElement(
- bssid=EXPECTED_BSSID,
- bssid_information=EXPECTED_BSSID_INFO,
- operating_class=EXPECTED_OP_CLASS,
- channel_number=EXPECTED_CHAN,
- phy_type=EXPECTED_PHY,
- )
- self.assertEqual(element.bssid, EXPECTED_BSSID)
- self.assertEqual(element.bssid_information, EXPECTED_BSSID_INFO)
- self.assertEqual(element.operating_class, EXPECTED_OP_CLASS)
- self.assertEqual(element.channel_number, EXPECTED_CHAN)
- self.assertEqual(element.phy_type, EXPECTED_PHY)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py b/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
deleted file mode 100644
index 773d153..0000000
--- a/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import unittest
-from unittest.mock import patch
-
-from antlion.controllers.ap_lib import radvd_constants
-from antlion.controllers.ap_lib.radvd import Radvd, RadvdStartError
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-
-SEARCH_FILE = (
- "antlion.controllers.utils_lib.commands.shell." "ShellCommand.search_file"
-)
-DELETE_FILE = (
- "antlion.controllers.utils_lib.commands.shell.ShellCommand." "delete_file"
-)
-
-CORRECT_COMPLEX_RADVD_CONFIG = """interface wlan0 {
- IgnoreIfMissing on;
- AdvSendAdvert off;
- UnicastOnly on;
- MaxRtrAdvInterval 60;
- MinRtrAdvInterval 5;
- MinDelayBetweenRAs 5;
- AdvManagedFlag off;
- AdvOtherConfigFlag on;
- AdvLinkMTU 1400;
- AdvReachableTime 3600000;
- AdvRetransTimer 10;
- AdvCurHopLimit 50;
- AdvDefaultLifetime 8000;
- AdvDefaultPreference off;
- AdvSourceLLAddress on;
- AdvHomeAgentFlag off;
- AdvHomeAgentInfo on;
- HomeAgentLifetime 100;
- HomeAgentPreference 100;
- AdvMobRtrSupportFlag off;
- AdvIntervalOpt on;
- prefix fd00::/64
- {
- AdvOnLink off;
- AdvAutonomous on;
- AdvRouterAddr off;
- AdvValidLifetime 86400;
- AdvPreferredLifetime 14400;
- Base6to4Interface NA;
- };
- clients
- {
- fe80::c66d:3c75:2cec:1d72;
- fe80::c66d:3c75:2cec:1d73;
- };
- route fd00::/64 {
- AdvRouteLifetime 1024;
- AdvRoutePreference high;
- };
- RDNSS 2401:fa00:480:7a00:4d56:5373:4549:1e29 2401:fa00:480:7a00:4d56:5373:4549:1e30 {
- AdvRDNSSPreference 8;
- AdvRDNSSOpen on;
- AdvRDNSSLifetime 1025;
- };
-};""".replace(
- " ", "\t"
-)
-
-CORRECT_SIMPLE_RADVD_CONFIG = """interface wlan0 {
- AdvSendAdvert on;
- prefix fd00::/64
- {
- AdvOnLink on;
- AdvAutonomous on;
- };
-};""".replace(
- " ", "\t"
-)
-
-
-def delete_file_mock(file_to_delete):
- if os.path.exists(file_to_delete):
- os.remove(file_to_delete)
-
-
-def write_configs_mock(config_file_with_path, output_config):
- with open(config_file_with_path, "w+") as config_fileId:
- config_fileId.write(output_config)
-
-
-class RadvdTest(unittest.TestCase):
- @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.kill")
- def test_radvd_ikill(self, kill):
- kill.return_value = True
- radvd_mock = Radvd("mock_runner", "wlan0")
- self.assertIsNone(radvd_mock.stop())
-
- @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive")
- def test_radvd_is_alive_True(self, is_alive_mock):
- is_alive_mock.return_value = True
- radvd_mock = Radvd("mock_runner", "wlan0")
- self.assertTrue(radvd_mock.is_alive())
-
- @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive")
- def test_radvd_is_alive_False(self, is_alive_mock):
- is_alive_mock.return_value = False
- radvd_mock = Radvd("mock_runner", "wlan0")
- self.assertFalse(radvd_mock.is_alive())
-
- @patch("antlion.controllers.ap_lib.radvd.Radvd._scan_for_errors")
- @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
- def test_wait_for_process_process_alive(
- self, is_alive_mock, _scan_for_errors_mock
- ):
- is_alive_mock.return_value = True
- _scan_for_errors_mock.return_value = True
- radvd_mock = Radvd("mock_runner", "wlan0")
- self.assertIsNone(radvd_mock._wait_for_process(timeout=2))
-
- @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
- @patch(SEARCH_FILE)
- def test_scan_for_errors_is_dead(self, search_file_mock, is_alive_mock):
- is_alive_mock.return_value = False
- search_file_mock.return_value = False
- radvd_mock = Radvd("mock_runner", "wlan0")
- with self.assertRaises(RadvdStartError) as context:
- radvd_mock._scan_for_errors(True)
- self.assertTrue("Radvd failed to start" in str(context.exception))
-
- @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
- @patch(SEARCH_FILE)
- def test_scan_for_errors_exited_prematurely(
- self, search_file_mock, is_alive_mock
- ):
- is_alive_mock.return_value = True
- search_file_mock.return_value = True
- radvd_mock = Radvd("mock_runner", "wlan0")
- with self.assertRaises(RadvdStartError) as context:
- radvd_mock._scan_for_errors(True)
- self.assertTrue("Radvd exited prematurely." in str(context.exception))
-
- @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
- @patch(SEARCH_FILE)
- def test_scan_for_errors_success(self, search_file_mock, is_alive_mock):
- is_alive_mock.return_value = True
- search_file_mock.return_value = False
- radvd_mock = Radvd("mock_runner", "wlan0")
- self.assertIsNone(radvd_mock._scan_for_errors(True))
-
- @patch(DELETE_FILE)
- @patch(
- "antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file"
- )
- def test_write_configs_simple(self, write_file, delete_file):
- delete_file.side_effect = delete_file_mock
- write_file.side_effect = write_configs_mock
- basic_radvd_config = RadvdConfig()
- radvd_mock = Radvd("mock_runner", "wlan0")
- radvd_mock._write_configs(basic_radvd_config)
- radvd_config = radvd_mock._config_file
- with open(radvd_config, "r") as radvd_config_fileId:
- config_data = radvd_config_fileId.read()
- self.assertTrue(CORRECT_SIMPLE_RADVD_CONFIG == config_data)
-
- @patch(DELETE_FILE)
- @patch(
- "antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file"
- )
- def test_write_configs_complex(self, write_file, delete_file):
- delete_file.side_effect = delete_file_mock
- write_file.side_effect = write_configs_mock
- complex_radvd_config = RadvdConfig(
- clients=["fe80::c66d:3c75:2cec:1d72", "fe80::c66d:3c75:2cec:1d73"],
- route=radvd_constants.DEFAULT_PREFIX,
- rdnss=[
- "2401:fa00:480:7a00:4d56:5373:4549:1e29",
- "2401:fa00:480:7a00:4d56:5373:4549:1e30",
- ],
- ignore_if_missing=radvd_constants.IGNORE_IF_MISSING_ON,
- adv_send_advert=radvd_constants.ADV_SEND_ADVERT_OFF,
- unicast_only=radvd_constants.UNICAST_ONLY_ON,
- max_rtr_adv_interval=60,
- min_rtr_adv_interval=5,
- min_delay_between_ras=5,
- adv_managed_flag=radvd_constants.ADV_MANAGED_FLAG_OFF,
- adv_other_config_flag=radvd_constants.ADV_OTHER_CONFIG_FLAG_ON,
- adv_link_mtu=1400,
- adv_reachable_time=3600000,
- adv_retrans_timer=10,
- adv_cur_hop_limit=50,
- adv_default_lifetime=8000,
- adv_default_preference=radvd_constants.ADV_DEFAULT_PREFERENCE_OFF,
- adv_source_ll_address=radvd_constants.ADV_SOURCE_LL_ADDRESS_ON,
- adv_home_agent_flag=radvd_constants.ADV_HOME_AGENT_FLAG_OFF,
- adv_home_agent_info=radvd_constants.ADV_HOME_AGENT_INFO_ON,
- home_agent_lifetime=100,
- home_agent_preference=100,
- adv_mob_rtr_support_flag=radvd_constants.ADV_MOB_RTR_SUPPORT_FLAG_OFF,
- adv_interval_opt=radvd_constants.ADV_INTERVAL_OPT_ON,
- adv_on_link=radvd_constants.ADV_ON_LINK_OFF,
- adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
- adv_router_addr=radvd_constants.ADV_ROUTER_ADDR_OFF,
- adv_valid_lifetime=86400,
- adv_preferred_lifetime=14400,
- base_6to4_interface="NA",
- adv_route_lifetime=1024,
- adv_route_preference=radvd_constants.ADV_ROUTE_PREFERENCE_HIGH,
- adv_rdnss_preference=8,
- adv_rdnss_open=radvd_constants.ADV_RDNSS_OPEN_ON,
- adv_rdnss_lifetime=1025,
- )
- radvd_mock = Radvd("mock_runner", "wlan0")
- radvd_mock._write_configs(complex_radvd_config)
- radvd_config = radvd_mock._config_file
- with open(radvd_config, "r") as radvd_config_fileId:
- config_data = radvd_config_fileId.read()
- self.assertTrue(CORRECT_COMPLEX_RADVD_CONFIG == config_data)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py b/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
deleted file mode 100644
index 0994a35..0000000
--- a/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.ap_lib.radio_measurement import (
- BssidInformation,
- NeighborReportElement,
- PhyType,
-)
-from antlion.controllers.ap_lib.wireless_network_management import (
- BssTransitionCandidateList,
- BssTransitionManagementRequest,
-)
-
-EXPECTED_NEIGHBOR_1 = NeighborReportElement(
- bssid="01:23:45:ab:cd:ef",
- bssid_information=BssidInformation(),
- operating_class=81,
- channel_number=1,
- phy_type=PhyType.HT,
-)
-EXPECTED_NEIGHBOR_2 = NeighborReportElement(
- bssid="cd:ef:ab:45:67:89",
- bssid_information=BssidInformation(),
- operating_class=121,
- channel_number=149,
- phy_type=PhyType.VHT,
-)
-EXPECTED_NEIGHBORS = [EXPECTED_NEIGHBOR_1, EXPECTED_NEIGHBOR_2]
-EXPECTED_CANDIDATE_LIST = BssTransitionCandidateList(EXPECTED_NEIGHBORS)
-
-
-class WirelessNetworkManagementTest(unittest.TestCase):
- def test_bss_transition_management_request(self):
- request = BssTransitionManagementRequest(
- disassociation_imminent=True,
- abridged=True,
- candidate_list=EXPECTED_NEIGHBORS,
- )
- self.assertTrue(request.disassociation_imminent)
- self.assertTrue(request.abridged)
- self.assertIn(EXPECTED_NEIGHBOR_1, request.candidate_list)
- self.assertIn(EXPECTED_NEIGHBOR_2, request.candidate_list)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/iperf_client_test.py b/packages/antlion/unit_tests/controllers/iperf_client_test.py
deleted file mode 100644
index 4d581ae..0000000
--- a/packages/antlion/unit_tests/controllers/iperf_client_test.py
+++ /dev/null
@@ -1,162 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import unittest
-
-import mock
-
-from antlion.capabilities.ssh import SSHConfig, SSHProvider
-from antlion.controllers import iperf_client
-from antlion.controllers.iperf_client import (
- IPerfClient,
- IPerfClientBase,
- IPerfClientOverAdb,
- IPerfClientOverSsh,
-)
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-# The position in the call tuple that represents the kwargs dict.
-KWARGS = 1
-
-
-class IPerfClientModuleTest(unittest.TestCase):
- """Tests the antlion.controllers.iperf_client module functions."""
-
- def test_create_can_create_client_over_adb(self):
- self.assertIsInstance(
- iperf_client.create([{"AndroidDevice": "foo"}])[0],
- IPerfClientOverAdb,
- "Unable to create IPerfClientOverAdb from create().",
- )
-
- @mock.patch("subprocess.run")
- @mock.patch("socket.create_connection")
- def test_create_can_create_client_over_ssh(
- self, mock_socket_create_connection, mock_subprocess_run
- ):
- self.assertIsInstance(
- iperf_client.create(
- [
- {
- "ssh_config": {
- "user": "root",
- "host": "192.168.42.11",
- "identity_file": "/dev/null",
- }
- }
- ]
- )[0],
- IPerfClientOverSsh,
- "Unable to create IPerfClientOverSsh from create().",
- )
-
- def test_create_can_create_local_client(self):
- self.assertIsInstance(
- iperf_client.create([{}])[0],
- IPerfClient,
- "Unable to create IPerfClient from create().",
- )
-
-
-class IPerfClientBaseTest(unittest.TestCase):
- """Tests antlion.controllers.iperf_client.IPerfClientBase."""
-
- @mock.patch("os.makedirs")
- def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
- # Will never actually be created/used.
- logging.log_path = "/tmp/unit_test_garbage"
-
- full_file_path = IPerfClientBase._get_full_file_path(0)
-
- self.assertTrue(
- mock_makedirs.called, "Did not attempt to create a directory."
- )
- self.assertEqual(
- os.path.dirname(full_file_path),
- mock_makedirs.call_args[ARGS][0],
- "The parent directory of the full file path was not created.",
- )
-
-
-class IPerfClientTest(unittest.TestCase):
- """Tests antlion.controllers.iperf_client.IPerfClient."""
-
- @mock.patch("builtins.open")
- @mock.patch("subprocess.call")
- def test_start_writes_to_full_file_path(self, mock_call, mock_open):
- client = IPerfClient()
- file_path = "/path/to/foo"
- client._get_full_file_path = lambda _: file_path
-
- client.start("127.0.0.1", "IPERF_ARGS", "TAG")
-
- mock_open.assert_called_with(file_path, "w")
- self.assertEqual(
- mock_call.call_args[KWARGS]["stdout"],
- mock_open().__enter__.return_value,
- "IPerfClient did not write the logs to the expected file.",
- )
-
-
-class IPerfClientOverSshTest(unittest.TestCase):
- """Test antlion.controllers.iperf_client.IPerfClientOverSshTest."""
-
- @mock.patch("socket.create_connection")
- @mock.patch("subprocess.run")
- @mock.patch("builtins.open")
- def test_start_writes_output_to_full_file_path(
- self, mock_open, mock_subprocess_run, mock_socket_create_connection
- ):
- ssh_provider = SSHProvider(
- SSHConfig(
- user="root",
- host_name="192.168.42.11",
- identity_file="/dev/null",
- )
- )
- client = IPerfClientOverSsh(ssh_provider)
- file_path = "/path/to/foo"
- client._get_full_file_path = lambda _: file_path
- client.start("127.0.0.1", "IPERF_ARGS", "TAG")
- mock_open.assert_called_with(file_path, "w")
- mock_open().__enter__().write.assert_called()
-
-
-class IPerfClientOverAdbTest(unittest.TestCase):
- """Test antlion.controllers.iperf_client.IPerfClientOverAdb."""
-
- @mock.patch("builtins.open")
- def test_start_writes_output_to_full_file_path(self, mock_open):
- client = IPerfClientOverAdb(None)
- file_path = "/path/to/foo"
- client._get_full_file_path = lambda _: file_path
-
- with mock.patch(
- "antlion.controllers.iperf_client."
- "IPerfClientOverAdb._android_device"
- ) as adb_device:
- adb_device.adb.shell.return_value = "output"
- client.start("127.0.0.1", "IPERF_ARGS", "TAG")
-
- mock_open.assert_called_with(file_path, "w")
- mock_open().__enter__().write.assert_called_with("output")
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/iperf_server_test.py b/packages/antlion/unit_tests/controllers/iperf_server_test.py
deleted file mode 100644
index 9d5a51c..0000000
--- a/packages/antlion/unit_tests/controllers/iperf_server_test.py
+++ /dev/null
@@ -1,392 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import unittest
-
-import mock
-
-from antlion.controllers import iperf_server
-from antlion.controllers.iperf_server import (
- IPerfServer,
- IPerfServerOverAdb,
- IPerfServerOverSsh,
-)
-from antlion.controllers.utils_lib.ssh import settings
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-# The position in the call tuple that represents the kwargs dict.
-KWARGS = 1
-
-MOCK_LOGFILE_PATH = "/path/to/foo"
-
-
-class IPerfServerModuleTest(unittest.TestCase):
- """Tests the antlion.controllers.iperf_server module."""
-
- def test_create_creates_local_iperf_server_with_int(self):
- self.assertIsInstance(
- iperf_server.create([12345])[0],
- IPerfServer,
- "create() failed to create IPerfServer for integer input.",
- )
-
- def test_create_creates_local_iperf_server_with_str(self):
- self.assertIsInstance(
- iperf_server.create(["12345"])[0],
- IPerfServer,
- "create() failed to create IPerfServer for integer input.",
- )
-
- def test_create_cannot_create_local_iperf_server_with_bad_str(self):
- with self.assertRaises(ValueError):
- iperf_server.create(["12345BAD_STRING"])
-
- @mock.patch("antlion.controllers.iperf_server.utils")
- def test_create_creates_server_over_ssh_with_ssh_config_and_port(self, _):
- self.assertIsInstance(
- iperf_server.create(
- [
- {
- "ssh_config": {
- "user": "",
- "host": "",
- "identity_file": "/dev/null",
- },
- "port": "",
- }
- ]
- )[0],
- IPerfServerOverSsh,
- "create() failed to create IPerfServerOverSsh for a valid config.",
- )
-
- def test_create_creates_server_over_adb_with_proper_config(self):
- self.assertIsInstance(
- iperf_server.create([{"AndroidDevice": "53R147", "port": 0}])[0],
- IPerfServerOverAdb,
- "create() failed to create IPerfServerOverAdb for a valid config.",
- )
-
- def test_create_raises_value_error_on_bad_config_dict(self):
- with self.assertRaises(ValueError):
- iperf_server.create([{"AndroidDevice": "53R147", "ssh_config": {}}])
-
- def test_get_port_from_ss_output_returns_correct_port_ipv4(self):
- ss_output = (
- "tcp LISTEN 0 5 127.0.0.1:<PORT> *:*"
- ' users:(("cmd",pid=<PID>,fd=3))'
- )
- self.assertEqual(
- iperf_server._get_port_from_ss_output(ss_output, "<PID>"), "<PORT>"
- )
-
- def test_get_port_from_ss_output_returns_correct_port_ipv6(self):
- ss_output = (
- "tcp LISTEN 0 5 ff:ff:ff:ff:ff:ff:<PORT> *:*"
- ' users:(("cmd",pid=<PID>,fd=3))'
- )
- self.assertEqual(
- iperf_server._get_port_from_ss_output(ss_output, "<PID>"), "<PORT>"
- )
-
-
-class IPerfServerBaseTest(unittest.TestCase):
- """Tests antlion.controllers.iperf_server.IPerfServerBase."""
-
- @mock.patch("os.makedirs")
- def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
- # Will never actually be created/used.
- logging.log_path = "/tmp/unit_test_garbage"
-
- server = IPerfServer("port")
-
- full_file_path = server._get_full_file_path()
-
- self.assertTrue(
- mock_makedirs.called, "Did not attempt to create a directory."
- )
- self.assertEqual(
- os.path.dirname(full_file_path),
- mock_makedirs.call_args[ARGS][0],
- "The parent directory of the full file path was not created.",
- )
-
-
-class IPerfServerTest(unittest.TestCase):
- """Tests antlion.controllers.iperf_server.IPerfServer."""
-
- PID = 123456
-
- def setUp(self):
- iperf_server._get_port_from_ss_output = lambda *_: IPerfServerTest.PID
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.subprocess")
- @mock.patch("antlion.controllers.iperf_server.job")
- def test_start_makes_started_true(self, mock_job, __, ___):
- """Tests calling start() without calling stop() makes started True."""
- server = IPerfServer("port")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server.start()
-
- self.assertTrue(server.started)
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.subprocess")
- @mock.patch("antlion.controllers.iperf_server.job")
- def test_start_stop_makes_started_false(self, _, __, ___):
- """Tests calling start() without calling stop() makes started True."""
- server = IPerfServer("port")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
- server.start()
- server.stop()
-
- self.assertFalse(server.started)
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.subprocess")
- @mock.patch("antlion.controllers.iperf_server.job")
- def test_start_sets_current_log_file(self, _, __, ___):
- server = IPerfServer("port")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
- server.start()
-
- self.assertEqual(
- server._current_log_file,
- MOCK_LOGFILE_PATH,
- "The _current_log_file was not received from _get_full_file_path.",
- )
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.subprocess")
- def test_stop_returns_current_log_file(self, _, __):
- server = IPerfServer("port")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._current_log_file = MOCK_LOGFILE_PATH
- server._iperf_process = mock.Mock()
-
- log_file = server.stop()
-
- self.assertEqual(
- log_file,
- MOCK_LOGFILE_PATH,
- "The _current_log_file was not returned by stop().",
- )
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.subprocess")
- @mock.patch("antlion.controllers.iperf_server.job")
- def test_start_does_not_run_two_concurrent_processes(
- self, start_proc, _, __
- ):
- server = IPerfServer("port")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_process = mock.Mock()
-
- server.start()
-
- self.assertFalse(
- start_proc.called,
- "start() should not begin a second process if another is running.",
- )
-
- @mock.patch("antlion.utils.stop_standing_subprocess")
- def test_stop_exits_early_if_no_process_has_started(self, stop_proc):
- server = IPerfServer("port")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_process = None
-
- server.stop()
-
- self.assertFalse(
- stop_proc.called,
- "stop() should not kill a process if no process is running.",
- )
-
-
-class IPerfServerOverSshTest(unittest.TestCase):
- """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
-
- INIT_ARGS = [
- settings.from_config(
- {"host": "TEST_HOST", "user": "test", "identity_file": "/dev/null"}
- ),
- "PORT",
- ]
-
- @mock.patch("antlion.controllers.iperf_server.connection")
- def test_start_makes_started_true(self, _):
- """Tests calling start() without calling stop() makes started True."""
- server = IPerfServerOverSsh(*self.INIT_ARGS)
- server._ssh_session = mock.Mock()
- server._cleanup_iperf_port = mock.Mock()
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
- server.start()
-
- self.assertTrue(server.started)
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.connection")
- def test_start_stop_makes_started_false(self, _, __):
- """Tests calling start() without calling stop() makes started True."""
- server = IPerfServerOverSsh(*self.INIT_ARGS)
- server._ssh_session = mock.Mock()
- server._cleanup_iperf_port = mock.Mock()
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
- server.start()
- server.stop()
-
- self.assertFalse(server.started)
-
- @mock.patch("builtins.open")
- @mock.patch("antlion.controllers.iperf_server.connection")
- def test_stop_returns_expected_log_file(self, _, __):
- server = IPerfServerOverSsh(*self.INIT_ARGS)
- server._ssh_session = mock.Mock()
- server._cleanup_iperf_port = mock.Mock()
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_pid = mock.Mock()
-
- log_file = server.stop()
-
- self.assertEqual(
- log_file,
- MOCK_LOGFILE_PATH,
- "The expected log file was not returned by stop().",
- )
-
- @mock.patch("antlion.controllers.iperf_server.connection")
- def test_start_does_not_run_two_concurrent_processes(self, _):
- server = IPerfServerOverSsh(*self.INIT_ARGS)
- server._ssh_session = mock.Mock()
- server._cleanup_iperf_port = mock.Mock()
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_pid = mock.Mock()
-
- server.start()
-
- self.assertFalse(
- server._ssh_session.run_async.called,
- "start() should not begin a second process if another is running.",
- )
-
- @mock.patch("antlion.utils.stop_standing_subprocess")
- @mock.patch("antlion.controllers.iperf_server.connection")
- def test_stop_exits_early_if_no_process_has_started(self, _, __):
- server = IPerfServerOverSsh(*self.INIT_ARGS)
- server._ssh_session = mock.Mock()
- server._cleanup_iperf_port = mock.Mock()
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_pid = None
-
- server.stop()
-
- self.assertFalse(
- server._ssh_session.run_async.called,
- "stop() should not kill a process if no process is running.",
- )
-
-
-class IPerfServerOverAdbTest(unittest.TestCase):
- """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
-
- ANDROID_DEVICE_PROP = (
- "antlion.controllers.iperf_server." "IPerfServerOverAdb._android_device"
- )
-
- @mock.patch(ANDROID_DEVICE_PROP)
- def test_start_makes_started_true(self, mock_ad):
- """Tests calling start() without calling stop() makes started True."""
- server = IPerfServerOverAdb("53R147", "PORT")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- mock_ad.adb.shell.return_value = "<PID>"
-
- server.start()
-
- self.assertTrue(server.started)
-
- @mock.patch("antlion.libs.proc.job.run")
- @mock.patch("builtins.open")
- @mock.patch(ANDROID_DEVICE_PROP)
- def test_start_stop_makes_started_false(self, mock_ad, _, __):
- """Tests calling start() without calling stop() makes started True."""
- server = IPerfServerOverAdb("53R147", "PORT")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- mock_ad.adb.shell.side_effect = ["<PID>", "", "", ""]
-
- server.start()
- server.stop()
-
- self.assertFalse(server.started)
-
- @mock.patch("antlion.libs.proc.job.run")
- @mock.patch("builtins.open")
- @mock.patch(ANDROID_DEVICE_PROP)
- def test_stop_returns_expected_log_file(self, mock_ad, _, __):
- server = IPerfServerOverAdb("53R147", "PORT")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_process = mock.Mock()
- server._iperf_process_adb_pid = "<PID>"
- mock_ad.adb.shell.side_effect = ["", "", ""]
-
- log_file = server.stop()
-
- self.assertEqual(
- log_file,
- MOCK_LOGFILE_PATH,
- "The expected log file was not returned by stop().",
- )
-
- @mock.patch(ANDROID_DEVICE_PROP)
- def test_start_does_not_run_two_concurrent_processes(self, android_device):
- server = IPerfServerOverAdb("53R147", "PORT")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_process = mock.Mock()
-
- server.start()
-
- self.assertFalse(
- android_device.adb.shell_nb.called,
- "start() should not begin a second process if another is running.",
- )
-
- @mock.patch("antlion.libs.proc.job.run")
- @mock.patch("builtins.open")
- @mock.patch(ANDROID_DEVICE_PROP)
- def test_stop_exits_early_if_no_process_has_started(
- self, android_device, _, __
- ):
- server = IPerfServerOverAdb("53R147", "PORT")
- server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
- server._iperf_pid = None
-
- server.stop()
-
- self.assertFalse(
- android_device.adb.shell_nb.called,
- "stop() should not kill a process if no process is running.",
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/pdu_lib/__init__.py b/packages/antlion/unit_tests/controllers/pdu_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/controllers/pdu_lib/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
deleted file mode 100644
index bfca23f..0000000
--- a/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python unittest module for pdu_lib.synaccess.np02b"""
-
-import unittest
-from unittest.mock import patch
-
-from antlion.controllers.pdu import PduError
-from antlion.controllers.pdu_lib.synaccess.np02b import (
- PduDevice,
- _TNHelperNP02B,
-)
-
-# Test Constants
-HOST = "192.168.1.2"
-VALID_COMMAND_STR = "cmd"
-VALID_COMMAND_BYTE_STR = b"cmd\n\r"
-VALID_RESPONSE_STR = ""
-VALID_RESPONSE_BYTE_STR = b"\n\r\r\n\r\n"
-STATUS_COMMAND_STR = "pshow"
-STATUS_COMMAND_BYTE_STR = b"pshow\n\r"
-STATUS_RESPONSE_STR = (
- "Port | Name |Status 1 | Outlet1 | OFF| 2 | Outlet2 | ON |"
-)
-STATUS_RESPONSE_BYTE_STR = (
- b"Port | Name |Status 1 | Outlet1 | OFF| 2 | Outlet2 | "
- b"ON |\n\r\r\n\r\n"
-)
-INVALID_COMMAND_OUTPUT_BYTE_STR = b"Invalid Command\n\r\r\n\r\n>"
-VALID_STATUS_DICT = {"1": False, "2": True}
-INVALID_STATUS_DICT = {"1": False, "2": False}
-
-
-class _TNHelperNP02BTest(unittest.TestCase):
- """Unit tests for _TNHelperNP02B."""
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
- def test_cmd_is_properly_written(self, telnetlib_mock, sleep_mock):
- """cmd should strip whitespace and encode in ASCII."""
- tnhelper = _TNHelperNP02B(HOST)
- telnetlib_mock.Telnet().read_until.return_value = (
- VALID_RESPONSE_BYTE_STR
- )
- tnhelper.cmd(VALID_COMMAND_STR)
- telnetlib_mock.Telnet().write.assert_called_with(VALID_COMMAND_BYTE_STR)
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
- def test_cmd_valid_command_output_is_properly_parsed(
- self, telnetlib_mock, sleep_mock
- ):
- """cmd should strip the prompt, separators and command from the
- output."""
- tnhelper = _TNHelperNP02B(HOST)
- telnetlib_mock.Telnet().read_until.return_value = (
- VALID_RESPONSE_BYTE_STR
- )
- res = tnhelper.cmd(VALID_COMMAND_STR)
- self.assertEqual(res, VALID_RESPONSE_STR)
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
- def test_cmd_status_output_is_properly_parsed(
- self, telnetlib_mock, sleep_mock
- ):
- """cmd should strip the prompt, separators and command from the output,
- returning just the status information."""
- tnhelper = _TNHelperNP02B(HOST)
- telnetlib_mock.Telnet().read_until.return_value = (
- STATUS_RESPONSE_BYTE_STR
- )
- res = tnhelper.cmd(STATUS_COMMAND_STR)
- self.assertEqual(res, STATUS_RESPONSE_STR)
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
- def test_cmd_invalid_command_raises_error(self, telnetlib_mock, sleep_mock):
- """cmd should raise PduError when an invalid command is given."""
- tnhelper = _TNHelperNP02B(HOST)
- telnetlib_mock.Telnet().read_until.return_value = (
- INVALID_COMMAND_OUTPUT_BYTE_STR
- )
- with self.assertRaises(PduError):
- tnhelper.cmd("Some invalid command.")
-
-
-class NP02BPduDeviceTest(unittest.TestCase):
- """Unit tests for NP02B PduDevice implementation."""
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
- def test_status_parses_output_to_valid_dictionary(self, tnhelper_cmd_mock):
- """status should parse helper response correctly into dict."""
- np02b = PduDevice(HOST, None, None)
- tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
- self.assertEqual(np02b.status(), VALID_STATUS_DICT)
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
- def test_verify_state_matches_state(self, tnhelper_cmd_mock):
- """verify_state should return true when expected state is a subset of
- actual state"""
- np02b = PduDevice(HOST, None, None)
- tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
- self.assertTrue(np02b._verify_state(VALID_STATUS_DICT))
-
- @patch("antlion.controllers.pdu_lib.synaccess.np02b.time")
- @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
- def test_verify_state_throws_error(self, tnhelper_cmd_mock, time_mock):
- """verify_state should throw error after timeout when actual state never
- reaches expected state"""
- time_mock.time.side_effect = [1, 2, 10]
- np02b = PduDevice(HOST, None, None)
- tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
- with self.assertRaises(PduError):
- self.assertTrue(np02b._verify_state(INVALID_STATUS_DICT))
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py b/packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py
deleted file mode 100644
index 7f1a899..0000000
--- a/packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
deleted file mode 100755
index bd9d06b..0000000
--- a/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-
-import mock
-
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-class BreakoutError(Exception):
- """Thrown to prove program execution."""
-
-
-class RpcClientTest(unittest.TestCase):
- """Tests the rpc_client.RpcClient class."""
-
- def test_terminate_warn_on_working_connections(self):
- """Tests rpc_client.RpcClient.terminate().
-
- Tests that if some connections are still working, we log this before
- closing the connections.
- """
- session = mock.Mock()
-
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
- client._log = mock.Mock()
- client._working_connections = [mock.Mock()]
-
- client.terminate()
-
- self.assertTrue(client._log.warning.called)
-
- def test_terminate_closes_all_connections(self):
- """Tests rpc_client.RpcClient.terminate().
-
- Tests that all free and working connections have been closed.
- """
- session = mock.Mock()
-
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
- client._log = mock.Mock()
- working_connections = [mock.Mock() for _ in range(3)]
- free_connections = [mock.Mock() for _ in range(3)]
- client._free_connections = free_connections
- client._working_connections = working_connections
-
- client.terminate()
-
- for connection in working_connections + free_connections:
- self.assertTrue(connection.close.called)
-
- def test_get_free_connection_get_available_client(self):
- """Tests rpc_client.RpcClient._get_free_connection().
-
- Tests that an available client is returned if one exists.
- """
-
- def fail_on_wrong_execution():
- self.fail(
- "The program is not executing the expected path. "
- "Tried to return an available free client, ended up "
- "sleeping to wait for client instead."
- )
-
- session = mock.Mock()
-
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
- expected_connection = mock.Mock()
- client._free_connections = [expected_connection]
- client._lock = mock.MagicMock()
-
- with mock.patch("time.sleep") as sleep_mock:
- sleep_mock.side_effect = fail_on_wrong_execution
-
- connection = client._get_free_connection()
-
- self.assertEqual(connection, expected_connection)
- self.assertTrue(expected_connection in client._working_connections)
- self.assertEqual(len(client._free_connections), 0)
-
- def test_get_free_connection_continues_upon_connection_taken(self):
- """Tests rpc_client.RpcClient._get_free_connection().
-
- Tests that if the free connection is taken while trying to acquire the
- lock to reserve it, the thread gives up the lock and tries again.
- """
-
- def empty_list():
- client._free_connections.clear()
-
- def fail_on_wrong_execution():
- self.fail(
- "The program is not executing the expected path. "
- "Tried to return an available free client, ended up "
- "sleeping to wait for client instead."
- )
-
- session = mock.Mock()
-
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
- client._free_connections = mock.Mock()
- client._lock = mock.MagicMock()
- client._lock.acquire.side_effect = empty_list
- client._free_connections = [mock.Mock()]
-
- with mock.patch("time.sleep") as sleep_mock:
- sleep_mock.side_effect = fail_on_wrong_execution
-
- try:
- client._get_free_connection()
- except IndexError:
- self.fail(
- "Tried to pop free connection when another thread"
- "has taken it."
- )
- # Assert that the lock has been freed.
- self.assertEqual(
- client._lock.acquire.call_count, client._lock.release.call_count
- )
-
- def test_get_free_connection_sleep(self):
- """Tests rpc_client.RpcClient._get_free_connection().
-
- Tests that if the free connection is taken, it will wait for a new one.
- """
-
- session = mock.Mock()
-
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
- client._free_connections = []
- client.max_connections = 0
- client._lock = mock.MagicMock()
- client._free_connections = []
-
- with mock.patch("time.sleep") as sleep_mock:
- sleep_mock.side_effect = BreakoutError()
- try:
- client._get_free_connection()
- except BreakoutError:
- # Assert that the lock has been freed.
- self.assertEqual(
- client._lock.acquire.call_count,
- client._lock.release.call_count,
- )
- # Asserts that the sleep has been called.
- self.assertTrue(sleep_mock.called)
- # Asserts that no changes to connections happened
- self.assertEqual(len(client._free_connections), 0)
- self.assertEqual(len(client._working_connections), 0)
- return True
- self.fail("Failed to hit sleep case")
-
- def test_release_working_connection(self):
- """Tests rpc_client.RpcClient._release_working_connection.
-
- Tests that the working connection is moved into the free connections.
- """
- session = mock.Mock()
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
-
- connection = mock.Mock()
- client._working_connections = [connection]
- client._free_connections = []
- client._release_working_connection(connection)
-
- self.assertTrue(connection in client._free_connections)
- self.assertFalse(connection in client._working_connections)
-
- def test_future(self):
- """Tests rpc_client.RpcClient.future."""
- session = mock.Mock()
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
-
- self.assertEqual(client.future, client._async_client)
-
- def test_getattr(self):
- """Tests rpc_client.RpcClient.__getattr__.
-
- Tests that the name, args, and kwargs are correctly passed to self.rpc.
- """
- session = mock.Mock()
- client = rpc_client.RpcClient(
- session.uid,
- session.adb.serial,
- lambda _: mock.Mock(),
- lambda _: mock.Mock(),
- )
- client.rpc = mock.MagicMock()
- fn = client.fake_function_please_do_not_be_implemented
-
- fn("arg1", "arg2", kwarg1=1, kwarg2=2)
- client.rpc.assert_called_with(
- "fake_function_please_do_not_be_implemented",
- "arg1",
- "arg2",
- kwarg1=1,
- kwarg2=2,
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
deleted file mode 100755
index e32eb41..0000000
--- a/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-
-import mock
-
-from antlion.controllers.sl4a_lib import rpc_client, rpc_connection
-
-MOCK_RESP = b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1}'
-MOCK_RESP_UNKNOWN_UID = b'{"id": 0, "result": 123, "error": null, "status": 0}'
-MOCK_RESP_WITH_ERROR = b'{"id": 0, "error": 1, "status": 1, "uid": 1}'
-
-
-class MockSocketFile(object):
- def __init__(self, resp):
- self.resp = resp
- self.last_write = None
-
- def write(self, msg):
- self.last_write = msg
-
- def readline(self):
- return self.resp
-
- def flush(self):
- pass
-
-
-class RpcConnectionTest(unittest.TestCase):
- """This test class has unit tests for the implementation of everything
- under antlion.controllers.android, which is the RPC client module for sl4a.
- """
-
- @staticmethod
- def mock_rpc_connection(response=MOCK_RESP, uid=rpc_connection.UNKNOWN_UID):
- """Sets up a faked socket file from the mock connection."""
- fake_file = MockSocketFile(response)
- fake_conn = mock.MagicMock()
- fake_conn.makefile.return_value = fake_file
- adb = mock.Mock()
- ports = mock.Mock()
-
- return rpc_connection.RpcConnection(
- adb, ports, fake_conn, fake_file, uid=uid
- )
-
- def test_open_chooses_init_on_unknown_uid(self):
- """Tests rpc_connection.RpcConnection.open().
-
- Tests that open uses the init start command when the uid is unknown.
- """
-
- def pass_on_init(start_command):
- if not start_command == rpc_connection.Sl4aConnectionCommand.INIT:
- self.fail(
- f'Must call "init". Called "{start_command}" instead.'
- )
-
- connection = self.mock_rpc_connection()
- connection._initiate_handshake = pass_on_init
- connection.open()
-
- def test_open_chooses_continue_on_known_uid(self):
- """Tests rpc_connection.RpcConnection.open().
-
- Tests that open uses the continue start command when the uid is known.
- """
-
- def pass_on_continue(start_command):
- if start_command != rpc_connection.Sl4aConnectionCommand.CONTINUE:
- self.fail(
- f'Must call "continue". Called "{start_command}" instead.'
- )
-
- connection = self.mock_rpc_connection(uid=1)
- connection._initiate_handshake = pass_on_continue
- connection.open()
-
- def test_initiate_handshake_returns_uid(self):
- """Tests rpc_connection.RpcConnection._initiate_handshake().
-
- Test that at the end of a handshake with no errors the client object
- has the correct parameters.
- """
- connection = self.mock_rpc_connection()
- connection._initiate_handshake(
- rpc_connection.Sl4aConnectionCommand.INIT
- )
-
- self.assertEqual(connection.uid, 1)
-
- def test_initiate_handshake_returns_unknown_status(self):
- """Tests rpc_connection.RpcConnection._initiate_handshake().
-
- Test that when the handshake is given an unknown uid then the client
- will not be given a uid.
- """
- connection = self.mock_rpc_connection(MOCK_RESP_UNKNOWN_UID)
- connection._initiate_handshake(
- rpc_connection.Sl4aConnectionCommand.INIT
- )
-
- self.assertEqual(connection.uid, rpc_client.UNKNOWN_UID)
-
- def test_initiate_handshake_no_response(self):
- """Tests rpc_connection.RpcConnection._initiate_handshake().
-
- Test that if a handshake receives no response then it will give a
- protocol error.
- """
- connection = self.mock_rpc_connection(b"")
-
- with self.assertRaises(
- rpc_client.Sl4aProtocolError,
- msg=rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE,
- ):
- connection._initiate_handshake(
- rpc_connection.Sl4aConnectionCommand.INIT
- )
-
- def test_cmd_properly_formatted(self):
- """Tests rpc_connection.RpcConnection._cmd().
-
- Tests that the command sent is properly formatted.
- """
- connection = self.mock_rpc_connection(MOCK_RESP)
- connection._cmd("test")
- self.assertIn(
- connection._socket_file.last_write,
- [b'{"cmd": "test", "uid": -1}\n', b'{"uid": -1, "cmd": "test"}\n'],
- )
-
- def test_get_new_ticket(self):
- """Tests rpc_connection.RpcConnection.get_new_ticket().
-
- Tests that a new number is always given for get_new_ticket().
- """
- connection = self.mock_rpc_connection(MOCK_RESP)
- self.assertEqual(
- connection.get_new_ticket() + 1, connection.get_new_ticket()
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
deleted file mode 100755
index acb16bd..0000000
--- a/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
+++ /dev/null
@@ -1,498 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-
-import mock
-
-from antlion.controllers.sl4a_lib import rpc_client, sl4a_manager
-
-
-class Sl4aManagerFactoryTest(unittest.TestCase):
- """Tests the sl4a_manager module-level functions."""
-
- def setUp(self):
- """Clears the Sl4aManager cache."""
- sl4a_manager._all_sl4a_managers = {}
-
- def test_create_manager(self):
- """Tests sl4a_manager.create_sl4a_manager().
-
- Tests that a new Sl4aManager is returned without an error.
- """
- adb = mock.Mock()
- adb.serial = "SERIAL"
- sl4a_man = sl4a_manager.create_sl4a_manager(adb)
- self.assertEqual(sl4a_man.adb, adb)
-
- def test_create_sl4a_manager_return_already_created_manager(self):
- """Tests sl4a_manager.create_sl4a_manager().
-
- Tests that a second call to create_sl4a_manager() does not create a
- new Sl4aManager, and returns the first created Sl4aManager instead.
- """
- adb = mock.Mock()
- adb.serial = "SERIAL"
- first_manager = sl4a_manager.create_sl4a_manager(adb)
-
- adb_same_serial = mock.Mock()
- adb_same_serial.serial = "SERIAL"
- second_manager = sl4a_manager.create_sl4a_manager(adb)
-
- self.assertEqual(first_manager, second_manager)
-
- def test_create_sl4a_manager_multiple_devices_with_one_manager_each(self):
- """Tests sl4a_manager.create_sl4a_manager().
-
-        Tests that when create_sl4a_manager() is called for different devices,
- each device gets its own Sl4aManager object.
- """
- adb_1 = mock.Mock()
- adb_1.serial = "SERIAL"
- first_manager = sl4a_manager.create_sl4a_manager(adb_1)
-
- adb_2 = mock.Mock()
- adb_2.serial = "DIFFERENT_SERIAL_NUMBER"
- second_manager = sl4a_manager.create_sl4a_manager(adb_2)
-
- self.assertNotEqual(first_manager, second_manager)
-
-
-class Sl4aManagerTest(unittest.TestCase):
- """Tests the sl4a_manager.Sl4aManager class."""
-
- ATTEMPT_INTERVAL = 0.25
- MAX_WAIT_ON_SERVER_SECONDS = 1
- _SL4A_LAUNCH_SERVER_CMD = ""
- _SL4A_CLOSE_SERVER_CMD = ""
- _SL4A_ROOT_FIND_PORT_CMD = ""
- _SL4A_USER_FIND_PORT_CMD = ""
- _SL4A_START_SERVICE_CMD = ""
-
- @classmethod
- def setUpClass(cls):
- # Copy all module constants before testing begins.
- Sl4aManagerTest.ATTEMPT_INTERVAL = sl4a_manager.ATTEMPT_INTERVAL
- Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS = (
- sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS
- )
- Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD = (
- sl4a_manager._SL4A_LAUNCH_SERVER_CMD
- )
- Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD = (
- sl4a_manager._SL4A_CLOSE_SERVER_CMD
- )
- Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD = (
- sl4a_manager._SL4A_ROOT_FIND_PORT_CMD
- )
- Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD = (
- sl4a_manager._SL4A_USER_FIND_PORT_CMD
- )
- Sl4aManagerTest._SL4A_START_SERVICE_CMD = (
- sl4a_manager._SL4A_START_SERVICE_CMD
- )
-
- def setUp(self):
- # Restore all module constants at the beginning of each test case.
- sl4a_manager.ATTEMPT_INTERVAL = Sl4aManagerTest.ATTEMPT_INTERVAL
- sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS = (
- Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS
- )
- sl4a_manager._SL4A_LAUNCH_SERVER_CMD = (
- Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD
- )
- sl4a_manager._SL4A_CLOSE_SERVER_CMD = (
- Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD
- )
- sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = (
- Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD
- )
- sl4a_manager._SL4A_USER_FIND_PORT_CMD = (
- Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD
- )
- sl4a_manager._SL4A_START_SERVICE_CMD = (
- Sl4aManagerTest._SL4A_START_SERVICE_CMD
- )
-
- # Reset module data at the beginning of each test.
- sl4a_manager._all_sl4a_managers = {}
-
- def test_sl4a_ports_in_use(self):
- """Tests sl4a_manager.Sl4aManager.sl4a_ports_in_use
-
- Tests to make sure all server ports are returned with no duplicates.
- """
- adb = mock.Mock()
- manager = sl4a_manager.Sl4aManager(adb)
- session_1 = mock.Mock()
- session_1.server_port = 12345
- manager.sessions[1] = session_1
- session_2 = mock.Mock()
- session_2.server_port = 15973
- manager.sessions[2] = session_2
- session_3 = mock.Mock()
- session_3.server_port = 12345
- manager.sessions[3] = session_3
- session_4 = mock.Mock()
- session_4.server_port = 67890
- manager.sessions[4] = session_4
- session_5 = mock.Mock()
- session_5.server_port = 75638
- manager.sessions[5] = session_5
-
- returned_ports = manager.sl4a_ports_in_use
-
- # No duplicated ports.
- self.assertEqual(len(returned_ports), len(set(returned_ports)))
- # One call for each session
- self.assertSetEqual(set(returned_ports), {12345, 15973, 67890, 75638})
-
- @mock.patch("time.sleep", return_value=None)
- def test_start_sl4a_server_uses_all_retries(self, _):
- """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
-
-        Tests to ensure that start_sl4a_server retries and successfully returns
- a port.
- """
- adb = mock.Mock()
- adb.shell = lambda _, **kwargs: ""
-
- side_effects = []
- expected_port = 12345
- for _ in range(
- int(
- sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS
- / sl4a_manager.ATTEMPT_INTERVAL
- )
- - 1
- ):
- side_effects.append(None)
- side_effects.append(expected_port)
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
- try:
- found_port = manager.start_sl4a_server(0)
- self.assertTrue(found_port)
- except rpc_client.Sl4aConnectionError:
- self.fail("start_sl4a_server failed to respect FIND_PORT_RETRIES.")
-
- @mock.patch("time.sleep", return_value=None)
- def test_start_sl4a_server_fails_all_retries(self, _):
- """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
-
- Tests to ensure that start_sl4a_server throws an error if all retries
- fail.
- """
- adb = mock.Mock()
- adb.shell = lambda _, **kwargs: ""
-
- side_effects = []
- for _ in range(
- int(
- sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS
- / sl4a_manager.ATTEMPT_INTERVAL
- )
- ):
- side_effects.append(None)
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
- try:
- manager.start_sl4a_server(0)
- self.fail("Sl4aConnectionError was not thrown.")
- except rpc_client.Sl4aConnectionError:
- pass
-
- def test_get_all_ports_command_uses_root_cmd(self):
- """Tests sl4a_manager.Sl4aManager._get_all_ports_command().
-
- Tests that _get_all_ports_command calls the root command when root is
- available.
- """
- adb = mock.Mock()
- adb.is_root = lambda: True
- command = "ngo45hke3b4vie3mv5ni93,vfu3j"
- sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- self.assertEqual(manager._get_all_ports_command(), command)
-
- def test_get_all_ports_command_escalates_to_root(self):
- """Tests sl4a_manager.Sl4aManager._call_get_ports_command().
-
- Tests that _call_get_ports_command calls the root command when adb is
- user but can escalate to root.
- """
- adb = mock.Mock()
- adb.is_root = lambda: False
- adb.ensure_root = lambda: True
- command = "ngo45hke3b4vie3mv5ni93,vfu3j"
- sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- self.assertEqual(manager._get_all_ports_command(), command)
-
- def test_get_all_ports_command_uses_user_cmd(self):
- """Tests sl4a_manager.Sl4aManager._call_get_ports_command().
-
- Tests that _call_get_ports_command calls the user command when root is
- unavailable.
- """
- adb = mock.Mock()
- adb.is_root = lambda: False
- adb.ensure_root = lambda: False
- command = "ngo45hke3b4vie3mv5ni93,vfu3j"
- sl4a_manager._SL4A_USER_FIND_PORT_CMD = command
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- self.assertEqual(manager._get_all_ports_command(), command)
-
- def test_get_open_listening_port_no_port_found(self):
- """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
-
- Tests to ensure None is returned if no open port is found.
- """
- adb = mock.Mock()
- adb.shell = lambda _: ""
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- self.assertIsNone(manager._get_open_listening_port())
-
- def test_get_open_listening_port_no_new_port_found(self):
- """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
-
- Tests to ensure None is returned if the ports returned have all been
-        marked as in use.
- """
- adb = mock.Mock()
- adb.shell = lambda _: "12345 67890"
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- manager._sl4a_ports = {"12345", "67890"}
- self.assertIsNone(manager._get_open_listening_port())
-
-    def test_get_open_listening_port_port_is_available(self):
- """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
-
- Tests to ensure a port is returned if a port is found and has not been
- marked as used.
- """
- adb = mock.Mock()
- adb.shell = lambda _: "12345 67890"
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- manager._sl4a_ports = {"12345"}
- self.assertEqual(manager._get_open_listening_port(), 67890)
-
- def test_is_sl4a_installed_is_true(self):
- """Tests sl4a_manager.Sl4aManager.is_sl4a_installed().
-
- Tests is_sl4a_installed() returns true when pm returns data
- """
- adb = mock.Mock()
- adb.shell = lambda _, **kwargs: "asdf"
- manager = sl4a_manager.create_sl4a_manager(adb)
- self.assertTrue(manager.is_sl4a_installed())
-
- def test_is_sl4a_installed_is_false(self):
- """Tests sl4a_manager.Sl4aManager.is_sl4a_installed().
-
-        Tests is_sl4a_installed() returns false when pm returns no data
- """
- adb = mock.Mock()
- adb.shell = lambda _, **kwargs: ""
- manager = sl4a_manager.create_sl4a_manager(adb)
- self.assertFalse(manager.is_sl4a_installed())
-
- def test_start_sl4a_throws_error_on_sl4a_not_installed(self):
- """Tests sl4a_manager.Sl4aManager.start_sl4a_service().
-
- Tests that a MissingSl4aError is thrown when SL4A is not installed.
- """
- adb = mock.Mock()
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- manager.is_sl4a_installed = lambda: False
- try:
- manager.start_sl4a_service()
- self.fail("An error should have been thrown.")
- except rpc_client.Sl4aNotInstalledError:
- pass
-
- def test_start_sl4a_starts_sl4a_if_not_running(self):
- """Tests sl4a_manager.Sl4aManager.start_sl4a_service().
-
- Tests that SL4A is started if it was not already running.
- """
- adb = mock.Mock()
- adb.shell = mock.Mock(side_effect=["", "", ""])
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- manager.is_sl4a_installed = lambda: True
- try:
- manager.start_sl4a_service()
- except rpc_client.Sl4aNotInstalledError:
- self.fail("An error should not have been thrown.")
- adb.shell.assert_called_with(sl4a_manager._SL4A_START_SERVICE_CMD)
-
- def test_create_session_uses_oldest_server_port(self):
- """Tests sl4a_manager.Sl4aManager.create_session().
-
- Tests that when no port is given, the oldest server port opened is used
- as the server port for a new session. The oldest server port can be
- found by getting the oldest session's server port.
- """
- adb = mock.Mock()
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- # Ignore starting SL4A.
- manager.start_sl4a_service = lambda: None
-
- session_1 = mock.Mock()
- session_1.server_port = 12345
- session_2 = mock.Mock()
- session_2.server_port = 67890
- session_3 = mock.Mock()
- session_3.server_port = 67890
-
- manager.sessions[3] = session_3
- manager.sessions[1] = session_1
- manager.sessions[2] = session_2
-
- with mock.patch.object(
- rpc_client.RpcClient, "__init__", return_value=None
- ):
- created_session = manager.create_session()
-
- self.assertEqual(created_session.server_port, session_1.server_port)
-
- def test_create_session_uses_random_port_when_no_session_exists(self):
- """Tests sl4a_manager.Sl4aManager.create_session().
-
- Tests that when no port is given, and no SL4A server exists, the server
- port for the session is set to 0.
- """
- adb = mock.Mock()
-
- manager = sl4a_manager.create_sl4a_manager(adb)
- # Ignore starting SL4A.
- manager.start_sl4a_service = lambda: None
-
- with mock.patch.object(
- rpc_client.RpcClient, "__init__", return_value=None
- ):
- created_session = manager.create_session()
-
- self.assertEqual(created_session.server_port, 0)
-
- def test_terminate_all_session_call_terminate_on_all_sessions(self):
- """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
-
- Tests to see that the manager has called terminate on all sessions.
- """
- called_terminate_on = list()
-
- def called_on(session):
- called_terminate_on.append(session)
-
- adb = mock.Mock()
- manager = sl4a_manager.Sl4aManager(adb)
-
- session_1 = mock.Mock()
- session_1.terminate = lambda *args, **kwargs: called_on(session_1)
- manager.sessions[1] = session_1
- session_4 = mock.Mock()
- session_4.terminate = lambda *args, **kwargs: called_on(session_4)
- manager.sessions[4] = session_4
- session_5 = mock.Mock()
- session_5.terminate = lambda *args, **kwargs: called_on(session_5)
- manager.sessions[5] = session_5
-
- manager._get_all_ports = lambda: []
- manager.terminate_all_sessions()
- # No duplicates calls to terminate.
- self.assertEqual(
- len(called_terminate_on), len(set(called_terminate_on))
- )
- # One call for each session
- self.assertSetEqual(
- set(called_terminate_on), {session_1, session_4, session_5}
- )
-
- def test_terminate_all_session_close_each_server(self):
- """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
-
-        Tests to see that the manager closes each open server port.
- """
- closed_ports = list()
-
- def close(command):
- if str.isdigit(command):
- closed_ports.append(command)
- return ""
-
- adb = mock.Mock()
- adb.shell = close
- sl4a_manager._SL4A_CLOSE_SERVER_CMD = "%s"
- ports_to_close = {"12345", "67890", "24680", "13579"}
-
- manager = sl4a_manager.Sl4aManager(adb)
- manager._sl4a_ports = set(ports_to_close)
- manager._get_all_ports = lambda: []
- manager.terminate_all_sessions()
-
- # No duplicate calls to close port
- self.assertEqual(len(closed_ports), len(set(closed_ports)))
- # One call for each port
- self.assertSetEqual(ports_to_close, set(closed_ports))
-
- def test_obtain_sl4a_server_starts_new_server(self):
- """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
-
- Tests that a new server can be returned if the server does not exist.
- """
- adb = mock.Mock()
- manager = sl4a_manager.Sl4aManager(adb)
- manager.start_sl4a_server = mock.Mock()
-
- manager.obtain_sl4a_server(0)
-
- self.assertTrue(manager.start_sl4a_server.called)
-
- @mock.patch(
- "antlion.controllers.sl4a_lib.sl4a_manager.Sl4aManager.sl4a_ports_in_use",
- new_callable=mock.PropertyMock,
- )
- def test_obtain_sl4a_server_returns_existing_server(
- self, sl4a_ports_in_use
- ):
- """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
-
- Tests that an existing server is returned if it is already opened.
- """
- adb = mock.Mock()
- manager = sl4a_manager.Sl4aManager(adb)
- manager.start_sl4a_server = mock.Mock()
- sl4a_ports_in_use.return_value = [12345]
-
- ret = manager.obtain_sl4a_server(12345)
-
- self.assertFalse(manager.start_sl4a_server.called)
- self.assertEqual(12345, ret)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
deleted file mode 100755
index c28ba5b..0000000
--- a/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import errno
-import unittest
-from socket import error as socket_error
-from socket import timeout
-
-import mock
-from mock import patch
-
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.sl4a_lib import rpc_client, sl4a_ports
-from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
-from antlion.controllers.sl4a_lib.sl4a_session import Sl4aSession
-
-
-class Sl4aSessionTest(unittest.TestCase):
- """Tests the Sl4aSession class."""
-
- def test_is_alive_true_on_not_terminated(self):
- """Tests Sl4aSession.is_alive.
-
- Tests that the session is_alive when it has not been terminated.
- """
- session = mock.Mock()
- session._terminated = False
- session.is_alive = Sl4aSession.is_alive
- self.assertNotEqual(session._terminated, session.is_alive)
-
- def test_is_alive_false_on_terminated(self):
- """Tests Sl4aSession.is_alive.
-
-        Tests that the session is not alive when it has been terminated.
- """
- session = mock.Mock()
- session._terminated = True
- session.is_alive = Sl4aSession.is_alive
- self.assertNotEqual(session._terminated, session.is_alive)
-
- @patch("antlion.controllers.sl4a_lib.event_dispatcher.EventDispatcher")
- def test_get_event_dispatcher_create_on_none(self, _):
- """Tests Sl4aSession.get_event_dispatcher.
-
- Tests that a new event_dispatcher is created if one does not exist.
- """
- session = mock.Mock()
- session._event_dispatcher = None
- ed = Sl4aSession.get_event_dispatcher(session)
- self.assertTrue(session._event_dispatcher is not None)
- self.assertEqual(session._event_dispatcher, ed)
-
- def test_get_event_dispatcher_returns_existing_event_dispatcher(self):
- """Tests Sl4aSession.get_event_dispatcher.
-
- Tests that the existing event_dispatcher is returned.
- """
- session = mock.Mock()
- session._event_dispatcher = "Something that is not None"
- ed = Sl4aSession.get_event_dispatcher(session)
- self.assertEqual(session._event_dispatcher, ed)
-
- def test_create_client_side_connection_hint_already_in_use(self):
- """Tests Sl4aSession._create_client_side_connection().
-
- Tests that if the hinted port is already in use, the function will
- call itself with a hinted port of 0 (random).
- """
- session = mock.Mock()
- session._create_client_side_connection = mock.Mock()
- with mock.patch("socket.socket") as socket:
- # Throw an error when trying to bind to the hinted port.
- error = OSError()
- error.errno = errno.EADDRINUSE
- socket_instance = mock.Mock()
- socket_instance.bind = mock.Mock()
- socket_instance.bind.side_effect = error
- socket.return_value = socket_instance
-
- Sl4aSession._create_client_side_connection(
- session, sl4a_ports.Sl4aPorts(1, 2, 3)
- )
-
- fn = session._create_client_side_connection
- self.assertEqual(fn.call_count, 1)
- # Asserts that the 1st argument (Sl4aPorts) sent to the function
- # has a client port of 0.
- self.assertEqual(fn.call_args_list[0][0][0].client_port, 0)
-
- def test_create_client_side_connection_catches_timeout(self):
- """Tests Sl4aSession._create_client_side_connection().
-
- Tests that the function will raise an Sl4aConnectionError upon timeout.
- """
- session = mock.Mock()
- session._create_client_side_connection = mock.Mock()
- error = timeout()
- with mock.patch("socket.socket") as socket:
- # Throw an error when trying to bind to the hinted port.
- socket_instance = mock.Mock()
- socket_instance.connect = mock.Mock()
- socket_instance.connect.side_effect = error
- socket.return_value = socket_instance
-
- with self.assertRaises(rpc_client.Sl4aConnectionError):
- Sl4aSession._create_client_side_connection(
- session, sl4a_ports.Sl4aPorts(0, 2, 3)
- )
-
- def test_create_client_side_connection_hint_taken_during_fn(self):
- """Tests Sl4aSession._create_client_side_connection().
-
-        Tests that the function will catch an EADDRNOTAVAIL OSError and
- call itself again, this time with a hinted port of 0 (random).
- """
- session = mock.Mock()
- session._create_client_side_connection = mock.Mock()
- error = socket_error()
- error.errno = errno.EADDRNOTAVAIL
- with mock.patch("socket.socket") as socket:
- # Throw an error when trying to bind to the hinted port.
- socket_instance = mock.Mock()
- socket_instance.connect = mock.Mock()
- socket_instance.connect.side_effect = error
- socket.return_value = socket_instance
-
- Sl4aSession._create_client_side_connection(
- session, sl4a_ports.Sl4aPorts(0, 2, 3)
- )
-
- fn = session._create_client_side_connection
- self.assertEqual(fn.call_count, 1)
- # Asserts that the 1st argument (Sl4aPorts) sent to the function
- # has a client port of 0.
- self.assertEqual(fn.call_args_list[0][0][0].client_port, 0)
-
- def test_create_client_side_connection_re_raises_uncaught_errors(self):
- """Tests Sl4aSession._create_client_side_connection().
-
- Tests that the function will re-raise any socket error that does not
- have errno.EADDRNOTAVAIL.
- """
- session = mock.Mock()
- session._create_client_side_connection = mock.Mock()
- error = socket_error()
- # Some error that isn't EADDRNOTAVAIL
- error.errno = errno.ESOCKTNOSUPPORT
- with mock.patch("socket.socket") as socket:
- # Throw an error when trying to bind to the hinted port.
- socket_instance = mock.Mock()
- socket_instance.connect = mock.Mock()
- socket_instance.connect.side_effect = error
- socket.return_value = socket_instance
-
- with self.assertRaises(socket_error):
- Sl4aSession._create_client_side_connection(
- session, sl4a_ports.Sl4aPorts(0, 2, 3)
- )
-
- def test_terminate_only_closes_if_not_terminated(self):
- """Tests Sl4aSession.terminate()
-
- Tests that terminate only runs termination steps if the session has not
- already been terminated.
- """
- session = mock.Mock()
- session._terminate_lock = mock.MagicMock()
- session._terminated = True
- Sl4aSession.terminate(session)
-
- self.assertFalse(session._event_dispatcher.close.called)
- self.assertFalse(session.rpc_client.terminate.called)
-
- def test_terminate_closes_session_first(self):
- """Tests Sl4aSession.terminate()
-
- Tests that terminate only runs termination steps if the session has not
- already been terminated.
- """
- session = mock.Mock()
- session._terminate_lock = mock.MagicMock()
- session._terminated = True
- Sl4aSession.terminate(session)
-
- self.assertFalse(session._event_dispatcher.close.called)
- self.assertFalse(session.rpc_client.terminate.called)
-
- def test_create_forwarded_port(self):
- """Tests Sl4aSession._create_forwarded_port returns the hinted port."""
- mock_adb = mock.Mock()
- mock_adb.get_version_number = lambda: 37
- mock_adb.tcp_forward = lambda hinted_port, device_port: hinted_port
- mock_session = mock.Mock()
- mock_session.adb = mock_adb
- mock_session.log = mock.Mock()
-
- self.assertEqual(
- 8080, Sl4aSession._create_forwarded_port(mock_session, 9999, 8080)
- )
-
- def test_create_forwarded_port_fail_once(self):
- """Tests that _create_forwarded_port can return a non-hinted port.
-
- This will only happen if the hinted port is already taken.
- """
- mock_adb = mock.Mock()
- mock_adb.get_version_number = lambda: 37
-
- mock_adb.tcp_forward = mock.Mock(
- side_effect=AdbError(
- "cmd", "stdout", stderr="cannot bind listener", ret_code=1
- )
- )
- mock_session = mock.MagicMock()
- mock_session.adb = mock_adb
- mock_session.log = mock.Mock()
- mock_session._create_forwarded_port = lambda *args, **kwargs: 12345
-
- self.assertEqual(
- 12345, Sl4aSession._create_forwarded_port(mock_session, 9999, 8080)
- )
-
- def test_create_forwarded_port_raises_if_adb_version_is_old(self):
- """Tests that _create_forwarded_port raises if adb version < 37."""
- mock_adb = mock.Mock()
- mock_adb.get_version_number = lambda: 31
- mock_adb.tcp_forward = lambda _, __: self.fail(
- "Calling adb.tcp_forward despite ADB version being too old."
- )
- mock_session = mock.Mock()
- mock_session.adb = mock_adb
- mock_session.log = mock.Mock()
- with self.assertRaises(Sl4aStartError):
- Sl4aSession._create_forwarded_port(mock_session, 9999, 0)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/event/__init__.py b/packages/antlion/unit_tests/event/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/event/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/event/decorators_test.py b/packages/antlion/unit_tests/event/decorators_test.py
deleted file mode 100755
index e1542b5..0000000
--- a/packages/antlion/unit_tests/event/decorators_test.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from mock import Mock
-
-from antlion.event.decorators import subscribe_static
-from antlion.event.subscription_handle import SubscriptionHandle
-
-
-class DecoratorsTest(TestCase):
- """Tests the decorators found in antlion.event.decorators."""
-
- def test_subscribe_static_return_type(self):
- """Tests that the subscribe_static is the correct type."""
- mock = Mock()
-
- @subscribe_static(type)
- def test(_):
- return mock
-
- self.assertTrue(isinstance(test, SubscriptionHandle))
-
- def test_subscribe_static_calling_the_function_returns_normally(self):
- """Tests that functions decorated by subscribe_static can be called."""
- static_mock = Mock()
-
- @subscribe_static(type)
- def test(_):
- return static_mock
-
- self.assertEqual(test(Mock()), static_mock)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/event/event_bus_test.py b/packages/antlion/unit_tests/event/event_bus_test.py
deleted file mode 100755
index 6b88628..0000000
--- a/packages/antlion/unit_tests/event/event_bus_test.py
+++ /dev/null
@@ -1,273 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from mock import Mock, patch
-
-from antlion.event import event_bus
-from antlion.event.event import Event
-from antlion.event.event_subscription import EventSubscription
-
-
-class EventBusTest(TestCase):
- """Tests the event_bus functions."""
-
- def setUp(self):
- """Clears all state from the event_bus between test cases."""
- event_bus._event_bus = event_bus._EventBus()
-
- def get_subscription_argument(self, register_subscription_call):
- """Gets the subscription argument from a register_subscription call."""
- return register_subscription_call[0][0]
-
- @patch("antlion.event.event_bus._event_bus.register_subscription")
- def test_register_registers_a_subscription(self, register_subscription):
- """Tests that register creates and registers a subscription."""
- mock_event = Mock()
- mock_func = Mock()
- order = 43
- event_bus.register(mock_event, mock_func, order=order)
-
- args, _ = register_subscription.call_args
- subscription = args[0]
-
-        # Instead of writing an equality operator used only for testing,
-        # check the internals to make sure they hold the expected values.
- self.assertEqual(subscription._event_type, mock_event)
- self.assertEqual(subscription._func, mock_func)
- self.assertEqual(subscription.order, order)
-
- @patch("antlion.event.event_bus._event_bus.register_subscription")
- def test_register_subscriptions_for_list(self, register_subscription):
- """Tests that register_subscription is called for each subscription."""
- mocks = [Mock(), Mock(), Mock()]
- subscriptions = [
- EventSubscription(mocks[0], lambda _: None),
- EventSubscription(mocks[1], lambda _: None),
- EventSubscription(mocks[2], lambda _: None),
- ]
-
- event_bus.register_subscriptions(subscriptions)
- received_subscriptions = set()
- for index, call in enumerate(register_subscription.call_args_list):
- received_subscriptions.add(self.get_subscription_argument(call))
-
- self.assertEqual(register_subscription.call_count, len(subscriptions))
- self.assertSetEqual(received_subscriptions, set(subscriptions))
-
- def test_register_subscription_new_event_type(self):
- """Tests that the event_bus can register a new event type."""
- mock_type = Mock()
- bus = event_bus._event_bus
- subscription = EventSubscription(mock_type, lambda _: None)
-
- reg_id = event_bus.register_subscription(subscription)
-
- self.assertTrue(mock_type in bus._subscriptions.keys())
- self.assertTrue(subscription in bus._subscriptions[mock_type])
- self.assertTrue(reg_id in bus._registration_id_map.keys())
-
- def test_register_subscription_existing_type(self):
- """Tests that the event_bus can register an existing event type."""
- mock_type = Mock()
- bus = event_bus._event_bus
- bus._subscriptions[mock_type] = [
- EventSubscription(mock_type, lambda _: None)
- ]
- new_subscription = EventSubscription(mock_type, lambda _: True)
-
- reg_id = event_bus.register_subscription(new_subscription)
-
- self.assertTrue(new_subscription in bus._subscriptions[mock_type])
- self.assertTrue(reg_id in bus._registration_id_map.keys())
-
- def test_post_to_unregistered_event_does_not_call_other_funcs(self):
- """Tests posting an unregistered event will not call other funcs."""
- mock_subscription = Mock()
- bus = event_bus._event_bus
- mock_type = Mock()
- mock_subscription.event_type = mock_type
- bus._subscriptions[mock_type] = [mock_subscription]
-
- event_bus.post(Mock())
-
- self.assertEqual(mock_subscription.deliver.call_count, 0)
-
- def test_post_to_registered_event_calls_all_registered_funcs(self):
- """Tests posting to a registered event calls all registered funcs."""
- mock_subscriptions = [Mock(), Mock(), Mock()]
- bus = event_bus._event_bus
- for subscription in mock_subscriptions:
- subscription.order = 0
- mock_event = Mock()
- bus._subscriptions[type(mock_event)] = mock_subscriptions
-
- event_bus.post(mock_event)
-
- for subscription in mock_subscriptions:
- subscription.deliver.assert_called_once_with(mock_event)
-
- def test_post_with_ignore_errors_calls_all_registered_funcs(self):
- """Tests posting with ignore_errors=True calls all registered funcs,
- even if they raise errors.
- """
-
- def _raise(_):
- raise Exception
-
- mock_event = Mock()
- mock_subscriptions = [Mock(), Mock(), Mock()]
- mock_subscriptions[0].deliver.side_effect = _raise
- bus = event_bus._event_bus
- for i, subscription in enumerate(mock_subscriptions):
- subscription.order = i
- bus._subscriptions[type(mock_event)] = mock_subscriptions
-
- event_bus.post(mock_event, ignore_errors=True)
-
- for subscription in mock_subscriptions:
- subscription.deliver.assert_called_once_with(mock_event)
-
- @patch("antlion.event.event_bus._event_bus.unregister")
- def test_unregister_all_from_list(self, unregister):
- """Tests unregistering from a list unregisters the specified list."""
- unregister_list = [Mock(), Mock()]
-
- event_bus.unregister_all(from_list=unregister_list)
-
- self.assertEqual(unregister.call_count, len(unregister_list))
- for args, _ in unregister.call_args_list:
- subscription = args[0]
- self.assertTrue(subscription in unregister_list)
-
- @patch("antlion.event.event_bus._event_bus.unregister")
- def test_unregister_all_from_event(self, unregister):
- """Tests that all subscriptions under the event are unregistered."""
- mock_event = Mock()
- mock_event_2 = Mock()
- bus = event_bus._event_bus
- unregister_list = [Mock(), Mock()]
- bus._subscriptions[type(mock_event_2)] = [Mock(), Mock(), Mock()]
- bus._subscriptions[type(mock_event)] = unregister_list
- for sub_type in bus._subscriptions.keys():
- for subscription in bus._subscriptions[sub_type]:
- subscription.event_type = sub_type
- bus._registration_id_map[id(subscription)] = subscription
-
- event_bus.unregister_all(from_event=type(mock_event))
-
- self.assertEqual(unregister.call_count, len(unregister_list))
- for args, _ in unregister.call_args_list:
- subscription = args[0]
- self.assertTrue(subscription in unregister_list)
-
- @patch("antlion.event.event_bus._event_bus.unregister")
- def test_unregister_all_no_args_unregisters_everything(self, unregister):
- """Tests unregister_all without arguments will unregister everything."""
- mock_event_1 = Mock()
- mock_event_2 = Mock()
- bus = event_bus._event_bus
- unregister_list_1 = [Mock(), Mock()]
- unregister_list_2 = [Mock(), Mock(), Mock()]
- bus._subscriptions[type(mock_event_1)] = unregister_list_1
- bus._subscriptions[type(mock_event_2)] = unregister_list_2
- for sub_type in bus._subscriptions.keys():
- for subscription in bus._subscriptions[sub_type]:
- subscription.event_type = sub_type
- bus._registration_id_map[id(subscription)] = subscription
-
- event_bus.unregister_all()
-
- self.assertEqual(
- unregister.call_count,
- len(unregister_list_1) + len(unregister_list_2),
- )
- for args, _ in unregister.call_args_list:
- subscription = args[0]
- self.assertTrue(
- subscription in unregister_list_1
- or subscription in unregister_list_2
- )
-
- def test_unregister_given_an_event_subscription(self):
- """Tests that unregister can unregister a given EventSubscription."""
- mock_event = Mock()
- bus = event_bus._event_bus
- subscription = EventSubscription(type(mock_event), lambda _: None)
- bus._registration_id_map[id(subscription)] = subscription
- bus._subscriptions[type(mock_event)] = [subscription]
-
- val = event_bus.unregister(subscription)
-
- self.assertTrue(val)
- self.assertTrue(subscription not in bus._registration_id_map)
- self.assertTrue(
- subscription not in bus._subscriptions[type(mock_event)]
- )
-
- def test_unregister_given_a_registration_id(self):
- """Tests that unregister can unregister a given EventSubscription."""
- mock_event = Mock()
- bus = event_bus._event_bus
- subscription = EventSubscription(type(mock_event), lambda _: None)
- registration_id = id(subscription)
- bus._registration_id_map[id(subscription)] = subscription
- bus._subscriptions[type(mock_event)] = [subscription]
-
- val = event_bus.unregister(registration_id)
-
- self.assertTrue(val)
- self.assertTrue(subscription not in bus._registration_id_map)
- self.assertTrue(
- subscription not in bus._subscriptions[type(mock_event)]
- )
-
- def test_unregister_given_object_that_is_not_a_subscription(self):
- """Asserts that a ValueError is raised upon invalid arguments."""
- with self.assertRaises(ValueError):
- event_bus.unregister(Mock())
-
- def test_unregister_given_invalid_registration_id(self):
- """Asserts that a false is returned upon invalid registration_id."""
- val = event_bus.unregister(9)
- self.assertFalse(val)
-
- def test_listen_for_registers_listener(self):
- """Tests listen_for registers the listener within the with statement."""
- bus = event_bus._event_bus
-
- def event_listener(_):
- pass
-
- with event_bus.listen_for(Event, event_listener):
- self.assertEqual(len(bus._registration_id_map), 1)
-
- def test_listen_for_unregisters_listener(self):
- """Tests listen_for unregisters the listener after the with statement."""
- bus = event_bus._event_bus
-
- def event_listener(_):
- pass
-
- with event_bus.listen_for(Event, event_listener):
- pass
-
- self.assertEqual(len(bus._registration_id_map), 0)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/event/event_subscription_test.py b/packages/antlion/unit_tests/event/event_subscription_test.py
deleted file mode 100755
index 50415dd..0000000
--- a/packages/antlion/unit_tests/event/event_subscription_test.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from mock import Mock
-
-from antlion.event.event_subscription import EventSubscription
-
-
-class EventSubscriptionTest(TestCase):
- """Tests the EventSubscription class."""
-
- @staticmethod
- def filter_out_event(_):
- return False
-
- @staticmethod
- def pass_filter(_):
- return True
-
- def test_event_type_returns_correct_value(self):
- """Tests that event_type returns the correct event type."""
- expected_event_type = Mock()
- subscription = EventSubscription(expected_event_type, lambda _: None)
- self.assertEqual(expected_event_type, subscription.event_type)
-
- def test_deliver_dont_deliver_if_event_is_filtered(self):
- """Tests deliver does not call func if the event is filtered out."""
- func = Mock()
- subscription = EventSubscription(
- Mock(), func, event_filter=self.filter_out_event
- )
-
- subscription.deliver(Mock())
-
- self.assertFalse(func.called)
-
- def test_deliver_deliver_accepted_event(self):
- """Tests deliver does call func when the event is accepted."""
- func = Mock()
- subscription = EventSubscription(
- Mock(), func, event_filter=self.pass_filter
- )
-
- subscription.deliver(Mock())
- self.assertTrue(func.called)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/__init__.py b/packages/antlion/unit_tests/libs/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/libs/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/libs/logging/__init__.py b/packages/antlion/unit_tests/libs/logging/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/libs/logging/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/libs/logging/log_stream_test.py b/packages/antlion/unit_tests/libs/logging/log_stream_test.py
deleted file mode 100755
index 33de813..0000000
--- a/packages/antlion/unit_tests/libs/logging/log_stream_test.py
+++ /dev/null
@@ -1,477 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import unittest
-
-import mock
-
-from antlion import context
-from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import (
- AlsoToLogHandler,
- InvalidStyleSetError,
- LogStyles,
- _LogStream,
-)
-
-
-class TestClass(object):
- """Dummy class for TestEvents"""
-
- def __init__(self):
- self.test_name = self.test_case.__name__
-
- def test_case(self):
- """Dummy test case for test events."""
-
-
-class LogStreamTest(unittest.TestCase):
- """Tests the _LogStream class in antlion.libs.logging.log_stream."""
-
- @staticmethod
- def patch(imported_name, *args, **kwargs):
- return mock.patch(
- f"antlion.libs.logging.log_stream.{imported_name}", *args, **kwargs
- )
-
- @classmethod
- def setUpClass(cls):
- # logging.log_path only exists if logger._setup_test_logger is called.
- # Here we set it to a value that is likely to not exist so file IO is
- # not executed (an error is raised instead of creating the file).
- logging.log_path = "/f/a/i/l/p/a/t/h"
-
- def setUp(self):
- log_stream._log_streams = dict()
-
- # __init__
-
- @mock.patch("os.makedirs")
- def test_init_adds_null_handler(self, *_):
- """Tests that a NullHandler is added to the logger upon initialization.
- This ensures that no log output is generated when a test class is not
- running.
- """
- debug_monolith_log = LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG
- with self.patch("MovableFileHandler"):
- log = log_stream.create_logger(
- self._testMethodName, log_styles=debug_monolith_log
- )
-
- self.assertTrue(isinstance(log.handlers[0], logging.NullHandler))
-
- # __validate_style
-
- @mock.patch("os.makedirs")
- def test_validate_styles_raises_when_same_location_set_multiple_times(
- self, *_
- ):
- """Tests that a style is invalid if it sets the same handler twice.
-
- If the error is NOT raised, then a LogStream can create a Logger that
- has multiple LogHandlers trying to write to the same file.
- """
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName,
- log_styles=[
- LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
- LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
- ],
- )
- self.assertTrue(
- "has been set multiple" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- @mock.patch("os.makedirs")
- def test_validate_styles_raises_when_multiple_file_outputs_set(self, *_):
- """Tests that a style is invalid if more than one of MONOLITH_LOG,
- TESTCLASS_LOG, and TESTCASE_LOG is set for the same log level.
-
- If the error is NOT raised, then a LogStream can create a Logger that
- has multiple LogHandlers trying to write to the same file.
- """
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName,
- log_styles=[
- LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
- LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
- ],
- )
- self.assertTrue(
- "More than one of" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName,
- log_styles=[
- LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
- LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
- ],
- )
- self.assertTrue(
- "More than one of" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName,
- log_styles=[
- LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
- LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
- LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
- ],
- )
- self.assertTrue(
- "More than one of" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- @mock.patch("os.makedirs")
- def test_validate_styles_raises_when_no_level_exists(self, *_):
- """Tests that a style is invalid if it does not contain a log level.
-
- If the style does not contain a log level, then there is no way to
- pass the information coming from the logger to the correct file.
- """
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName, log_styles=[LogStyles.MONOLITH_LOG]
- )
-
- self.assertTrue(
- "log level" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- @mock.patch("os.makedirs")
- def test_validate_styles_raises_when_no_location_exists(self, *_):
- """Tests that a style is invalid if it does not contain a log level.
-
- If the style does not contain a log level, then there is no way to
- pass the information coming from the logger to the correct file.
- """
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName, log_styles=[LogStyles.LOG_INFO]
- )
-
- self.assertTrue(
- "log location" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- @mock.patch("os.makedirs")
- def test_validate_styles_raises_when_rotate_logs_no_file_handler(self, *_):
- """Tests that a LogStyle cannot set ROTATE_LOGS without *_LOG flag.
-
- If the LogStyle contains ROTATE_LOGS, it must be associated with a log
- that is rotatable. TO_ACTS_LOG and TO_STDOUT are not rotatable logs,
- since those are both controlled by another object/process. The user
-        must specify MONOLITH_LOG or TESTCASE_LOG.
- """
- with self.assertRaises(InvalidStyleSetError) as catch:
- log_stream.create_logger(
- self._testMethodName,
- # Added LOG_DEBUG here to prevent the no_level_exists raise from
- # occurring.
- log_styles=[LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS],
- )
-
- self.assertTrue(
- "log type" in catch.exception.args[0],
- msg="__validate_styles did not raise the expected error message",
- )
-
- # __handle_style
-
- @mock.patch("os.makedirs")
- def test_handle_style_to_acts_log_creates_handler(self, *_):
- """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler."""
- info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
-
- log = log_stream.create_logger(
- self._testMethodName, log_styles=info_acts_log
- )
-
- self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
-
- @mock.patch("os.makedirs")
- def test_handle_style_to_acts_log_creates_handler_is_lowest_level(self, *_):
- """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler
- that is set to the lowest LogStyles level."""
- info_acts_log = (
- LogStyles.LOG_DEBUG + LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
- )
-
- log = log_stream.create_logger(
- self._testMethodName, log_styles=info_acts_log
- )
-
- self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
- self.assertEqual(log.handlers[1].level, logging.DEBUG)
-
- @mock.patch("os.makedirs")
- def test_handle_style_to_stdout_creates_stream_handler(self, *_):
- """Tests that using the flag TO_STDOUT creates a StreamHandler."""
- info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_STDOUT
-
- log = log_stream.create_logger(
- self._testMethodName, log_styles=info_acts_log
- )
-
- self.assertTrue(isinstance(log.handlers[1], logging.StreamHandler))
-
- @mock.patch("os.makedirs")
- def test_handle_style_creates_file_handler(self, *_):
- """Tests handle_style creates a MovableFileHandler for the MONOLITH_LOG."""
- info_acts_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-
- expected = mock.MagicMock()
- with self.patch("MovableFileHandler", return_value=expected):
- log = log_stream.create_logger(
- self._testMethodName, log_styles=info_acts_log
- )
-
- self.assertEqual(log.handlers[1], expected)
-
- @mock.patch("os.makedirs")
- def test_handle_style_creates_rotating_file_handler(self, *_):
- """Tests handle_style creates a MovableFileHandler for the ROTATE_LOGS."""
- info_acts_log = (
- LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS + LogStyles.MONOLITH_LOG
- )
-
- expected = mock.MagicMock()
- with self.patch("MovableRotatingFileHandler", return_value=expected):
- log = log_stream.create_logger(
- self._testMethodName, log_styles=info_acts_log
- )
-
- self.assertEqual(log.handlers[1], expected)
-
- # __create_rotating_file_handler
-
- def test_create_rotating_file_handler_does_what_it_says_it_does(self):
- """Tests that __create_rotating_file_handler does exactly that."""
- expected = mock.MagicMock()
-
- with self.patch("MovableRotatingFileHandler", return_value=expected):
- # Through name-mangling, this function is automatically renamed. See
- # https://docs.python.org/3/tutorial/classes.html#private-variables
- fh = _LogStream._LogStream__create_rotating_file_handler("")
-
- self.assertEqual(
- expected,
- fh,
- "The function did not return a MovableRotatingFileHandler.",
- )
-
- # __get_file_handler_creator
-
- def test_get_file_handler_creator_returns_rotating_file_handler(self):
- """Tests the function returns a MovableRotatingFileHandler when the log_style
- has LogStyle.ROTATE_LOGS."""
- expected = mock.MagicMock()
-
- with self.patch(
- "_LogStream._LogStream__create_rotating_file_handler",
- return_value=expected,
- ):
- # Through name-mangling, this function is automatically renamed. See
- # https://docs.python.org/3/tutorial/classes.html#private-variables
- fh_creator = _LogStream._LogStream__get_file_handler_creator(
- LogStyles.ROTATE_LOGS
- )
-
- self.assertEqual(
- expected,
- fh_creator("/d/u/m/m/y/p/a/t/h"),
- "The function did not return a MovableRotatingFileHandler.",
- )
-
- def test_get_file_handler_creator_returns_file_handler(self):
- """Tests the function returns a MovableFileHandler when the log_style does NOT
- have LogStyle.ROTATE_LOGS."""
- expected = mock.MagicMock()
-
- with self.patch("MovableFileHandler", return_value=expected):
- # Through name-mangling, this function is automatically renamed. See
- # https://docs.python.org/3/tutorial/classes.html#private-variables
- handler = _LogStream._LogStream__get_file_handler_creator(
- LogStyles.NONE
- )()
-
- self.assertTrue(isinstance(handler, mock.Mock))
-
- # __get_lowest_log_level
-
- def test_get_lowest_level_gets_lowest_level(self):
- """Tests __get_lowest_level returns the lowest LogStyle level given."""
- level = _LogStream._LogStream__get_lowest_log_level(
- LogStyles.ALL_LEVELS
- )
- self.assertEqual(level, LogStyles.LOG_DEBUG)
-
- # __get_current_output_dir
-
- @mock.patch("os.makedirs")
- def test_get_current_output_dir_gets_correct_path(self, *_):
- """Tests __get_current_output_dir gets the correct path from the context"""
- info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-
- base_path = "BASEPATH"
- subcontext = "SUBCONTEXT"
- with self.patch("MovableFileHandler"):
- logstream = log_stream._LogStream(
- self._testMethodName,
- log_styles=info_monolith_log,
- base_path=base_path,
- subcontext=subcontext,
- )
-
- expected = os.path.join(base_path, subcontext)
- self.assertEqual(
- logstream._LogStream__get_current_output_dir(), expected
- )
-
- # __create_handler
-
- @mock.patch("os.makedirs")
- def test_create_handler_creates_handler_at_correct_path(self, *_):
- """Tests that __create_handler calls the handler creator with the
- correct absolute path to the log file.
- """
- info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
- base_path = "BASEPATH"
- with self.patch("MovableFileHandler") as file_handler:
- log_stream.create_logger(
- self._testMethodName,
- log_styles=info_monolith_log,
- base_path=base_path,
- )
- expected = os.path.join(
- base_path, f"{self._testMethodName}_{'info'}.txt"
- )
- file_handler.assert_called_with(expected)
-
- # __remove_handler
-
- @mock.patch("os.makedirs")
- def test_remove_handler_removes_a_handler(self, *_):
- """Tests that __remove_handler removes the handler from the logger and
- closes the handler.
- """
- dummy_obj = mock.Mock()
- dummy_obj.logger = mock.Mock()
- handler = mock.Mock()
- _LogStream._LogStream__remove_handler(dummy_obj, handler)
-
- self.assertTrue(dummy_obj.logger.removeHandler.called)
- self.assertTrue(handler.close.called)
-
- # update_handlers
-
- @mock.patch("os.makedirs")
- def test_update_handlers_updates_filehandler_target(self, _):
- """Tests that update_handlers invokes the underlying
- MovableFileHandler.set_file method on the correct path.
- """
- info_testclass_log = LogStyles.LOG_INFO + LogStyles.TESTCLASS_LOG
- file_name = "FILENAME"
- with self.patch("MovableFileHandler"):
- log = log_stream.create_logger(
- self._testMethodName, log_styles=info_testclass_log
- )
- handler = log.handlers[-1]
- handler.baseFilename = file_name
- stream = log_stream._log_streams[log.name]
- stream._LogStream__get_current_output_dir = (
- lambda: "BASEPATH/TestClass"
- )
-
- stream.update_handlers(context.NewTestClassContextEvent())
-
- handler.set_file.assert_called_with("BASEPATH/TestClass/FILENAME")
-
- # cleanup
-
- @mock.patch("os.makedirs")
- def test_cleanup_removes_all_handlers(self, *_):
- """Tests that cleanup removes all handlers in the logger, except
- the NullHandler.
- """
- info_testcase_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
- with self.patch("MovableFileHandler"):
- log_stream.create_logger(
- self._testMethodName, log_styles=info_testcase_log
- )
-
- created_log_stream = log_stream._log_streams[self._testMethodName]
- created_log_stream.cleanup()
-
- self.assertEqual(len(created_log_stream.logger.handlers), 1)
-
-
-class LogStreamModuleTests(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- # logging.log_path only exists if logger._setup_test_logger is called.
- # Here we set it to a value that is likely to not exist so file IO is
- # not executed (an error is raised instead of creating the file).
- logging.log_path = "/f/a/i/l/p/a/t/h"
-
- def setUp(self):
- log_stream._log_streams = {}
-
- # _update_handlers
-
- @staticmethod
- def create_new_context_event():
- return context.NewContextEvent()
-
- def test_update_handlers_delegates_calls_to_log_streams(self):
- """Tests _update_handlers calls update_handlers on each log_stream."""
- log_stream._log_streams = {"a": mock.Mock(), "b": mock.Mock()}
-
- log_stream._update_handlers(self.create_new_context_event())
-
- self.assertTrue(log_stream._log_streams["a"].update_handlers.called)
- self.assertTrue(log_stream._log_streams["b"].update_handlers.called)
-
- # _set_logger
-
- def test_set_logger_overwrites_previous_logger(self):
- """Tests that calling set_logger overwrites the previous logger within
- log_stream._log_streams.
- """
- previous = mock.Mock()
- log_stream._log_streams = {"a": previous}
- expected = mock.Mock()
- expected.name = "a"
- log_stream._set_logger(expected)
-
- self.assertEqual(log_stream._log_streams["a"], expected)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/__init__.py b/packages/antlion/unit_tests/libs/ota/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/libs/ota/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip b/packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip
deleted file mode 100644
index 78e64f2..0000000
--- a/packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip
+++ /dev/null
Binary files differ
diff --git a/packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py b/packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
deleted file mode 100644
index 76c026f..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import unittest
-
-import mock
-
-from antlion.libs.ota.ota_runners import ota_runner, ota_runner_factory
-from antlion.libs.ota.ota_runners.ota_runner_factory import OtaConfigError
-
-
-class OtaRunnerFactoryTests(unittest.TestCase):
- """Tests all of the functions in the ota_runner_factory module."""
-
- def setUp(self):
- self.device = mock.MagicMock()
- self.device.serial = "fake_serial"
-
- def test_get_ota_value_from_config_no_map_key_missing(self):
- acts_config = {}
- with self.assertRaises(OtaConfigError):
- ota_runner_factory.get_ota_value_from_config(
- acts_config, "ota_tool", self.device
- )
-
- def test_get_ota_value_from_config_with_map_key_missing(self):
- acts_config = {"ota_map": {"fake_serial": "MockOtaTool"}}
- with self.assertRaises(OtaConfigError):
- ota_runner_factory.get_ota_value_from_config(
- acts_config, "ota_tool", self.device
- )
-
- def test_get_ota_value_from_config_with_map_key_found(self):
- expected_value = "/path/to/tool"
- acts_config = {
- "ota_map": {"fake_serial": "MockOtaTool"},
- "ota_tool_MockOtaTool": expected_value,
- }
- ret = ota_runner_factory.get_ota_value_from_config(
- acts_config, "ota_tool", self.device
- )
- self.assertEqual(expected_value, ret)
-
- def test_create_from_configs_raise_when_non_default_tool_path_missing(self):
- acts_config = {
- "ota_tool": "FakeTool",
- }
- try:
- ota_runner_factory.create_from_configs(acts_config, self.device)
- except OtaConfigError:
- return
- self.fail(
- "create_from_configs did not throw an error when a tool was"
- "specified without a tool path."
- )
-
- def test_create_from_configs_without_map_makes_proper_calls(self):
- acts_config = {
- "ota_package": "jkl;",
- "ota_sl4a": "qaz",
- "ota_tool": "FakeTool",
- "FakeTool": "qwerty",
- }
- function_path = "antlion.libs.ota.ota_runners.ota_runner_factory.create"
- with mock.patch(function_path) as mocked_function:
- ota_runner_factory.create_from_configs(acts_config, self.device)
- mocked_function.assert_called_with(
- "jkl;", "qaz", self.device, "FakeTool", "qwerty"
- )
-
- def test_create_from_configs_with_map_makes_proper_calls(self):
- acts_config = {
- "ota_map": {"fake_serial": "hardwareA"},
- "ota_package_hardwareA": "jkl;",
- "ota_sl4a_hardwareA": "qaz",
- "ota_tool_hardwareA": "FakeTool",
- "FakeTool": "qwerty",
- }
- function_path = "antlion.libs.ota.ota_runners.ota_runner_factory.create"
- with mock.patch(function_path) as mocked_function:
- ota_runner_factory.create_from_configs(acts_config, self.device)
- mocked_function.assert_called_with(
- "jkl;", "qaz", self.device, "FakeTool", "qwerty"
- )
-
- def test_create_raise_on_ota_pkg_and_sl4a_fields_have_different_types(self):
- with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
- with self.assertRaises(TypeError):
- ota_runner_factory.create(
- "ota_package", ["ota_sl4a"], self.device
- )
-
- def test_create_raise_on_ota_package_not_a_list_or_string(self):
- with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
- with self.assertRaises(TypeError):
- ota_runner_factory.create(
- {"ota": "pkg"}, {"ota": "sl4a"}, self.device
- )
-
- def test_create_returns_single_ota_runner_on_ota_package_being_a_str(self):
- with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
- ret = ota_runner_factory.create("", "", self.device)
- self.assertEqual(type(ret), ota_runner.SingleUseOtaRunner)
-
- def test_create_returns_multi_ota_runner_on_ota_package_being_a_list(self):
- with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
- ret = ota_runner_factory.create([], [], self.device)
- self.assertEqual(type(ret), ota_runner.MultiUseOtaRunner)
-
- def test_create_returns_bound_ota_runner_on_second_request(self):
- with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
- first_return = ota_runner_factory.create([], [], self.device)
- logging.disable(logging.WARNING)
- second_return = ota_runner_factory.create([], [], self.device)
- logging.disable(logging.NOTSET)
- self.assertEqual(first_return, second_return)
-
- def test_create_returns_different_ota_runner_on_second_request(self):
- with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
- first_return = ota_runner_factory.create(
- [], [], self.device, use_cached_runners=False
- )
- second_return = ota_runner_factory.create(
- [], [], self.device, use_cached_runners=False
- )
- self.assertNotEqual(first_return, second_return)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
deleted file mode 100644
index e61f032..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
+++ /dev/null
@@ -1,288 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import unittest
-
-import mock
-
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import ota_tool
-
-
-class MockOtaTool(ota_tool.OtaTool):
- def __init__(self, command):
- super(MockOtaTool, self).__init__(command)
- self.update_call_count = 0
- self.cleanup_call_count = 0
-
- def update(self, unused):
- self.update_call_count += 1
-
- def cleanup(self, unused):
- self.cleanup_call_count += 1
-
- def reset_count(self):
- self.update_call_count = 0
- self.cleanup_call_count = 0
-
- def assert_calls_equal(self, test, number_of_expected_calls):
- test.assertEqual(number_of_expected_calls, self.update_call_count)
- test.assertEqual(number_of_expected_calls, self.cleanup_call_count)
-
-
-class OtaRunnerImpl(ota_runner.OtaRunner):
- """Sets properties to return an empty string to allow OtaRunner tests."""
-
- def get_sl4a_apk(self):
- return ""
-
- def get_ota_package(self):
- return ""
-
- def validate_update(self):
- pass
-
-
-class OtaRunnerTest(unittest.TestCase):
- """Tests the OtaRunner class."""
-
- def setUp(self):
- self.prev_sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
- ota_runner.SL4A_SERVICE_SETUP_TIME = 0
-
- def tearDown(self):
- ota_runner.SL4A_SERVICE_SETUP_TIME = self.prev_sl4a_service_setup_time
-
- def test_update(self):
- device = mock.MagicMock()
- device.skip_sl4a = False
- tool = MockOtaTool("mock_command")
- runner = OtaRunnerImpl(tool, device)
- runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "b"])
- runner.get_post_build_id = lambda: "abc"
-
- runner._update()
-
- self.assertTrue(device.stop_services.called)
- self.assertTrue(device.wait_for_boot_completion.called)
- self.assertTrue(device.start_services.called)
- self.assertTrue(device.adb.install.called)
- tool.assert_calls_equal(self, 1)
-
- def test_update_fail_on_no_change_to_build(self):
- device = mock.MagicMock()
- tool = MockOtaTool("mock_command")
- runner = OtaRunnerImpl(tool, device)
- runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "a"])
- runner.get_post_build_id = lambda: "abc"
- try:
- runner._update()
- self.fail("Matching build fingerprints did not throw an error!")
- except ota_runner.OtaError:
- pass
-
- def test_init(self):
- device = mock.MagicMock()
- tool = MockOtaTool("mock_command")
- runner = ota_runner.OtaRunner(tool, device)
-
- self.assertEqual(runner.ota_tool, tool)
- self.assertEqual(runner.android_device, device)
- self.assertEqual(runner.serial, device.serial)
-
- def test_get_post_build_id_grabs_valid_data(self):
- device = mock.MagicMock()
- tool = MockOtaTool("mock_command")
- runner = OtaRunnerImpl(tool, device)
- ota_package_path = os.path.join(
- os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
- "dummy_ota_package.zip",
- )
- runner.get_ota_package = lambda: ota_package_path
- self.assertEqual(runner.get_post_build_id(), "post-build_information")
-
- def test_get_ota_package_metadata_value_does_not_exist(self):
- device = mock.MagicMock()
- tool = MockOtaTool("mock_command")
- runner = OtaRunnerImpl(tool, device)
- ota_package_path = os.path.join(
- os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
- "dummy_ota_package.zip",
- )
- runner.get_ota_package = lambda: ota_package_path
- self.assertEqual(runner.get_ota_package_metadata("garbage-data"), None)
-
-
-class SingleUseOtaRunnerTest(unittest.TestCase):
- """Tests the SingleUseOtaRunner class."""
-
- def setUp(self):
- self.device = mock.MagicMock()
- self.tool = MockOtaTool("mock_command")
-
- def test_update_first_update_runs(self):
- runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
- try:
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- except ota_runner.OtaError:
- self.fail(
- "SingleUseOtaRunner threw an exception on the first "
- "update call."
- )
-
- def test_update_second_update_raises_error(self):
- runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- try:
- runner.update()
- except ota_runner.OtaError:
- return
- self.fail(
- "SingleUseOtaRunner did not throw an exception on the second"
- "update call."
- )
-
- def test_can_update_no_updates_called(self):
- runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
- self.assertEqual(True, runner.can_update())
-
- def test_can_update_has_updated_already(self):
- runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- self.assertEqual(False, runner.can_update())
-
- def test_get_ota_package(self):
- runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "a", "b")
- self.assertEqual(runner.get_ota_package(), "a")
-
- def test_get_sl4a_apk(self):
- runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "a", "b")
- self.assertEqual(runner.get_sl4a_apk(), "b")
-
-
-class MultiUseOtaRunnerTest(unittest.TestCase):
- """Tests the MultiUseOtaRunner class."""
-
- def setUp(self):
- self.device = mock.MagicMock()
- self.tool = MockOtaTool("mock_command")
-
- def test_update_first_update_runs(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool, self.device, [""], [""]
- )
- try:
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- except ota_runner.OtaError:
- self.fail(
- "MultiUseOtaRunner threw an exception on the first "
- "update call."
- )
-
- def test_update_multiple_updates_run(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- try:
- runner.update()
- except ota_runner.OtaError:
- self.fail(
- "MultiUseOtaRunner threw an exception before "
- "running out of update packages."
- )
-
- def test_update_too_many_update_calls_raises_error(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- runner.update()
- try:
- runner.update()
- except ota_runner.OtaError:
- return
- self.fail(
- "MultiUseOtaRunner did not throw an exception after running "
- "out of update packages."
- )
-
- def test_can_update_no_updates_called(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- self.assertEqual(True, runner.can_update())
-
- def test_can_update_has_more_updates_left(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- self.assertEqual(True, runner.can_update())
-
- def test_can_update_ran_out_of_updates(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- with mock.patch.object(ota_runner.OtaRunner, "_update"):
- runner.update()
- runner.update()
- self.assertEqual(False, runner.can_update())
-
- def test_get_ota_package(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- self.assertEqual(runner.get_ota_package(), "first_pkg")
-
- def test_get_sl4a_apk(self):
- runner = ota_runner.MultiUseOtaRunner(
- self.tool,
- self.device,
- ["first_pkg", "second_pkg"],
- ["first_apk", "second_apk"],
- )
- self.assertEqual(runner.get_sl4a_apk(), "first_apk")
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py b/packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
deleted file mode 100644
index 2304c61..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import unittest
-
-import mock
-
-from antlion.controllers import android_device
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import adb_sideload_ota_tool, ota_tool
-
-
-def get_mock_android_device(serial="", ssh_connection=None):
- """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
- with mock.patch(
- "antlion.controllers.adb.AdbProxy"
- ) as adb_proxy, mock.patch(
- "antlion.controllers.fastboot.FastbootProxy"
- ) as fb_proxy:
- adb_proxy.return_value.getprop.return_value = "1.2.3"
- fb_proxy.return_value.devices.return_value = ""
- ret = mock.Mock(
- android_device.AndroidDevice(
- serial=serial, ssh_connection=ssh_connection
- )
- )
- fb_proxy.reset_mock()
- return ret
-
-
-class AdbSideloadOtaToolTest(unittest.TestCase):
- """Tests the OtaTool class."""
-
- def test_init(self):
- expected_value = "commmand string"
- self.assertEqual(
- ota_tool.OtaTool(expected_value).command, expected_value
- )
-
- def setUp(self):
- self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
- ota_runner.SL4A_SERVICE_SETUP_TIME = 0
- logging.log_path = "/tmp/log"
-
- def tearDown(self):
- ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
-
- @staticmethod
- def test_start():
- # This test could have a bunch of verify statements,
-        # but it's probably not worth it.
- device = get_mock_android_device()
- ota_package_path = os.path.join(
- os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
- "dummy_ota_package.zip",
- )
- tool = adb_sideload_ota_tool.AdbSideloadOtaTool(ota_package_path)
- runner = ota_runner.SingleUseOtaRunner(
- tool, device, ota_package_path, ""
- )
- runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "b"])
- runner.update()
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
deleted file mode 100644
index 4769171..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.libs.ota.ota_tools import ota_tool_factory
-
-
-class MockOtaTool(object):
- def __init__(self, command):
- self.command = command
-
-
-class OtaToolFactoryTests(unittest.TestCase):
- def setUp(self):
- ota_tool_factory._constructed_tools = {}
-
- def test_create_constructor_exists(self):
- ota_tool_factory._CONSTRUCTORS = {
- MockOtaTool.__name__: lambda command: MockOtaTool(command),
- }
- ret = ota_tool_factory.create(MockOtaTool.__name__, "command")
- self.assertEqual(type(ret), MockOtaTool)
- self.assertTrue(ret in ota_tool_factory._constructed_tools.values())
-
- def test_create_not_in_constructors(self):
- ota_tool_factory._CONSTRUCTORS = {}
- with self.assertRaises(KeyError):
- ota_tool_factory.create(MockOtaTool.__name__, "command")
-
- def test_create_returns_cached_tool(self):
- ota_tool_factory._CONSTRUCTORS = {
- MockOtaTool.__name__: lambda command: MockOtaTool(command),
- }
- ret_a = ota_tool_factory.create(MockOtaTool.__name__, "command")
- ret_b = ota_tool_factory.create(MockOtaTool.__name__, "command")
- self.assertEqual(ret_a, ret_b)
-
-
-if __name__ == "__main__":
- unittest.main()
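These tests pin down the construct-once contract of the factory: create() builds a tool through the registered constructor, caches it, and returns the cached instance on later calls with the same command. The sketch below only mirrors what the tests assert; the cache key and internals are assumptions, not the real module.

    _CONSTRUCTORS = {}        # tool class name -> callable(command) that builds the tool
    _constructed_tools = {}   # already-built tools, keyed here on the command string

    def create(tool_name, command):
        if command in _constructed_tools:
            return _constructed_tools[command]    # reuse the cached tool
        tool = _CONSTRUCTORS[tool_name](command)  # KeyError for unregistered names
        _constructed_tools[command] = tool
        return tool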
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
deleted file mode 100644
index 84a16ae..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.libs.ota.ota_tools import ota_tool
-
-
-class OtaToolTests(unittest.TestCase):
- """Tests the OtaTool class."""
-
- def test_init(self):
- expected_value = "commmand string"
- self.assertEqual(
- ota_tool.OtaTool(expected_value).command, expected_value
- )
-
- def test_start_throws_error_on_unimplemented(self):
- obj = "some object"
- with self.assertRaises(NotImplementedError):
- ota_tool.OtaTool("").update(obj)
-
- def test_end_is_not_abstract(self):
- obj = "some object"
- try:
- ota_tool.OtaTool("").cleanup(obj)
- except:
- self.fail("End is not required and should be a virtual function.")
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
deleted file mode 100644
index b2fa3b7..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import unittest
-
-import mock
-
-from antlion.controllers import android_device
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import update_device_ota_tool
-
-
-def get_mock_android_device(serial="", ssh_connection=None):
- """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
- with mock.patch(
- "antlion.controllers.adb.AdbProxy"
- ) as adb_proxy, mock.patch(
- "antlion.controllers.fastboot.FastbootProxy"
- ) as fb_proxy:
- adb_proxy.return_value.getprop.return_value = "1.2.3"
- fb_proxy.return_value.devices.return_value = ""
- ret = mock.Mock(
- android_device.AndroidDevice(
- serial=serial, ssh_connection=ssh_connection
- )
- )
- fb_proxy.reset_mock()
- return ret
-
-
-class UpdateDeviceOtaToolTest(unittest.TestCase):
- """Tests for UpdateDeviceOtaTool."""
-
- def setUp(self):
- self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
- ota_runner.SL4A_SERVICE_SETUP_TIME = 0
- logging.log_path = "/tmp/log"
-
- def tearDown(self):
- ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
-
- def test_update(self):
- ota_package_path = os.path.join(
- os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
- "dummy_ota_package.zip",
- )
- with mock.patch("tempfile.mkdtemp") as mkdtemp, mock.patch(
- "shutil.rmtree"
- ) as rmtree, mock.patch("antlion.utils.unzip_maintain_permissions"):
- mkdtemp.return_value = ""
- rmtree.return_value = ""
- device = get_mock_android_device()
- tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
- runner = mock.Mock(
- ota_runner.SingleUseOtaRunner(tool, device, "", "")
- )
- runner.return_value.android_device = device
- with mock.patch("antlion.libs.proc.job.run"):
- tool.update(runner)
- del tool
-
- def test_del(self):
- ota_package_path = os.path.join(
- os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
- "dummy_ota_package.zip",
- )
- with mock.patch("tempfile.mkdtemp") as mkdtemp, mock.patch(
- "shutil.rmtree"
- ) as rmtree, mock.patch("antlion.utils.unzip_maintain_permissions"):
- mkdtemp.return_value = ""
- rmtree.return_value = ""
- tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
- del tool
- self.assertTrue(mkdtemp.called)
- self.assertTrue(rmtree.called)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_updater_test.py b/packages/antlion/unit_tests/libs/ota/ota_updater_test.py
deleted file mode 100644
index de32ea1..0000000
--- a/packages/antlion/unit_tests/libs/ota/ota_updater_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.libs.ota import ota_updater
-from antlion.libs.ota.ota_runners import ota_runner
-
-
-class MockAndroidDevice(object):
- def __init__(self, serial):
- self.serial = serial
- self.log = mock.Mock()
- self.take_bug_report = mock.MagicMock()
-
-
-class MockOtaRunner(object):
- def __init__(self):
- self.call_count = 0
- self.should_fail = False
- self.can_update_value = "CAN_UPDATE_CALLED"
-
- def set_failure(self, should_fail=True):
- self.should_fail = should_fail
-
- def update(self):
- self.call_count += 1
- if self.should_fail:
- raise ota_runner.OtaError
-
- def can_update(self):
- return self.can_update_value
-
- def validate_update(self):
- pass
-
-
-class OtaUpdaterTests(unittest.TestCase):
- """Tests the methods in the ota_updater module."""
-
- def test_initialize(self):
- user_params = {"a": 1, "b": 2, "c": 3}
- android_devices = ["x", "y", "z"]
- with mock.patch(
- "antlion.libs.ota.ota_runners.ota_runner_factory."
- "create_from_configs"
- ) as fn:
- ota_updater.initialize(user_params, android_devices)
- for i in range(len(android_devices)):
- fn.assert_any_call(user_params, android_devices[i])
- self.assertSetEqual(
- set(android_devices), set(ota_updater.ota_runners.keys())
- )
-
- def test_check_initialization_is_initialized(self):
- device = MockAndroidDevice("serial")
- ota_updater.ota_runners = {device: ota_runner.OtaRunner("tool", device)}
- try:
- ota_updater._check_initialization(device)
- except ota_runner.OtaError:
- self.fail("_check_initialization raised for initialized runner!")
-
- def test_check_initialization_is_not_initialized(self):
- device = MockAndroidDevice("serial")
- ota_updater.ota_runners = {}
- with self.assertRaises(KeyError):
- ota_updater._check_initialization(device)
-
- def test_update_do_not_ignore_failures_and_failures_occur(self):
- device = MockAndroidDevice("serial")
- runner = MockOtaRunner()
- runner.set_failure(True)
- ota_updater.ota_runners = {device: runner}
- with self.assertRaises(ota_runner.OtaError):
- ota_updater.update(device)
-
- def test_update_ignore_failures_and_failures_occur(self):
- device = MockAndroidDevice("serial")
- runner = MockOtaRunner()
- runner.set_failure(True)
- ota_updater.ota_runners = {device: runner}
- try:
- ota_updater.update(device, ignore_update_errors=True)
- except ota_runner.OtaError:
- self.fail("OtaError was raised when errors are to be ignored!")
-
- def test_can_update(self):
- device = MockAndroidDevice("serial")
- runner = MockOtaRunner()
- ota_updater.ota_runners = {device: runner}
- self.assertEqual(ota_updater.can_update(device), "CAN_UPDATE_CALLED")
-
-
-if __name__ == "__main__":
- unittest.main()
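For context, the module under test exposes a small functional API that the tests exercise directly. A minimal usage sketch inside a test class follows; attribute names such as self.user_params and self.android_devices follow the usual ACTS/antlion conventions and are assumptions here.

    ota_updater.initialize(self.user_params, self.android_devices)
    for ad in self.android_devices:
        if ota_updater.can_update(ad):
            # Raises OtaError on failure unless errors are explicitly ignored.
            ota_updater.update(ad, ignore_update_errors=False)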
diff --git a/packages/antlion/unit_tests/libs/proc/__init__.py b/packages/antlion/unit_tests/libs/proc/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/packages/antlion/unit_tests/libs/proc/__init__.py
+++ /dev/null
diff --git a/packages/antlion/unit_tests/libs/proc/process_test.py b/packages/antlion/unit_tests/libs/proc/process_test.py
deleted file mode 100644
index 6fded9e..0000000
--- a/packages/antlion/unit_tests/libs/proc/process_test.py
+++ /dev/null
@@ -1,372 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import subprocess
-import unittest
-
-import mock
-
-from antlion.libs.proc.process import Process, ProcessError
-
-
-class FakeThread(object):
- def __init__(self, target=None):
- self.target = target
- self.alive = False
-
- def _on_start(self):
- pass
-
- def start(self):
- self.alive = True
- if self._on_start:
- self._on_start()
-
- def stop(self):
- self.alive = False
-
- def join(self):
- pass
-
-
-class ProcessTest(unittest.TestCase):
- """Tests the antlion.libs.proc.process.Process class."""
-
- def setUp(self):
- self._Process__start_process = Process._Process__start_process
-
- def tearDown(self):
- Process._Process__start_process = self._Process__start_process
-
- @staticmethod
- def patch(imported_name, *args, **kwargs):
- return mock.patch(
- f"antlion.libs.proc.process.{imported_name}", *args, **kwargs
- )
-
- # set_on_output_callback
-
- def test_set_on_output_callback(self):
- """Tests that set_on_output_callback sets on_output_callback."""
- callback = mock.Mock()
-
- process = Process("cmd").set_on_output_callback(callback)
- process._on_output_callback()
-
- self.assertTrue(callback.called)
-
- # set_on_terminate_callback
-
- def test_set_on_terminate_callback(self):
- """Tests that set_on_terminate_callback sets _on_terminate_callback."""
- callback = mock.Mock()
-
- process = Process("cmd").set_on_terminate_callback(callback)
- process._on_terminate_callback()
-
- self.assertTrue(callback.called)
-
- # start
-
- def test_start_raises_if_called_back_to_back(self):
- """Tests that start raises an exception if it has already been called
- prior.
-
- This is required to prevent references to processes and threads from
- being overwritten, potentially causing ACTS to hang."""
- process = Process("cmd")
-
- # Here we need the thread to start the process object.
- class FakeThreadImpl(FakeThread):
- def _on_start(self):
- process._process = mock.Mock()
-
- with self.patch("Thread", FakeThreadImpl):
- process.start()
- expected_msg = "Process has already started."
- with self.assertRaisesRegex(ProcessError, expected_msg):
- process.start()
-
- def test_start_starts_listening_thread(self):
- """Tests that start starts the _exec_popen_loop function."""
- process = Process("cmd")
-
- # Here we need the thread to start the process object.
- class FakeThreadImpl(FakeThread):
- def _on_start(self):
- process._process = mock.Mock()
-
- with self.patch("Thread", FakeThreadImpl):
- process.start()
-
- self.assertTrue(process._listening_thread.alive)
- self.assertEqual(process._listening_thread.target, process._exec_loop)
-
- # wait
-
- def test_wait_raises_if_called_back_to_back(self):
- """Tests that wait raises an exception if it has already been called
- prior."""
- process = Process("cmd")
- process._process = mock.Mock()
-
- process.wait(0)
- expected_msg = "Process is already being stopped."
- with self.assertRaisesRegex(ProcessError, expected_msg):
- process.wait(0)
-
- @mock.patch.object(Process, "_kill_process")
- def test_wait_kills_after_timeout(self, *_):
- """Tests that if a TimeoutExpired error is thrown during wait, the
- process is killed."""
- process = Process("cmd")
- process._process = mock.Mock()
- process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
-
- process.wait(0)
-
- self.assertEqual(process._kill_process.called, True)
-
- @mock.patch("os.getpgid", side_effect=lambda id: id)
- @mock.patch("os.killpg")
- def test_sends_signal(self, mock_os, *_):
- """Tests that signal is sent to process.."""
- process = Process("cmd")
- mock_process = mock.Mock()
- mock_process.pid = -1
- process._process = mock_process
-
- process.signal(51641)
-
- mock_os.assert_called_with(-1, 51641)
-
- def test_signal_raises_error_on_windows(self, *_):
- """Tests that signaling is unsupported in windows with appropriate
- error msg."""
- process = Process("cmd")
- mock_inner_process = mock.Mock()
- mock_inner_process.pid = -1
- process._process = mock_inner_process
-
- with mock.patch("antlion.libs.proc.process._on_windows", True):
- with self.assertRaises(ProcessError):
- process.signal(51641)
-
- @mock.patch.object(Process, "_kill_process")
- def test_wait_sets_stopped_to_true_before_process_kill(self, *_):
- """Tests that stop() sets the _stopped attribute to True.
-
- This order is required to prevent the _exec_loop from calling
- _on_terminate_callback when the user has killed the process.
- """
- verifier = mock.Mock()
- verifier.passed = False
-
- def test_call_order():
- self.assertTrue(process._stopped)
- verifier.passed = True
-
- process = Process("cmd")
- process._process = mock.Mock()
- process._process.poll.return_value = None
- process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
- process._kill_process = test_call_order
-
- process.wait()
-
- self.assertEqual(verifier.passed, True)
-
- def test_wait_joins_listening_thread_if_it_exists(self):
- """Tests wait() joins _listening_thread if it exists."""
- process = Process("cmd")
- process._process = mock.Mock()
- mocked_thread = mock.Mock()
- process._listening_thread = mocked_thread
-
- process.wait(0)
-
- self.assertEqual(mocked_thread.join.called, True)
-
- def test_wait_clears_listening_thread_if_it_exists(self):
- """Tests wait() joins _listening_thread if it exists.
-
- Threads can only be started once, so after wait has been called, we
- want to make sure we clear the listening thread.
- """
- process = Process("cmd")
- process._process = mock.Mock()
- process._listening_thread = mock.Mock()
-
- process.wait(0)
-
- self.assertEqual(process._listening_thread, None)
-
- def test_wait_joins_redirection_thread_if_it_exists(self):
- """Tests wait() joins _listening_thread if it exists."""
- process = Process("cmd")
- process._process = mock.Mock()
- mocked_thread = mock.Mock()
- process._redirection_thread = mocked_thread
-
- process.wait(0)
-
- self.assertEqual(mocked_thread.join.called, True)
-
- def test_wait_clears_redirection_thread_if_it_exists(self):
- """Tests wait() joins _listening_thread if it exists.
-
- Threads can only be started once, so after wait has been called, we
- want to make sure we clear the listening thread.
- """
- process = Process("cmd")
- process._process = mock.Mock()
- process._redirection_thread = mock.Mock()
-
- process.wait(0)
-
- self.assertEqual(process._redirection_thread, None)
-
- # stop
-
- def test_stop_sets_stopped_to_true(self):
- """Tests that stop() sets the _stopped attribute to True."""
- process = Process("cmd")
- process._process = mock.Mock()
-
- process.stop()
-
- self.assertTrue(process._stopped)
-
- def test_stop_sets_stopped_to_true_before_process_kill(self):
- """Tests that stop() sets the _stopped attribute to True.
-
- This order is required to prevent the _exec_loop from calling
- _on_terminate_callback when the user has killed the process.
- """
- verifier = mock.Mock()
- verifier.passed = False
-
- def test_call_order():
- self.assertTrue(process._stopped)
- verifier.passed = True
-
- process = Process("cmd")
- process._process = mock.Mock()
- process._process.poll.return_value = None
- process._kill_process = test_call_order
- process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
-
- process.stop()
-
- self.assertEqual(verifier.passed, True)
-
- def test_stop_calls_wait(self):
- """Tests that stop() also has the functionality of wait()."""
- process = Process("cmd")
- process._process = mock.Mock()
- process.wait = mock.Mock()
-
- process.stop()
-
- self.assertEqual(process.wait.called, True)
-
- # _redirect_output
-
- def test_redirect_output_feeds_all_lines_to_on_output_callback(self):
- """Tests that _redirect_output loops until all lines are parsed."""
- received_list = []
-
- def appender(line):
- received_list.append(line)
-
- process = Process("cmd")
- process.set_on_output_callback(appender)
- process._process = mock.Mock()
- process._process.stdout.readline.side_effect = [b"a\n", b"b\n", b""]
-
- process._redirect_output()
-
- self.assertEqual(received_list[0], "a")
- self.assertEqual(received_list[1], "b")
- self.assertEqual(len(received_list), 2)
-
- # __start_process
-
- def test_start_process_returns_a_popen_object(self):
- """Tests that a Popen object is returned by __start_process."""
- with self.patch("subprocess.Popen", return_value="verification"):
- self.assertEqual(
- Process._Process__start_process("cmd"), "verification"
- )
-
- # _exec_loop
-
- def test_exec_loop_redirections_output(self):
- """Tests that the _exec_loop function calls to redirect the output."""
- process = Process("cmd")
- Process._Process__start_process = mock.Mock()
-
- with self.patch("Thread", FakeThread):
- process._exec_loop()
-
- self.assertEqual(
- process._redirection_thread.target, process._redirect_output
- )
- self.assertEqual(process._redirection_thread.alive, True)
-
- def test_exec_loop_waits_for_process(self):
- """Tests that the _exec_loop waits for the process to complete before
- returning."""
- process = Process("cmd")
- Process._Process__start_process = mock.Mock()
-
- with self.patch("Thread", FakeThread):
- process._exec_loop()
-
- self.assertEqual(process._process.wait.called, True)
-
- def test_exec_loop_loops_if_not_stopped(self):
- process = Process("1st")
- Process._Process__start_process = mock.Mock()
- process._on_terminate_callback = mock.Mock(side_effect=[["2nd"], None])
-
- with self.patch("Thread", FakeThread):
- process._exec_loop()
-
- self.assertEqual(Process._Process__start_process.call_count, 2)
- self.assertEqual(
- Process._Process__start_process.call_args_list[0][0], (["1st"],)
- )
- self.assertEqual(
- Process._Process__start_process.call_args_list[1][0], (["2nd"],)
- )
-
- def test_exec_loop_does_not_loop_if_stopped(self):
- process = Process("1st")
- Process._Process__start_process = mock.Mock()
- process._on_terminate_callback = mock.Mock(side_effect=["2nd", None])
- process._stopped = True
-
- with self.patch("Thread", FakeThread):
- process._exec_loop()
-
- self.assertEqual(Process._Process__start_process.call_count, 1)
- self.assertEqual(
- Process._Process__start_process.call_args_list[0][0], (["1st"],)
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/packages/antlion/unit_tests/mock_controller.py b/packages/antlion/unit_tests/mock_controller.py
deleted file mode 100644
index f3a2576..0000000
--- a/packages/antlion/unit_tests/mock_controller.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a mock third-party controller module used for unit testing antlion.
-
-import logging
-
-MOBLY_CONTROLLER_CONFIG_NAME = "MagicDevice"
-
-
-def create(configs):
- objs = []
- for c in configs:
- if isinstance(c, dict):
- c.pop("serial")
- objs.append(MagicDevice(c))
- return objs
-
-
-def destroy(objs):
- print("Destroying magic")
-
-
-def get_info(objs):
- infos = []
- for obj in objs:
- infos.append(obj.who_am_i())
- return infos
-
-
-class MagicDevice(object):
- def __init__(self, config):
- self.magic = config
-
- def get_magic(self):
- logging.info("My magic is %s.", self.magic)
- return self.magic
-
- def who_am_i(self):
- return {"MyMagic": self.magic}
diff --git a/packages/antlion/unit_tests/test_data/1k_2k.raw b/packages/antlion/unit_tests/test_data/1k_2k.raw
deleted file mode 100644
index 42e7ab9..0000000
--- a/packages/antlion/unit_tests/test_data/1k_2k.raw
+++ /dev/null
Binary files differ
diff --git a/packages/antlion/unit_tests/test_suite.py b/packages/antlion/unit_tests/test_suite.py
deleted file mode 100755
index 288c7eb..0000000
--- a/packages/antlion/unit_tests/test_suite.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-import tempfile
-import unittest
-
-
-class TestResult(object):
- """
- Attributes:
- failures_future: The list of failed test cases during this test.
- output_file: The file containing the stderr/stdout for this test.
- test_suite: The unittest.TestSuite used. Useful for debugging.
- test_filename: The *_test.py file that ran in this test.
- """
-
- def __init__(self, test_result, output_file, test_suite, test_filename):
- self.test_result = test_result
- self.output_file = output_file
- self.test_suite = test_suite
- self.test_filename = test_filename
-
-
-def run_all_unit_tests():
- suite = unittest.TestSuite()
- test_files = []
- loader = unittest.TestLoader()
- for root, _, files in os.walk(os.path.dirname(__file__)):
- for filename in files:
- if filename.endswith("_test.py"):
- test_files.append(os.path.join(root, filename))
- try:
- suite.addTest(loader.discover(root, filename))
- except ImportError as e:
- if "Start directory is not importable" not in e.args[0]:
- raise
- message = ". Did you forget to add an __init__.py file?"
- raise ImportError(e.args[0] + message)
-
- output_dir = tempfile.mkdtemp()
-
- results = []
-
- for index, test in enumerate(suite._tests):
- output_file = os.path.join(output_dir, f"test_{index}.output")
-
- test_result = subprocess.Popen(
- [sys.executable, test_files[index]],
- stdout=open(output_file, "w+"),
- stderr=subprocess.STDOUT,
- )
- results.append(
- TestResult(test_result, output_file, test, test_files[index])
- )
-
- all_failures = []
- for index, result in enumerate(results):
- try:
- failures = result.test_result.wait(timeout=60)
- if failures:
- print(
- f"Failure logs for {result.test_filename}:", file=sys.stderr
- )
- with open(result.output_file, "r") as out_file:
- print(out_file.read(), file=sys.stderr)
- all_failures.append(f"{result.test_filename} (failed)")
- except subprocess.TimeoutExpired:
- all_failures.append(f"{result.test_filename} (timed out)")
- print(
- f"The following test timed out: {result.test_filename!r}",
- file=sys.stderr,
- )
- with open(result.output_file, "r") as out_file:
- print(out_file.read(), file=sys.stderr)
-
- # Prints a summary over all unit tests failed.
- if all_failures:
- print("The following tests failed:", file=sys.stderr)
- for failure in all_failures:
- print(" ", failure, file=sys.stderr)
-
- exit(bool(all_failures))
-
-
-if __name__ == "__main__":
- run_all_unit_tests()
diff --git a/packages/antlion/utils.py b/packages/antlion/utils.py
deleted file mode 100755
index 219feab..0000000
--- a/packages/antlion/utils.py
+++ /dev/null
@@ -1,1089 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import concurrent.futures
-import datetime
-import ipaddress
-import json
-import logging
-import os
-import platform
-import random
-import re
-import signal
-import socket
-import string
-import subprocess
-import time
-import traceback
-import zipfile
-from concurrent.futures import ThreadPoolExecutor
-from dataclasses import dataclass
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from mobly import signals
-
-from antlion.libs.proc import job
-from antlion.runner import CalledProcessError, Runner
-
-if TYPE_CHECKING:
- from antlion.controllers.android_device import AndroidDevice
- from antlion.controllers.fuchsia_device import FuchsiaDevice
- from antlion.controllers.utils_lib.ssh.connection import SshConnection
-
-# File name length is limited to 255 chars on some OS, so we need to make sure
-# the file names we output fit within the limit.
-MAX_FILENAME_LEN = 255
-
-# All Fuchsia devices use this suffix for link-local mDNS host names.
-FUCHSIA_MDNS_TYPE = "_fuchsia._udp.local."
-
-# Default max seconds it takes for Duplicate Address Detection to finish before
-# assigning an IPv6 address.
-DAD_TIMEOUT_SEC = 30
-
-
-class ActsUtilsError(Exception):
- """Generic error raised for exceptions in ACTS utils."""
-
-
-ascii_letters_and_digits = string.ascii_letters + string.digits
-valid_filename_chars = f"-_.{ascii_letters_and_digits}"
-
-models = (
- "sprout",
- "occam",
- "hammerhead",
- "bullhead",
- "razor",
- "razorg",
- "shamu",
- "angler",
- "volantis",
- "volantisg",
- "mantaray",
- "fugu",
- "ryu",
- "marlin",
- "sailfish",
-)
-
-manufacture_name_to_model = {
- "flo": "razor",
- "flo_lte": "razorg",
- "flounder": "volantis",
- "flounder_lte": "volantisg",
- "dragon": "ryu",
-}
-
-GMT_to_olson = {
- "GMT-9": "America/Anchorage",
- "GMT-8": "US/Pacific",
- "GMT-7": "US/Mountain",
- "GMT-6": "US/Central",
- "GMT-5": "US/Eastern",
- "GMT-4": "America/Barbados",
- "GMT-3": "America/Buenos_Aires",
- "GMT-2": "Atlantic/South_Georgia",
- "GMT-1": "Atlantic/Azores",
- "GMT+0": "Africa/Casablanca",
- "GMT+1": "Europe/Amsterdam",
- "GMT+2": "Europe/Athens",
- "GMT+3": "Europe/Moscow",
- "GMT+4": "Asia/Baku",
- "GMT+5": "Asia/Oral",
- "GMT+6": "Asia/Almaty",
- "GMT+7": "Asia/Bangkok",
- "GMT+8": "Asia/Hong_Kong",
- "GMT+9": "Asia/Tokyo",
- "GMT+10": "Pacific/Guam",
- "GMT+11": "Pacific/Noumea",
- "GMT+12": "Pacific/Fiji",
- "GMT+13": "Pacific/Tongatapu",
- "GMT-11": "Pacific/Midway",
- "GMT-10": "Pacific/Honolulu",
-}
-
-
-def abs_path(path: str) -> str:
- """Resolve the '.' and '~' in a path to get the absolute path.
-
- Args:
- path: The path to expand.
-
- Returns:
- The absolute path of the input path.
- """
- return os.path.abspath(os.path.expanduser(path))
-
-
-def get_current_epoch_time() -> int:
- """Current epoch time in milliseconds.
-
- Returns:
- An integer representing the current epoch time in milliseconds.
- """
- return int(round(time.time() * 1000))
-
-
-def get_current_human_time() -> str:
- """Returns the current time in human readable format.
-
- Returns:
- The current time stamp in Month-Day-Year Hour:Min:Sec format.
- """
- return time.strftime("%m-%d-%Y %H:%M:%S ")
-
-
-def epoch_to_human_time(epoch_time: int) -> str | None:
- """Converts an epoch timestamp to human readable time.
-
- This essentially converts an output of get_current_epoch_time to an output
- of get_current_human_time.
-
- Args:
- epoch_time: An integer representing an epoch timestamp in milliseconds.
-
- Returns:
- A time string representing the input time.
- None if input param is invalid.
- """
- if isinstance(epoch_time, int):
- try:
- d = datetime.datetime.fromtimestamp(epoch_time / 1000)
- return d.strftime("%m-%d-%Y %H:%M:%S ")
- except ValueError:
- return None
-
-
-def get_timezone_olson_id() -> str:
- """Return the Olson ID of the local (non-DST) timezone.
-
- Returns:
- A string representing one of the Olson IDs of the local (non-DST)
- timezone.
- """
- tzoffset = int(time.timezone / 3600)
- gmt = None
- if tzoffset <= 0:
- gmt = f"GMT+{-tzoffset}"
- else:
- gmt = f"GMT-{tzoffset}"
- return GMT_to_olson[gmt]
-
-
-def load_config(file_full_path: str, log_errors: bool = True) -> Any:
- """Loads a JSON config file.
-
- Returns:
- A JSON object.
- """
- with open(file_full_path, "r") as f:
- try:
- return json.load(f)
- except Exception as e:
- if log_errors:
- logging.error("Exception error to load %s: %s", f, e)
- raise
-
-
-def rand_ascii_str(length: int) -> str:
- """Generates a random string of specified length, composed of ascii letters
- and digits.
-
- Args:
- length: The number of characters in the string.
-
- Returns:
- The random string generated.
- """
- letters = [random.choice(ascii_letters_and_digits) for i in range(length)]
- return "".join(letters)
-
-
-def rand_hex_str(length: int) -> str:
- """Generates a random string of specified length, composed of hex digits
-
- Args:
- length: The number of characters in the string.
-
- Returns:
- The random string generated.
- """
- letters = [random.choice(string.hexdigits) for i in range(length)]
- return "".join(letters)
-
-
-# Thread/Process related functions.
-def concurrent_exec(func: Any, param_list: Any) -> list[Any]:
- """Executes a function with different parameters pseudo-concurrently.
-
- This is basically a map function. Each element (should be an iterable) in
- the param_list is unpacked and passed into the function. Due to Python's
- GIL, there's no true concurrency. This is suited for IO-bound tasks.
-
- Args:
- func: The function that performs a task.
- param_list: A list of iterables, each being a set of params to be
- passed into the function.
-
- Returns:
- A list of return values from each function execution. If an execution
- caused an exception, the exception object will be the corresponding
- result.
- """
- with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor:
- # Start the load operations and mark each future with its params
- future_to_params = {executor.submit(func, *p): p for p in param_list}
- return_vals = []
- for future in concurrent.futures.as_completed(future_to_params):
- params = future_to_params[future]
- try:
- return_vals.append(future.result())
- except Exception as exc:
- print(
- f"{params} generated an exception: {traceback.format_exc()}"
- )
- return_vals.append(exc)
- return return_vals
-
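A minimal usage sketch of concurrent_exec: each tuple in param_list is unpacked as the arguments of one call, and a call that raises contributes its exception object in place of a result.

    def add(a, b):
        return a + b

    results = concurrent_exec(add, [(1, 2), (3, 4), (5, 6)])
    # results contains 3, 7, and 11 in completion order, not submission order.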
-
-def exe_cmd(*cmds: Any) -> bytes:
- """Executes commands in a new shell.
-
- Args:
- cmds: A sequence of commands and arguments.
-
- Returns:
- The output of the command run.
-
- Raises:
- OSError is raised if an error occurred during the command execution.
- """
- cmd = " ".join(cmds)
- proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
- )
- (out, err) = proc.communicate()
- if not err:
- return out
- raise OSError(err)
-
-
-def require_sl4a(android_devices: list[AndroidDevice]) -> None:
- """Makes sure sl4a connection is established on the given AndroidDevice
- objects.
-
- Args:
- android_devices: A list of AndroidDevice objects.
-
- Raises:
- AssertionError is raised if any given android device does not have SL4A
- connection established.
- """
- for ad in android_devices:
- msg = f"SL4A connection not established properly on {ad.serial}."
- assert ad.droid, msg
-
-
-def _assert_subprocess_running(proc: subprocess.Popen[bytes]) -> None:
- """Checks if a subprocess has terminated on its own.
-
- Args:
- proc: A subprocess returned by subprocess.Popen.
-
- Raises:
- ActsUtilsError is raised if the subprocess has stopped.
- """
- ret = proc.poll()
- if ret is not None:
- out, err = proc.communicate()
- raise ActsUtilsError(
- "Process %d has terminated. ret: %d, stderr: %s,"
- " stdout: %s" % (proc.pid, ret, str(err), str(out))
- )
-
-
-def start_standing_subprocess(
- cmd: str, check_health_delay: int = 0, shell: bool = True
-) -> subprocess.Popen[bytes]:
- """Starts a long-running subprocess.
-
- This is not a blocking call and the subprocess started by it should be
- explicitly terminated with stop_standing_subprocess.
-
- For short-running commands, you should use exe_cmd, which blocks.
-
- You can specify a health check after the subprocess is started to make sure
- it did not stop prematurely.
-
- Args:
- cmd: string, the command to start the subprocess with.
- check_health_delay: float, the number of seconds to wait after the
- subprocess starts to check its health. Default is 0,
- which means no check.
-
- Returns:
- The subprocess that got started.
- """
- proc = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=shell,
- preexec_fn=os.setpgrp,
- )
- logging.debug("Start standing subprocess with cmd: %s", cmd)
- if check_health_delay > 0:
- time.sleep(check_health_delay)
- _assert_subprocess_running(proc)
- return proc
-
-
-def stop_standing_subprocess(
- proc: subprocess.Popen[bytes], kill_signal: signal.Signals = signal.SIGTERM
-) -> None:
- """Stops a subprocess started by start_standing_subprocess.
-
- Before killing the process, we check whether it is still running; if it has
- already terminated, ActsUtilsError is raised.
-
- Catches and ignores the PermissionError which only happens on Macs.
-
- Args:
- proc: Subprocess to terminate.
- """
- pid = proc.pid
- logging.debug("Stop standing subprocess %d", pid)
- _assert_subprocess_running(proc)
- try:
- os.killpg(pid, kill_signal)
- except PermissionError:
- pass
-
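The two helpers above are intended to be used as a pair; a minimal sketch, where the command is only an example:

    proc = start_standing_subprocess("ping 127.0.0.1", check_health_delay=1)
    try:
        time.sleep(5)  # do the work that needs the process running
    finally:
        # Verifies the process is still alive, then SIGTERMs its process group.
        stop_standing_subprocess(proc)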
-
-def wait_for_standing_subprocess(
- proc: subprocess.Popen[bytes], timeout: int | None = None
-) -> None:
- """Waits for a subprocess started by start_standing_subprocess to finish
- or times out.
-
- Propagates the exception raised by the subprocess.wait() function.
- The subprocess.TimeoutExpired exception is raised if the process timed out
- rather than terminating.
-
- If no exception is raised: the subprocess terminated on its own. No need
- to call stop_standing_subprocess() to kill it.
-
- If an exception is raised: the subprocess is still alive - it did not
- terminate. Either call stop_standing_subprocess() to kill it, or call
- wait_for_standing_subprocess() to keep waiting for it to terminate on its
- own.
-
- Args:
- proc: Subprocess to wait for.
- timeout: An integer number of seconds to wait before timing out.
- """
- proc.wait(timeout)
-
-
-def sync_device_time(
- ad: AndroidDevice,
-) -> None:
- """Sync the time of an android device with the current system time.
-
- Both epoch time and the timezone will be synced.
-
- Args:
- ad: The android device to sync time on.
- """
- ad.adb.shell("settings put global auto_time 0", ignore_status=True)
- ad.adb.shell("settings put global auto_time_zone 0", ignore_status=True)
- droid = ad.droid
- if not droid:
- raise signals.ControllerError("missing ad.droid")
- droid.setTimeZone(get_timezone_olson_id())
- droid.setTime(get_current_epoch_time())
-
-
-def set_ambient_display(ad: AndroidDevice, new_state: bool) -> None:
- """Set "Ambient Display" in Settings->Display
-
- Args:
- ad: android device object.
- new_state: new state for "Ambient Display". True or False.
- """
- ad.adb.shell(f"settings put secure doze_enabled {1 if new_state else 0}")
-
-
-def set_location_service(ad: AndroidDevice, new_state: bool) -> None:
- """Set Location service on/off in Settings->Location
-
- Args:
- ad: android device object.
- new_state: new state for "Location service".
- If new_state is False, turn off location service.
- If new_state is True, set location service to "High accuracy".
- """
- ad.adb.shell(
- "content insert --uri "
- " content://com.google.settings/partner --bind "
- "name:s:network_location_opt_in --bind value:s:1"
- )
- ad.adb.shell(
- "content insert --uri "
- " content://com.google.settings/partner --bind "
- "name:s:use_location_for_services --bind value:s:1"
- )
- if new_state:
- ad.adb.shell("settings put secure location_mode 3")
- else:
- ad.adb.shell("settings put secure location_mode 0")
-
-
-def parse_ping_ouput(
- ad: AndroidDevice, count: int, out: str, loss_tolerance: int = 20
-) -> bool:
- """Ping Parsing util.
-
- Args:
- ad: Android Device Object.
- count: Number of ICMP packets sent
- out: shell output text of ping operation
- loss_tolerance: Threshold after which flag test as false
- Returns:
- False: if packet loss is more than loss_tolerance%
- True: if all good
- """
- result = re.search(
- r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out
- )
- if not result:
- ad.log.info("Ping failed with %s", out)
- return False
-
- packet_loss = int(result.group(3))
- packet_xmit = int(result.group(1))
- packet_rcvd = int(result.group(2))
- min_packet_xmit_rcvd = (100 - loss_tolerance) * 0.01
- if (
- packet_loss > loss_tolerance
- or packet_xmit < count * min_packet_xmit_rcvd
- or packet_rcvd < count * min_packet_xmit_rcvd
- ):
- ad.log.error(
- "%s, ping failed with loss more than tolerance %s%%",
- result.group(0),
- loss_tolerance,
- )
- return False
- ad.log.info("Ping succeed with %s", result.group(0))
- return True
-
-
-def adb_shell_ping(
- ad: AndroidDevice,
- dest_ip: str,
- count: int = 120,
- timeout: int = 200,
- loss_tolerance: int = 20,
-) -> bool:
- """Ping utility using adb shell.
-
- Args:
- ad: Android Device Object.
- count: Number of ICMP packets to send
- dest_ip: hostname or IP address
- default www.google.com
- timeout: timeout for icmp pings to complete.
- """
- ping_cmd = "ping -W 1"
- if count:
- ping_cmd += f" -c {count}"
- if dest_ip:
- ping_cmd += f" {dest_ip}"
- try:
- ad.log.info(
- "Starting ping test to %s using adb command %s", dest_ip, ping_cmd
- )
- out = str(ad.adb.shell(ping_cmd, timeout=timeout, ignore_status=True))
- if not parse_ping_ouput(ad, count, out, loss_tolerance):
- return False
- return True
- except Exception as e:
- ad.log.warning("Ping Test to %s failed with exception %s", dest_ip, e)
- return False
-
-
-def zip_directory(zip_name: str, src_dir: str) -> None:
- """Compress a directory to a .zip file.
-
- This implementation is thread-safe.
-
- Args:
- zip_name: str, name of the generated archive
- src_dir: str, path to the source directory
- """
- with zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) as zip:
- for root, dirs, files in os.walk(src_dir):
- for file in files:
- path = os.path.join(root, file)
- zip.write(path, os.path.relpath(path, src_dir))
-
-
-def unzip_maintain_permissions(zip_path: str, extract_location: str) -> None:
- """Unzip a .zip file while maintaining permissions.
-
- Args:
- zip_path: The path to the zipped file.
- extract_location: the directory to extract to.
- """
- with zipfile.ZipFile(zip_path, "r") as zip_file:
- for info in zip_file.infolist():
- _extract_file(zip_file, info, extract_location)
-
-
-def _extract_file(
- zip_file: zipfile.ZipFile, zip_info: zipfile.ZipInfo, extract_location: str
-) -> None:
- """Extracts a single entry from a ZipFile while maintaining permissions.
-
- Args:
- zip_file: A zipfile.ZipFile.
- zip_info: A ZipInfo object from zip_file.
- extract_location: The directory to extract to.
- """
- out_path = zip_file.extract(zip_info.filename, path=extract_location)
- perm = zip_info.external_attr >> 16
- os.chmod(out_path, perm)
-
-
-def get_command_uptime(command_regex: str) -> str:
- """Returns the uptime for a given command.
-
- Args:
- command_regex: A regex that matches the command line given. Must be
- pgrep compatible.
- """
- pid = job.run(f"pgrep -f {command_regex}").stdout.decode("utf-8")
- runtime = ""
- if pid:
- runtime = job.run(f'ps -o etime= -p "{pid}"').stdout.decode("utf-8")
- return runtime
-
-
-def get_device_process_uptime(adb: Any, process: str | int) -> Any:
- """Returns the uptime of a device process."""
- pid = adb.shell(f"pidof {process}", ignore_status=True)
- runtime = ""
- if pid:
- runtime = adb.shell(f'ps -o etime= -p "{pid}"')
- return runtime
-
-
-def is_valid_ipv4_address(address: str) -> bool:
- try:
- socket.inet_pton(socket.AF_INET, address)
- except AttributeError: # no inet_pton here, sorry
- try:
- socket.inet_aton(address)
- except socket.error:
- return False
- return address.count(".") == 3
- except socket.error: # not a valid address
- return False
-
- return True
-
-
-def is_valid_ipv6_address(address: str) -> bool:
- if "%" in address:
- address = address.split("%")[0]
- try:
- socket.inet_pton(socket.AF_INET6, address)
- except socket.error: # not a valid address
- return False
- return True
-
-
-def get_interface_ip_addresses(
- comm_channel: AndroidDevice | SshConnection | FuchsiaDevice,
- interface: str,
-) -> dict[str, list[str]]:
- """Gets all of the ip addresses, ipv4 and ipv6, associated with a
- particular interface name.
-
- Args:
- comm_channel: How to send commands to a device. Can be ssh, adb serial,
- etc. Must have the run function implemented.
- interface: The interface name on the device, e.g. eth0
-
- Returns:
- A dictionary mapping address categories to lists of addresses:
- ipv4_private: Any 192.168, 172.16, 10, or 169.254 addresses
- ipv4_public: Any IPv4 public addresses
- ipv6_link_local: Any fe80:: addresses
- ipv6_private_local: Any fd00:: addresses
- ipv6_public: Any publicly routable addresses
- """
- # Local imports are used here to prevent cyclic dependency.
- from antlion.controllers.android_device import AndroidDevice
- from antlion.controllers.fuchsia_device import FuchsiaDevice
- from antlion.controllers.utils_lib.ssh.connection import SshConnection
-
- addrs: list[str] = []
-
- if isinstance(comm_channel, AndroidDevice):
- addrs = str(
- comm_channel.adb.shell(
- f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
- )
- ).splitlines()
- elif isinstance(comm_channel, SshConnection):
- ip = comm_channel.run(["ip", "-o", "addr", "show", interface])
- addrs = [
- addr.replace("/", " ").split()[3]
- for addr in ip.stdout.decode("utf-8").splitlines()
- ]
- elif isinstance(comm_channel, FuchsiaDevice):
- for iface in comm_channel.honeydew_fd.netstack.list_interfaces():
- if iface.name != interface:
- continue
- for ipv4_address in iface.ipv4_addresses:
- addrs.append(str(ipv4_address))
- for ipv6_address in iface.ipv6_addresses:
- addrs.append(str(ipv6_address))
- else:
- raise ValueError("Unsupported method to send command to device.")
-
- ipv4_private_local_addresses = []
- ipv4_public_addresses = []
- ipv6_link_local_addresses = []
- ipv6_private_local_addresses = []
- ipv6_public_addresses = []
-
- for addr in addrs:
- on_device_ip = ipaddress.ip_address(addr)
- if on_device_ip.version == 4:
- if on_device_ip.is_private:
- ipv4_private_local_addresses.append(str(on_device_ip))
- elif on_device_ip.is_global or (
- # Carrier private doesn't have a property, so we check if
- # all other values are left unset.
- not on_device_ip.is_reserved
- and not on_device_ip.is_unspecified
- and not on_device_ip.is_link_local
- and not on_device_ip.is_loopback
- and not on_device_ip.is_multicast
- ):
- ipv4_public_addresses.append(str(on_device_ip))
- elif on_device_ip.version == 6:
- if on_device_ip.is_link_local:
- ipv6_link_local_addresses.append(str(on_device_ip))
- elif on_device_ip.is_private:
- ipv6_private_local_addresses.append(str(on_device_ip))
- elif on_device_ip.is_global:
- ipv6_public_addresses.append(str(on_device_ip))
-
- return {
- "ipv4_private": ipv4_private_local_addresses,
- "ipv4_public": ipv4_public_addresses,
- "ipv6_link_local": ipv6_link_local_addresses,
- "ipv6_private_local": ipv6_private_local_addresses,
- "ipv6_public": ipv6_public_addresses,
- }
-
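For reference, every category key is always present and maps to a (possibly empty) list of address strings. A sketch of a typical result; the device handle and all addresses are illustrative only:

    get_interface_ip_addresses(ssh_connection, "eth0")
    # might return, for example:
    # {
    #     "ipv4_private": ["192.168.1.24"],
    #     "ipv4_public": [],
    #     "ipv6_link_local": ["fe80::1ff:fe23:4567:890a"],
    #     "ipv6_private_local": [],
    #     "ipv6_public": ["2001:db8::1"],
    # }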
-
-class AddressTimeout(signals.TestError):
- pass
-
-
-class MultipleAddresses(signals.TestError):
- pass
-
-
-def get_addr(
- comm_channel: AndroidDevice | SshConnection | FuchsiaDevice,
- interface: str,
- addr_type: str = "ipv4_private",
- timeout_sec: int | None = None,
-) -> str:
- """Get the requested type of IP address for an interface; if an address is
- not available, retry until the timeout has been reached.
-
- Args:
- addr_type: Type of address to get as defined by the return value of
- utils.get_interface_ip_addresses.
- timeout_sec: Seconds to wait to acquire an address if there isn't one
- already available. If fetching an IPv4 address, the default is 3
- seconds. If IPv6, the default is 30 seconds for Duplicate Address
- Detection.
-
- Returns:
- A string containing the requested address.
-
- Raises:
- TestAbortClass: timeout_sec is None and invalid addr_type
- AddressTimeout: No address is available after timeout_sec
- MultipleAddresses: Several addresses are available
- """
- if not timeout_sec:
- if "ipv4" in addr_type:
- timeout_sec = 3
- elif "ipv6" in addr_type:
- timeout_sec = DAD_TIMEOUT_SEC
- else:
- raise signals.TestAbortClass(f'Unknown addr_type "{addr_type}"')
-
- timeout = time.time() + timeout_sec
- while time.time() < timeout:
- ip_addrs = get_interface_ip_addresses(comm_channel, interface)[
- addr_type
- ]
- if len(ip_addrs) > 1:
- raise MultipleAddresses(
- f'Expected only one "{addr_type}" address, got {ip_addrs}'
- )
- elif len(ip_addrs) == 1:
- return ip_addrs[0]
-
- raise AddressTimeout(
- f'No available "{addr_type}" address after {timeout_sec}s'
- )
-
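A minimal usage sketch of get_addr, with placeholder device handles and interface names: the default timeout is picked from the address family, so an IPv6 request quietly waits out Duplicate Address Detection.

    # Wait up to DAD_TIMEOUT_SEC (30 s) for a single link-local address.
    lla = get_addr(fuchsia_device, "wlan0", addr_type="ipv6_link_local")
    # Fail fast when an IPv4 address should already be assigned.
    ipv4 = get_addr(ssh_connection, "eth0", addr_type="ipv4_private", timeout_sec=1)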
-
-def get_interface_based_on_ip(runner: Runner, desired_ip_address: str) -> str:
- """Gets the interface for a particular IP
-
- Args:
- runner: The runner used to execute commands on the device; must have the
- run function implemented.
- desired_ip_address: The IP address that is being looked for on a device.
-
- Returns:
- The name of the test interface.
-
- Raises:
- RuntimeError: when desired_ip_address is not found
- """
-
- desired_ip_address = desired_ip_address.split("%", 1)[0]
- ip = runner.run(["ip", "-o", "addr", "show"])
- for line in ip.stdout.decode("utf-8").splitlines():
- if desired_ip_address in line:
- return line.split()[1]
- raise RuntimeError(
- f'IP "{desired_ip_address}" not found in list:\n{ip.stdout.decode("utf-8")}'
- )
-
-
-def renew_linux_ip_address(runner: Runner, interface: str) -> None:
- runner.run(f"sudo ip link set {interface} down")
- runner.run(f"sudo ip link set {interface} up")
- runner.run(f"sudo dhclient -r {interface}")
- runner.run(f"sudo dhclient {interface}")
-
-
-def get_ping_command(
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 56,
- os_type: str = "Linux",
- additional_ping_params: str = "",
-) -> str:
- """Builds ping command string based on address type, os, and params.
-
- Args:
- dest_ip: string, address to ping (ipv4 or ipv6)
- count: int, number of requests to send
- interval: int, time in milliseconds between requests
- timeout: int, time in milliseconds to wait for response
- size: int, number of bytes to send,
- os_type: string, os type of the source device (supports 'Linux',
- 'Darwin')
- additional_ping_params: string, command option flags to
- append to the command string
-
- Returns:
- The ping command.
- """
- if is_valid_ipv4_address(dest_ip):
- ping_binary = "ping"
- elif is_valid_ipv6_address(dest_ip):
- ping_binary = "ping6"
- else:
- raise ValueError(f"Invalid ip addr: {dest_ip}")
-
- if os_type == "Darwin":
- if is_valid_ipv6_address(dest_ip):
- # ping6 on MacOS doesn't support timeout
- logging.debug(
- "Ignoring timeout, as ping6 on MacOS does not support it."
- )
- timeout_flag = []
- else:
- timeout_flag = ["-t", str(timeout / 1000)]
- elif os_type == "Linux":
- timeout_flag = ["-W", str(timeout / 1000)]
- else:
- raise ValueError("Invalid OS. Only Linux and MacOS are supported.")
-
- ping_cmd = [
- ping_binary,
- *timeout_flag,
- "-c",
- str(count),
- "-i",
- str(interval / 1000),
- "-s",
- str(size),
- additional_ping_params,
- dest_ip,
- ]
- return " ".join(ping_cmd)
-
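As an illustration of the builder's output (values are examples only): with the Linux defaults, the millisecond arguments are converted to seconds, and the empty additional_ping_params leaves a double space before the destination.

    cmd = get_ping_command("192.168.1.1", count=3, interval=1000, timeout=1000, size=56)
    # cmd == "ping -W 1.0 -c 3 -i 1.0 -s 56  192.168.1.1"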
-
-def ping(
- comm_channel: Runner,
- dest_ip: str,
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 56,
- additional_ping_params: str = "",
-) -> PingResult:
- """Generic linux ping function, supports local (acts.libs.proc.job) and
- SshConnections (acts.libs.proc.job over ssh) to Linux based OSs and MacOS.
-
- NOTES: This will work with Android over SSH, but does not function over ADB
- as that has a unique return format.
-
- Args:
- comm_channel: communication channel over which to send ping command.
- Must have 'run' function that returns at least command, stdout,
- stderr, and exit_status (see antlion.libs.proc.job)
- dest_ip: address to ping (ipv4 or ipv6)
- count: int, number of packets to send
- interval: int, time in milliseconds between pings
- timeout: int, time in milliseconds to wait for response
- size: int, size of packets in bytes
- additional_ping_params: string, command option flags to
- append to the command string
-
- Returns:
- A PingResult containing:
- exit_status: int, return code of the ping command
- stdout: string
- stderr: string
- transmitted: int, number of packets transmitted
- received: int, number of packets received
- time_ms: float, time of ping command execution (in milliseconds)
- rtt_min_ms: float, minimum round trip time (in milliseconds)
- rtt_avg_ms: float, average round trip time (in milliseconds)
- rtt_max_ms: float, maximum round trip time (in milliseconds)
- rtt_mdev_ms: float, round trip time standard deviation (in milliseconds)
-
- Any values that cannot be parsed are left as None.
- """
- from antlion.controllers.utils_lib.ssh.connection import SshConnection
-
- is_local = comm_channel == job # type: ignore # Blanket ignore to enable mypy
- os_type = platform.system() if is_local else "Linux"
- ping_cmd = get_ping_command(
- dest_ip,
- count=count,
- interval=interval,
- timeout=timeout,
- size=size,
- os_type=os_type,
- additional_ping_params=additional_ping_params,
- )
-
- if isinstance(comm_channel, SshConnection) or is_local:
- logging.debug(
- "Running ping with parameters (count: %s, interval: %s, "
- "timeout: %s, size: %s)",
- count,
- interval,
- timeout,
- size,
- )
- try:
- ping_result: (
- subprocess.CompletedProcess[bytes] | CalledProcessError
- ) = comm_channel.run(ping_cmd)
- except CalledProcessError as e:
- ping_result = e
- else:
- raise ValueError(f"Unsupported comm_channel: {type(comm_channel)}")
-
- summary = re.search(
- "([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet "
- "loss.*?time ([0-9]+)",
- ping_result.stdout.decode("utf-8"),
- )
- rtt_stats = re.search(
- "= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)",
- ping_result.stdout.decode("utf-8"),
- )
- return PingResult(
- exit_status=ping_result.returncode,
- stdout=ping_result.stdout.decode("utf-8"),
- stderr=ping_result.stderr.decode("utf-8"),
- transmitted=int(summary.group(1)) if summary else None,
- received=int(summary.group(2)) if summary else None,
- time_ms=float(summary.group(4)) / 1000 if summary else None,
- rtt_min_ms=float(rtt_stats.group(1)) if rtt_stats else None,
- rtt_avg_ms=float(rtt_stats.group(2)) if rtt_stats else None,
- rtt_max_ms=float(rtt_stats.group(3)) if rtt_stats else None,
- rtt_mdev_ms=float(rtt_stats.group(4)) if rtt_stats else None,
- )
-
-
-@dataclass
-class PingResult:
- exit_status: int
- stdout: str
- stderr: str
- transmitted: int | None
- received: int | None
- time_ms: float | None
- rtt_min_ms: float | None
- rtt_avg_ms: float | None
- rtt_max_ms: float | None
- rtt_mdev_ms: float | None
-
- @property
- def success(self) -> bool:
- return self.exit_status == 0
-
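A minimal usage sketch of the ping helper over the local channel, with an example destination: success reflects the exit status, while the parsed statistics support finer-grained assertions.

    result = ping(job, "192.168.1.1", count=5)
    if not result.success:
        raise signals.TestFailure(f"ping failed: {result.stderr}")
    if result.transmitted != result.received:
        logging.warning("lossy link: avg rtt %s ms", result.rtt_avg_ms)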
-
-def ip_in_subnet(ip: str, subnet: str) -> bool:
- """Validate that ip is in a given subnet.
-
- Args:
- ip: string, ip address to verify (eg. '192.168.42.158')
- subnet: string, subnet to check (eg. '192.168.42.0/24')
-
- Returns:
- True, if ip in subnet, else False
- """
- return ipaddress.ip_address(ip) in ipaddress.ip_network(subnet)
-
-
-def mac_address_list_to_str(mac_addr_list: bytes) -> str:
- """Converts list of decimal octets representing mac address to string.
-
- Args:
- mac_addr_list: list, representing mac address octets in decimal
- e.g. [18, 52, 86, 120, 154, 188]
-
- Returns:
- string, mac address
- e.g. '12:34:56:78:9a:bc'
- """
- # Print each octet as hex, right justified, width of 2, and fill with "0".
- return ":".join([f"{octet:0>2x}" for octet in mac_addr_list])
-
-
-def get_fuchsia_mdns_ipv6_address(device_mdns_name: str) -> None | str:
- """Finds the IPv6 link-local address of a Fuchsia device matching a mDNS
- name.
-
- Args:
- device_mdns_name: name of Fuchsia device (e.g. gig-clone-sugar-slash)
-
- Returns:
- string, IPv6 link-local address
- """
- import psutil
- from zeroconf import IPVersion, Zeroconf
-
- if not device_mdns_name:
- return None
-
- def mdns_query(interface: str, address: str) -> None | str:
- logging.info(
- f'Sending mDNS query for device "{device_mdns_name}" using "{address}"'
- )
- try:
- zeroconf = Zeroconf(
- ip_version=IPVersion.V6Only, interfaces=[address]
- )
- except RuntimeError as e:
- if "No adapter found for IP address" in e.args[0]:
- # Most likely, a device went offline and its control
- # interface was deleted. This is acceptable since the
- # device that went offline isn't guaranteed to be the
- # device we're searching for.
- logging.warning(f'No adapter found for "{address}"')
- return None
- raise
-
- device_records = zeroconf.get_service_info(
- FUCHSIA_MDNS_TYPE, f"{device_mdns_name}.{FUCHSIA_MDNS_TYPE}"
- )
-
- if device_records:
- for device_address in device_records.parsed_addresses():
- device_ip_address = ipaddress.ip_address(device_address)
- scoped_address = f"{device_address}%{interface}"
- if (
- device_ip_address.version == 6
- and device_ip_address.is_link_local
- and ping(job, dest_ip=scoped_address).success # type: ignore # Blanket ignore to enable mypy
- ):
- logging.info(
- f'Found device "{device_mdns_name}" at "{scoped_address}"'
- )
- zeroconf.close()
- del zeroconf
- return scoped_address
-
- zeroconf.close()
- del zeroconf
- return None
-
- with ThreadPoolExecutor() as executor:
- futures = []
-
- interfaces = psutil.net_if_addrs()
- for interface in interfaces:
- for addr in interfaces[interface]:
- address = addr.address.split("%")[0]
- if (
- addr.family == socket.AF_INET6
- and ipaddress.ip_address(address).is_link_local
- and address != "fe80::1"
- ):
- futures.append(
- executor.submit(mdns_query, interface, address)
- )
-
- for future in futures:
- addr = future.result()
- if addr:
- return addr
-
- logging.error(f'Unable to find IP address for device "{device_mdns_name}"')
- return None
diff --git a/packages/antlion/validation.py b/packages/antlion/validation.py
deleted file mode 100644
index 750f8b5..0000000
--- a/packages/antlion/validation.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-from typing import Collection, Literal, Mapping, TypeGuard, TypeVar, overload
-
-from mobly import signals
-
-
-class ValidatorError(signals.TestAbortClass):
- pass
-
-
-class FieldNotFoundError(ValidatorError):
- pass
-
-
-class FieldTypeError(ValidatorError):
- pass
-
-
-T = TypeVar("T")
-
-
-class _NO_DEFAULT:
- pass
-
-
-class MapValidator:
- def __init__(self, map: Mapping[str, object]) -> None:
- self.map = map
-
- @overload
- def get(self, type: type[T], key: str, default: None) -> T | None:
- ...
-
- @overload
- def get(
- self, type: type[T], key: str, default: T | _NO_DEFAULT = _NO_DEFAULT()
- ) -> T:
- ...
-
- def get(
- self,
- type: type[T],
- key: str,
- default: T | None | _NO_DEFAULT = _NO_DEFAULT(),
- ) -> T | None:
- """Access the map requiring a value type at the specified key.
-
- If default is set and the map does not contain the specified key, the
- default will be returned.
-
- Args:
- type: Expected type of the value
- key: Key to index into the map with
- default: Default value when the map does not contain key
-
- Returns:
- Value of the expected type, or None if default is None.
-
- Raises:
- FieldNotFoundError: when default is not set and the map does not
- contain the specified key
- FieldTypeError: when the value at the specified key is not the
- expected type
- """
- if key not in self.map:
- if isinstance(default, type) or default is None:
- return default
- raise FieldNotFoundError(
- f'Required field "{key}" is missing; expected {type.__name__}'
- )
- val = self.map[key]
- if val is None and default is None:
- return None
- if not isinstance(val, type):
- raise FieldTypeError(
- f'Expected "{key}" to be {type.__name__}, got {describe_type(val)}'
- )
- return val
-
- @overload
- def list(self, key: str) -> ListValidator:
- ...
-
- @overload
- def list(self, key: str, optional: Literal[False]) -> ListValidator:
- ...
-
- @overload
- def list(self, key: str, optional: Literal[True]) -> ListValidator | None:
- ...
-
- def list(self, key: str, optional: bool = False) -> ListValidator | None:
- """Access the map requiring a list at the specified key.
-
- If optional is True and the map does not contain the specified key, None
- will be returned.
-
- Args:
- key: Key to index into the map with
- optional: If True, will return None if the map does not contain key
-
- Returns:
- ListValidator or None if optional is True.
-
- Raises:
- FieldNotFoundError: when optional is False and the map does not
- contain the specified key
- FieldTypeError: when the value at the specified key is not a list
- """
- if optional:
- val = self.get(list, key, None)
- else:
- val = self.get(list, key)
- return None if val is None else ListValidator(key, val)
-
-
-class ListValidator:
- def __init__(self, name: str, val: list[object]) -> None:
- self.name = name
- self.val = val
-
- def all(self, type: type[T]) -> list[T]:
- """Access the list requiring all elements to be the specified type.
-
- Args:
- type: Expected type of all elements
-
- Raises:
- FieldTypeError: when an element is not the expected type
- """
- if not is_list_of(self.val, type):
- raise FieldTypeError(
- f'Expected "{self.name}" to be list[{type.__name__}], '
- f"got {describe_type(self.val)}"
- )
- return self.val
-
-
-def describe_type(o: object) -> str:
- """Describe the complete type of the object.
-
- Differs from type() by recursing into mappings and collections.
- """
- if isinstance(o, Mapping):
- keys = set([describe_type(k) for k in o.keys()])
- values = set([describe_type(v) for v in o.values()])
- return f'dict[{" | ".join(keys)}, {" | ".join(values)}]'
- if isinstance(o, Collection) and not isinstance(o, str):
- elements = set([describe_type(x) for x in o])
- return f'list[{" | ".join(elements)}]'
- return type(o).__name__
-
-
-def is_list_of(val: list[object], type: type[T]) -> TypeGuard[list[T]]:
- return all(isinstance(x, type) for x in val)
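-
-# --- Illustrative usage sketch (editor addition, not original antlion code) ---
-# Demonstrates the MapValidator / ListValidator API defined above; the example
-# config dict and its keys are hypothetical values, not antlion-defined names.
-_example_config = {"name": "testbed-1", "ports": [1, 2, 3]}
-_validator = MapValidator(_example_config)
-assert _validator.get(str, "name") == "testbed-1"
-assert _validator.get(int, "ssh_port", 22) == 22  # missing key falls back to the default
-assert _validator.list("ports").all(int) == [1, 2, 3]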
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index d8ad68b..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,143 +0,0 @@
-# Reference at https://peps.python.org/pep-0621/
-
-[build-system]
-requires = ["setuptools", "setuptools-scm[toml]"]
-build-backend = "setuptools.build_meta"
-
-[project]
-name = "antlion"
-description = "Host-driven, hardware-agnostic Fuchsia connectivity tests"
-requires-python = ">=3.8"
-license = { text = "Apache-2.0" }
-dynamic = ["version"]
-readme = "README.md"
-dependencies = ["mobly==1.12.2", "pyyaml>=5.1", "tenacity~=8.0"]
-
-[project.optional-dependencies]
-# Required to support development tools
-dev = [
- "autoflake~=2.1", # remove unused code
- "black~=23.3", # code formatting
- "isort~=5.12", # import sorting
- "mock~=5.0", # required for unit tests
- "mypy~=1.8", # static type checking
- "shiv~=1.0", # packaging python
- "toml==0.10.2", # required for pyproject.toml
- "vulture~=2.11", # finds unused code
-
- # Library stubs for type checking
- "types-PyYAML~=6.0",
- "types-mock~=5.0",
- "types-psutil~=5.9",
-]
-digital_loggers_pdu = ["dlipower"]
-html_graphing = ["bokeh"]
-mdns = ["psutil", "zeroconf"]
-android = ["numpy", "scapy"]
-
-[tool.setuptools.packages.find]
-where = ["packages"]
-
-[tool.autoflake]
-imports = [
- "antlion",
- "dataclasses",
- "dlipower",
- "mobly",
- "mock",
- "numpy",
- "scapy",
- "tenacity",
- "zeroconf",
-]
-
-[tool.black]
-line-length = 80
-
-[tool.isort]
-profile = "black"
-line_length = 80
-known_local_folder = ["antlion"]
-
-[tool.mypy]
-mypy_path = "stubs, $FUCHSIA_DIR/src/testing/end_to_end/honeydew, $FUCHSIA_DIR/src/developer/ffx/lib/fuchsia-controller/python"
-python_version = "3.11"
-
-#
-# Disallow dynamic typing
-#
-
-#disallow_any_unimported = true
-#disallow_any_expr = true
-#disallow_any_decorated = true
-#disallow_any_explicit = true
-#disallow_any_generics = true
-disallow_subclassing_any = true
-
-#
-# Untyped definitions and calls
-#
-
-#disallow_untyped_calls = true
-#disallow_untyped_defs = true
-#disallow_incomplete_defs = true
-check_untyped_defs = true
-disallow_untyped_decorators = true
-
-#
-# Configuring warnings
-#
-
-warn_redundant_casts = true
-#warn_unused_ignores = true
-warn_no_return = true
-#warn_return_any = true
-#warn_unreachable = true
-
-#
-# Miscellaneous strictness flags
-#
-
-strict_equality = true
-
-exclude = [
- # TODO(http://b/285950835): Fix typing of relevant Fuchsia-related utilities and
- # tests
- "packages/antlion/unit_tests/",
-
- # TODO(http://b/274619290): Remove the following files when the migration from ACTS
- # to Mobly is complete.
- "packages/antlion/base_test.py",
- "packages/antlion/context.py",
- "packages/antlion/libs/yaml_writer.py",
-
- # TODO(http://b/285950976): Fix typing of non-Fuchsia related controllers and test
- # utilities, or remove if no longer relevant.
- "packages/antlion/controllers/adb.py",
- "packages/antlion/controllers/android_device.py",
- "packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py",
- "packages/antlion/controllers/sl4a_lib/event_dispatcher.py",
- "packages/antlion/controllers/sl4a_lib/rpc_client.py",
- "packages/antlion/controllers/sl4a_lib/sl4a_manager.py",
- "packages/antlion/controllers/sniffer_lib/local/tcpdump.py",
- "packages/antlion/controllers/sniffer_lib/local/tshark.py",
- "packages/antlion/libs/logging/log_stream.py",
- "packages/antlion/libs/ota/ota_runners/ota_runner_factory.py",
- "packages/antlion/libs/ota/ota_tools/ota_tool_factory.py",
- "setup.py",
-
- "stubs/mobly/",
-]
-
-[[tool.mypy.overrides]]
-module = ["png", "fidl.*", "fuchsia_controller_internal", "fuchsia_inspect"]
-ignore_missing_imports = true
-
-[[tool.mypy.overrides]]
-module = ["fidl.*", "fuchsia_controller_py"]
-ignore_errors = true
-
-[tool.vulture]
-paths = ["packages", "tests"]
-sort_by_size = true
-min_confidence = 80
diff --git a/runner/BUILD.gn b/runner/BUILD.gn
deleted file mode 100644
index 1986b59..0000000
--- a/runner/BUILD.gn
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2024 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//build/rust/rustc_binary.gni")
-import("//build/rust/rustc_test.gni")
-
-assert(is_host, "antlion-runner only supported on the host toolchain")
-
-rustc_binary("runner") {
- output_name = "antlion-runner"
- edition = "2021"
- with_unit_tests = true
-
- deps = [
- "//src/developer/ffx/lib/netext:lib",
- "//src/lib/mdns/rust:mdns",
- "//src/lib/network/packet",
- "//third_party/rust_crates:anyhow",
- "//third_party/rust_crates:argh",
- "//third_party/rust_crates:home",
- "//third_party/rust_crates:itertools",
- "//third_party/rust_crates:lazy_static",
- "//third_party/rust_crates:libc",
- "//third_party/rust_crates:nix",
- "//third_party/rust_crates:serde",
- "//third_party/rust_crates:serde_json",
- "//third_party/rust_crates:serde_yaml",
- "//third_party/rust_crates:signal-hook",
- "//third_party/rust_crates:socket2",
- "//third_party/rust_crates:thiserror",
- ]
-
- test_deps = [
- "//third_party/rust_crates:assert_matches",
- "//third_party/rust_crates:indoc",
- "//third_party/rust_crates:pretty_assertions",
- "//third_party/rust_crates:tempfile",
- ]
-
- sources = [
- "src/config.rs",
- "src/driver/infra.rs",
- "src/driver/local.rs",
- "src/driver/mod.rs",
- "src/env.rs",
- "src/finder.rs",
- "src/main.rs",
- "src/net.rs",
- "src/runner.rs",
- "src/yaml.rs",
- ]
-}
-
-group("tests") {
- testonly = true
- deps = [ ":runner_test" ]
-}
diff --git a/runner/README.md b/runner/README.md
deleted file mode 100644
index 169394a..0000000
--- a/runner/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-# antlion-runner
-
-A program to run antlion locally and in infrastructure. Includes a config
-generator with mDNS discovery and sensible defaults.
-
-## Using locally with an emulator
-
-Running antlion locally with a Fuchsia emulator allows developers to perform a
-sanity check on their changes. Running this way is very quick (~5 seconds) and
-can spot simple mistakes before code review!
-
-1. Build Fuchsia with antlion support
-
- ```sh
- jiri update -gc # if you haven't updated in a while
- fx set workstation_eng_paused.qemu-x64 \
- --with-host //third_party/antlion:e2e_tests \
- --with-host //third_party/antlion:tests
- fx build # if you haven't built in a while
- ```
-
-2. Start the package server. Keep this running in the background.
-
- ```sh
- fx serve
- ```
-
-3. In a separate terminal, start the emulator with access to external networks.
-
- ```sh
- fx ffx emu stop && fx ffx emu start -H --net tap && fx ffx log
- ```
-
-4. In a separate terminal, run a test
-
- ```sh
- fx test --e2e --output //third_party/antlion:sl4f_sanity_test
- ```
-
-## Using a specified config file
-
-```sh
-fx test --e2e --output //third_party/antlion:sl4f_sanity_test -- --config-override $(pwd)/config.yaml
-```
-
-## Testing
-
-```sh
-fx test --output //third_party/antlion/runner:runner_test
-```
diff --git a/runner/src/config.rs b/runner/src/config.rs
deleted file mode 100644
index c2afce0..0000000
--- a/runner/src/config.rs
+++ /dev/null
@@ -1,168 +0,0 @@
-// Copyright 2022 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use crate::net::IpAddr;
-use crate::yaml;
-
-use std::path::PathBuf;
-
-use serde::{Deserialize, Serialize};
-use serde_yaml::Value;
-
-#[derive(Clone, Debug, Serialize)]
-#[serde(rename_all = "PascalCase")]
-/// Config used by antlion for declaring testbeds and test parameters.
-pub(crate) struct Config {
- #[serde(rename = "TestBeds")]
- pub testbeds: Vec<Testbed>,
- pub mobly_params: MoblyParams,
-}
-
-impl Config {
- /// Merge the given test parameters into all testbeds.
- pub fn merge_test_params(&mut self, test_params: Value) {
- for testbed in self.testbeds.iter_mut() {
- match testbed.test_params.as_mut() {
- Some(existing) => yaml::merge(existing, test_params.clone()),
- None => testbed.test_params = Some(test_params.clone()),
- }
- }
- }
-}
-
-#[derive(Clone, Debug, Serialize)]
-#[serde(rename_all = "PascalCase")]
-/// Parameters consumed by Mobly.
-pub(crate) struct MoblyParams {
- pub log_path: PathBuf,
-}
-
-#[derive(Clone, Debug, Serialize)]
-#[serde(rename_all = "PascalCase")]
-/// A group of interconnected devices to be used together during an antlion test.
-pub(crate) struct Testbed {
- pub name: String,
- pub controllers: Controllers,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub test_params: Option<Value>,
-}
-
-#[derive(Clone, Debug, Default, Serialize)]
-pub(crate) struct Controllers {
- #[serde(rename = "FuchsiaDevice", skip_serializing_if = "Vec::is_empty")]
- pub fuchsia_devices: Vec<Fuchsia>,
- #[serde(rename = "AccessPoint", skip_serializing_if = "Vec::is_empty")]
- pub access_points: Vec<AccessPoint>,
- #[serde(rename = "Attenuator", skip_serializing_if = "Vec::is_empty")]
- pub attenuators: Vec<Attenuator>,
- #[serde(rename = "PduDevice", skip_serializing_if = "Vec::is_empty")]
- pub pdus: Vec<Pdu>,
- #[serde(rename = "IPerfServer", skip_serializing_if = "Vec::is_empty")]
- pub iperf_servers: Vec<IPerfServer>,
-}
-
-#[derive(Clone, Debug, Serialize)]
-/// A Fuchsia device for use with antlion as defined by [fuchsia_device.py].
-///
-/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/fuchsia_device.py
-pub(crate) struct Fuchsia {
- pub mdns_name: String,
- pub ip: IpAddr,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ssh_port: Option<u16>,
- pub take_bug_report_on_fail: bool,
- pub ssh_binary_path: PathBuf,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ssh_config: Option<PathBuf>,
- pub ffx_binary_path: PathBuf,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub ffx_subtools_search_path: Option<PathBuf>,
- pub ssh_priv_key: PathBuf,
- #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
- pub pdu_device: Option<PduRef>,
- pub hard_reboot_on_fail: bool,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-/// Reference to a PDU device. Used to specify which port the attached device
-/// maps to on the PDU.
-pub(crate) struct PduRef {
- #[serde(default = "default_pdu_device")]
- pub device: String,
- #[serde(rename(serialize = "host"))]
- pub ip: IpAddr,
- pub port: u8,
-}
-
-fn default_pdu_device() -> String {
- "synaccess.np02b".to_string()
-}
-
-#[derive(Clone, Debug, Serialize)]
-/// Declares an access point for use with antlion as defined by [access_point.py].
-///
-/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/access_point.py
-pub(crate) struct AccessPoint {
- pub wan_interface: String,
- pub ssh_config: SshConfig,
- #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
- pub pdu_device: Option<PduRef>,
- #[serde(rename = "Attenuator", skip_serializing_if = "Option::is_none")]
- pub attenuators: Option<Vec<AttenuatorRef>>,
-}
-
-#[derive(Clone, Debug, Serialize)]
-pub(crate) struct SshConfig {
- pub ssh_binary_path: PathBuf,
- pub host: IpAddr,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub port: Option<u16>,
- pub user: String,
- pub identity_file: PathBuf,
-}
-
-#[derive(Clone, Debug, Serialize)]
-/// Reference to an attenuator device. Used to specify which ports the attached
-/// devices' channels map to on the attenuator.
-pub(crate) struct AttenuatorRef {
- #[serde(rename = "Address")]
- pub address: IpAddr,
- #[serde(rename = "attenuator_ports_wifi_2g")]
- pub ports_2g: Vec<u8>,
- #[serde(rename = "attenuator_ports_wifi_5g")]
- pub ports_5g: Vec<u8>,
-}
-
-#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
-#[serde(rename_all = "PascalCase")]
-/// Declares an attenuator for use with antlion as defined by [attenuator.py].
-///
-/// [attenuator.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/attenuator.py
-pub(crate) struct Attenuator {
- pub model: String,
- pub instrument_count: u8,
- pub address: IpAddr,
- pub protocol: String,
- pub port: u16,
-}
-
-#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
-/// Declares a power distribution unit for use with antlion as defined by [pdu.py].
-///
-/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/pdu.py
-pub(crate) struct Pdu {
- pub device: String,
- pub host: IpAddr,
-}
-
-#[derive(Clone, Debug, Serialize)]
-/// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py].
-///
-/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/iperf_server.py
-pub(crate) struct IPerfServer {
- pub ssh_config: SshConfig,
- pub port: u16,
- pub test_interface: String,
- pub use_killall: bool,
-}
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs
deleted file mode 100644
index 424f014..0000000
--- a/runner/src/driver/infra.rs
+++ /dev/null
@@ -1,932 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use crate::config::{self, Config, PduRef};
-use crate::driver::Driver;
-use crate::env::Environment;
-use crate::net::IpAddr;
-use crate::yaml;
-
-use std::collections::{HashMap, HashSet};
-use std::fs;
-use std::path::{Path, PathBuf};
-
-use anyhow::{anyhow, Context, Result};
-use itertools::Itertools;
-use serde::Deserialize;
-use serde_yaml::Value;
-use thiserror::Error;
-
-const TESTBED_NAME: &'static str = "antlion-runner";
-const ENV_OUT_DIR: &'static str = "FUCHSIA_TEST_OUTDIR";
-const ENV_TESTBED_CONFIG: &'static str = "FUCHSIA_TESTBED_CONFIG";
-const TEST_SUMMARY_FILE: &'static str = "test_summary.yaml";
-
-#[derive(Debug)]
-/// Driver for running antlion on emulated and hardware testbeds hosted by
-/// Fuchsia infrastructure.
-pub(crate) struct InfraDriver {
- output_dir: PathBuf,
- config: Config,
-}
-
-#[derive(Error, Debug)]
-pub(crate) enum InfraDriverError {
- #[error("infra environment not detected, \"{0}\" environment variable not present")]
- NotDetected(String),
- #[error(transparent)]
- Config(#[from] ConfigError),
- #[error(transparent)]
- Other(#[from] anyhow::Error),
-}
-
-#[derive(Error, Debug)]
-pub(crate) enum ConfigError {
- #[error("ip {ip} in use by several devices")]
- DuplicateIp { ip: IpAddr },
- #[error("ip {ip} port {port} in use by several devices")]
- DuplicatePort { ip: IpAddr, port: u8 },
-}
-
-impl InfraDriver {
- /// Detect an InfraDriver. Returns `InfraDriverError::NotDetected` if the
- /// required environment variables are not present.
- pub fn new<E: Environment>(
- env: E,
- ssh_binary: PathBuf,
- ffx_binary: PathBuf,
- ffx_subtools_search_path: Option<PathBuf>,
- ) -> Result<Self, InfraDriverError> {
- let config_path = match env.var(ENV_TESTBED_CONFIG) {
- Ok(p) => PathBuf::from(p),
- Err(std::env::VarError::NotPresent) => {
- return Err(InfraDriverError::NotDetected(ENV_TESTBED_CONFIG.to_string()))
- }
- Err(e) => {
- return Err(InfraDriverError::Other(anyhow!(
- "Failed to read \"{ENV_TESTBED_CONFIG}\" {e}"
- )))
- }
- };
- let config = fs::read_to_string(&config_path)
- .with_context(|| format!("Failed to read \"{}\"", config_path.display()))?;
- let targets: Vec<InfraTarget> = serde_json::from_str(&config)
- .with_context(|| format!("Failed to parse into InfraTarget: \"{config}\""))?;
- if targets.len() == 0 {
- return Err(InfraDriverError::Other(anyhow!(
- "Expected at least one target declared in \"{}\"",
- config_path.display()
- )));
- }
-
- let output_path = match env.var(ENV_OUT_DIR) {
- Ok(p) => p,
- Err(std::env::VarError::NotPresent) => {
- return Err(InfraDriverError::NotDetected(ENV_OUT_DIR.to_string()))
- }
- Err(e) => {
- return Err(InfraDriverError::Other(anyhow!(
- "Failed to read \"{ENV_OUT_DIR}\" {e}"
- )))
- }
- };
- let output_dir = PathBuf::from(output_path);
- if !fs::metadata(&output_dir).context("Failed to stat the output directory")?.is_dir() {
- return Err(InfraDriverError::Other(anyhow!(
- "Expected a directory but found a file at \"{}\"",
- output_dir.display()
- )));
- }
-
- Ok(InfraDriver {
- output_dir: output_dir.clone(),
- config: InfraDriver::parse_targets(
- targets,
- ssh_binary,
- ffx_binary,
- ffx_subtools_search_path,
- output_dir,
- )?,
- })
- }
-
- fn parse_targets(
- targets: Vec<InfraTarget>,
- ssh_binary: PathBuf,
- ffx_binary: PathBuf,
- ffx_subtools_search_path: Option<PathBuf>,
- output_dir: PathBuf,
- ) -> Result<Config, InfraDriverError> {
- let mut fuchsia_devices: Vec<config::Fuchsia> = vec![];
- let mut access_points: Vec<config::AccessPoint> = vec![];
- let mut attenuators: HashMap<IpAddr, config::Attenuator> = HashMap::new();
- let mut pdus: HashMap<IpAddr, config::Pdu> = HashMap::new();
- let mut iperf_servers: Vec<config::IPerfServer> = vec![];
- let mut test_params: Option<Value> = None;
-
- let mut used_ips: HashSet<IpAddr> = HashSet::new();
- let mut used_ports: HashMap<IpAddr, HashSet<u8>> = HashMap::new();
-
- let mut register_ip = |ip: IpAddr| -> Result<(), InfraDriverError> {
- if !used_ips.insert(ip.clone()) {
- return Err(ConfigError::DuplicateIp { ip }.into());
- }
- Ok(())
- };
-
- let mut register_port = |ip: IpAddr, port: u8| -> Result<(), InfraDriverError> {
- match used_ports.get_mut(&ip) {
- Some(ports) => {
- if !ports.insert(port) {
- return Err(ConfigError::DuplicatePort { ip, port }.into());
- }
- }
- None => {
- if used_ports.insert(ip, HashSet::from([port])).is_some() {
- return Err(InfraDriverError::Other(anyhow!(
- "Used ports set was unexpectedly modified by concurrent use",
- )));
- }
- }
- };
- Ok(())
- };
-
- let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> {
- if let Some(PduRef { device, ip, port }) = p {
- register_port(ip.clone(), port)?;
- let new = config::Pdu { device, host: ip.clone() };
- if let Some(old) = pdus.insert(ip.clone(), new.clone()) {
- if old != new {
- return Err(ConfigError::DuplicateIp { ip }.into());
- }
- }
- }
- Ok(())
- };
-
- let mut register_attenuator = |a: Option<AttenuatorRef>| -> Result<(), InfraDriverError> {
- if let Some(a) = a {
- let new = config::Attenuator {
- model: "minicircuits".to_string(),
- instrument_count: 4,
- address: a.ip.clone(),
- protocol: "http".to_string(),
- port: 80,
- };
- if let Some(old) = attenuators.insert(a.ip.clone(), new.clone()) {
- if old != new {
- return Err(ConfigError::DuplicateIp { ip: a.ip }.into());
- }
- }
- }
- Ok(())
- };
-
- let mut merge_test_params = |p: Option<Value>| {
- match (test_params.as_mut(), p) {
- (None, Some(new)) => test_params = Some(new),
- (Some(existing), Some(new)) => yaml::merge(existing, new),
- (_, None) => {}
- };
- };
-
- for target in targets {
- match target {
- InfraTarget::FuchsiaDevice { nodename, ipv4, ipv6, ssh_key, pdu, test_params } => {
- let ip: IpAddr = if !ipv4.is_empty() {
- ipv4.parse().context("Invalid IPv4 address")
- } else if !ipv6.is_empty() {
- ipv6.parse().context("Invalid IPv6 address")
- } else {
- Err(anyhow!("IP address not specified"))
- }?;
-
- fuchsia_devices.push(config::Fuchsia {
- mdns_name: nodename.clone(),
- ip: ip.clone(),
- ssh_port: None,
- take_bug_report_on_fail: true,
- ssh_binary_path: ssh_binary.clone(),
- // TODO(http://b/244747218): Remove when ssh_config is refactored away
- ssh_config: None,
- ffx_binary_path: ffx_binary.clone(),
- ffx_subtools_search_path: ffx_subtools_search_path.clone(),
- ssh_priv_key: ssh_key.clone(),
- pdu_device: pdu.clone(),
- hard_reboot_on_fail: true,
- });
-
- register_ip(ip)?;
- register_pdu(pdu)?;
- merge_test_params(test_params);
- }
- InfraTarget::AccessPoint { ip, attenuator, pdu, ssh_key } => {
- access_points.push(config::AccessPoint {
- wan_interface: "eth0".to_string(),
- ssh_config: config::SshConfig {
- ssh_binary_path: ssh_binary.clone(),
- host: ip.clone(),
- port: None,
- user: "root".to_string(),
- identity_file: ssh_key.clone(),
- },
- pdu_device: pdu.clone(),
- attenuators: attenuator.as_ref().map(|a| {
- vec![config::AttenuatorRef {
- address: a.ip.clone(),
- ports_2g: vec![1, 2, 3],
- ports_5g: vec![1, 2, 3],
- }]
- }),
- });
-
- register_ip(ip)?;
- register_pdu(pdu)?;
- register_attenuator(attenuator)?;
- }
- InfraTarget::IPerfServer { ip, user, test_interface, pdu, ssh_key } => {
- iperf_servers.push(config::IPerfServer {
- ssh_config: config::SshConfig {
- ssh_binary_path: ssh_binary.clone(),
- host: ip.clone(),
- port: None,
- user: user.to_string(),
- identity_file: ssh_key.clone(),
- },
- port: 5201,
- test_interface: test_interface.clone(),
- use_killall: true,
- });
-
- register_ip(ip.clone())?;
- register_pdu(pdu)?;
- }
- };
- }
-
- Ok(Config {
- testbeds: vec![config::Testbed {
- name: TESTBED_NAME.to_string(),
- controllers: config::Controllers {
- fuchsia_devices: fuchsia_devices,
- access_points: access_points,
- attenuators: attenuators
- .into_values()
- .sorted_by_key(|a| a.address.clone())
- .collect(),
- pdus: pdus.into_values().sorted_by_key(|p| p.host.clone()).collect(),
- iperf_servers: iperf_servers,
- },
- test_params,
- }],
- mobly_params: config::MoblyParams { log_path: output_dir },
- })
- }
-}
-
-impl Driver for InfraDriver {
- fn output_path(&self) -> &Path {
- self.output_dir.as_path()
- }
- fn config(&self) -> Config {
- self.config.clone()
- }
- fn teardown(&self) -> Result<()> {
- let results_path =
- self.output_dir.join(TESTBED_NAME).join("latest").join(TEST_SUMMARY_FILE);
- match fs::File::open(&results_path) {
- Ok(mut results) => {
- println!("\nTest results from {}\n", results_path.display());
- println!("[=====MOBLY RESULTS=====]");
- std::io::copy(&mut results, &mut std::io::stdout())
- .context("Failed to copy results to stdout")?;
- }
- Err(e) => eprintln!("Failed to open \"{}\": {}", results_path.display(), e),
- };
-
- // Remove any symlinks from the output directory; symlinks cause errors
- // while uploading to CAS.
- //
- // TODO: Remove when the fix is released and supported on Swarming bots
- // https://github.com/bazelbuild/remote-apis-sdks/pull/229.
- remove_symlinks(self.output_dir.clone())?;
-
- Ok(())
- }
-}
-
-fn remove_symlinks<P: AsRef<Path>>(path: P) -> Result<()> {
- let meta = fs::symlink_metadata(path.as_ref())?;
- if meta.is_symlink() {
- fs::remove_file(path)?;
- } else if meta.is_dir() {
- for entry in fs::read_dir(path)? {
- remove_symlinks(entry?.path())?;
- }
- }
- Ok(())
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(tag = "type")]
-/// Schema used to communicate target information from the test environment set
-/// up by botanist.
-///
-/// See https://cs.opensource.google/fuchsia/fuchsia/+/main:tools/botanist/README.md
-enum InfraTarget {
- FuchsiaDevice {
- nodename: String,
- ipv4: String,
- ipv6: String,
- ssh_key: PathBuf,
- pdu: Option<PduRef>,
- test_params: Option<Value>,
- },
- AccessPoint {
- ip: IpAddr,
- ssh_key: PathBuf,
- attenuator: Option<AttenuatorRef>,
- pdu: Option<PduRef>,
- },
- IPerfServer {
- ip: IpAddr,
- ssh_key: PathBuf,
- #[serde(default = "default_iperf_user")]
- user: String,
- test_interface: String,
- pdu: Option<PduRef>,
- },
-}
-
-fn default_iperf_user() -> String {
- "pi".to_string()
-}
-
-#[derive(Clone, Debug, Deserialize)]
-struct AttenuatorRef {
- ip: IpAddr,
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- use crate::generate_config_and_run;
- use crate::runner::{ExitStatus, Runner};
-
- use std::ffi::OsStr;
-
- use assert_matches::assert_matches;
- use indoc::formatdoc;
- use pretty_assertions::assert_eq;
- use serde_json::json;
- use tempfile::{NamedTempFile, TempDir};
-
- const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
- const FUCHSIA_ADDR: &'static str = "fe80::1%2";
-
- #[derive(Default)]
- struct MockRunner {
- out_dir: PathBuf,
- config: std::cell::Cell<PathBuf>,
- }
- impl MockRunner {
- fn new(out_dir: PathBuf) -> Self {
- Self { out_dir, ..Default::default() }
- }
- }
- impl Runner for MockRunner {
- fn run(&self, config: PathBuf) -> Result<ExitStatus> {
- self.config.set(config);
-
- let antlion_out = self.out_dir.join(TESTBED_NAME).join("latest");
- fs::create_dir_all(&antlion_out)
- .context("Failed to create antlion output directory")?;
- fs::write(antlion_out.join(TEST_SUMMARY_FILE), "")
- .context("Failed to write test_summary.yaml")?;
- Ok(ExitStatus::Ok)
- }
- }
-
- struct MockEnvironment {
- config: Option<PathBuf>,
- out_dir: Option<PathBuf>,
- }
- impl Environment for MockEnvironment {
- fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, std::env::VarError> {
- if key.as_ref() == ENV_TESTBED_CONFIG {
- self.config
- .clone()
- .ok_or(std::env::VarError::NotPresent)
- .map(|p| p.into_os_string().into_string().unwrap())
- } else if key.as_ref() == ENV_OUT_DIR {
- self.out_dir
- .clone()
- .ok_or(std::env::VarError::NotPresent)
- .map(|p| p.into_os_string().into_string().unwrap())
- } else {
- Err(std::env::VarError::NotPresent)
- }
- }
- }
-
- #[test]
- fn infra_not_detected() {
- let ssh = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let env = MockEnvironment { config: None, out_dir: None };
-
- let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
- assert_matches!(got, Err(InfraDriverError::NotDetected(_)));
- }
-
- #[test]
- fn infra_not_detected_config() {
- let ssh = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
- let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) };
-
- let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
- assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG);
- }
-
- #[test]
- fn infra_not_detected_out_dir() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": FUCHSIA_NAME,
- "ipv4": "",
- "ipv6": FUCHSIA_ADDR,
- "ssh_key": ssh_key.path(),
- }]),
- )
- .unwrap();
-
- let env =
- MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None };
-
- let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
- assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR);
- }
-
- #[test]
- fn infra_invalid_config() {
- let ssh = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(testbed_config.as_file(), &json!({ "foo": "bar" })).unwrap();
-
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
-
- let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
- assert_matches!(got, Err(_));
- }
-
- #[test]
- fn infra() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": FUCHSIA_NAME,
- "ipv4": "",
- "ipv6": FUCHSIA_ADDR,
- "ssh_key": ssh_key.path(),
- }]),
- )
- .unwrap();
-
- let runner = MockRunner::new(out_dir.path().to_path_buf());
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
- let driver = InfraDriver::new(
- env,
- ssh.path().to_path_buf(),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- )
- .unwrap();
- generate_config_and_run(runner, driver, None).unwrap();
-
- let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display().to_string();
- let ssh_key_path = ssh_key.path().display().to_string();
- let ffx_path = ffx.path().display().to_string();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_ADDR}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- hard_reboot_on_fail: true
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-
- #[test]
- fn infra_with_test_params() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": FUCHSIA_NAME,
- "ipv4": "",
- "ipv6": FUCHSIA_ADDR,
- "ssh_key": ssh_key.path(),
- "test_params": {
- "sl4f_sanity_test_params": {
- "can_overwrite": false,
- "from_original": true,
- }
- }
- }]),
- )
- .unwrap();
-
- let runner = MockRunner::new(out_dir.path().to_path_buf());
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
- let driver = InfraDriver::new(
- env,
- ssh.path().to_path_buf(),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- )
- .unwrap();
- let params = "
- sl4f_sanity_test_params:
- merged_with: true
- can_overwrite: true
- ";
- let params = serde_yaml::from_str(params).unwrap();
- generate_config_and_run(runner, driver, Some(params)).unwrap();
-
- let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display().to_string();
- let ssh_key_path = ssh_key.path().display().to_string();
- let ffx_path = ffx.path().display().to_string();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_ADDR}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- hard_reboot_on_fail: true
- TestParams:
- sl4f_sanity_test_params:
- can_overwrite: true
- from_original: true
- merged_with: true
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-
- #[test]
- fn infra_with_auxiliary_devices() {
- const FUCHSIA_PDU_IP: &'static str = "192.168.42.14";
- const FUCHSIA_PDU_PORT: u8 = 1;
- const AP_IP: &'static str = "192.168.42.11";
- const AP_AND_IPERF_PDU_IP: &'static str = "192.168.42.13";
- const AP_PDU_PORT: u8 = 1;
- const ATTENUATOR_IP: &'static str = "192.168.42.15";
- const IPERF_IP: &'static str = "192.168.42.12";
- const IPERF_USER: &'static str = "alice";
- const IPERF_PDU_PORT: u8 = 2;
-
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": FUCHSIA_NAME,
- "ipv4": "",
- "ipv6": FUCHSIA_ADDR,
- "ssh_key": ssh_key.path(),
- "pdu": {
- "ip": FUCHSIA_PDU_IP,
- "port": FUCHSIA_PDU_PORT,
- },
- }, {
- "type": "AccessPoint",
- "ip": AP_IP,
- "ssh_key": ssh_key.path(),
- "attenuator": {
- "ip": ATTENUATOR_IP,
- },
- "pdu": {
- "ip": AP_AND_IPERF_PDU_IP,
- "port": AP_PDU_PORT,
- "device": "fancy-pdu",
- },
- }, {
- "type": "IPerfServer",
- "ip": IPERF_IP,
- "ssh_key": ssh_key.path(),
- "user": IPERF_USER,
- "test_interface": "eth0",
- "pdu": {
- "ip": AP_AND_IPERF_PDU_IP,
- "port": IPERF_PDU_PORT,
- "device": "fancy-pdu",
- },
- }]),
- )
- .unwrap();
-
- let runner = MockRunner::new(out_dir.path().to_path_buf());
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
- let driver = InfraDriver::new(
- env,
- ssh.path().to_path_buf(),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- )
- .unwrap();
- generate_config_and_run(runner, driver, None).unwrap();
-
- let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display().to_string();
- let ssh_key_path = ssh_key.path().display().to_string();
- let ffx_path = ffx.path().display().to_string();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_ADDR}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- PduDevice:
- device: synaccess.np02b
- host: {FUCHSIA_PDU_IP}
- port: {FUCHSIA_PDU_PORT}
- hard_reboot_on_fail: true
- AccessPoint:
- - wan_interface: eth0
- ssh_config:
- ssh_binary_path: {ssh_path}
- host: {AP_IP}
- user: root
- identity_file: {ssh_key_path}
- PduDevice:
- device: fancy-pdu
- host: {AP_AND_IPERF_PDU_IP}
- port: {AP_PDU_PORT}
- Attenuator:
- - Address: {ATTENUATOR_IP}
- attenuator_ports_wifi_2g:
- - 1
- - 2
- - 3
- attenuator_ports_wifi_5g:
- - 1
- - 2
- - 3
- Attenuator:
- - Model: minicircuits
- InstrumentCount: 4
- Address: {ATTENUATOR_IP}
- Protocol: http
- Port: 80
- PduDevice:
- - device: fancy-pdu
- host: {AP_AND_IPERF_PDU_IP}
- - device: synaccess.np02b
- host: {FUCHSIA_PDU_IP}
- IPerfServer:
- - ssh_config:
- ssh_binary_path: {ssh_path}
- host: {IPERF_IP}
- user: {IPERF_USER}
- identity_file: {ssh_key_path}
- port: 5201
- test_interface: eth0
- use_killall: true
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-
- #[test]
- fn infra_duplicate_port_pdu() {
- let pdu_ip: IpAddr = "192.168.42.13".parse().unwrap();
- let pdu_port = 1;
-
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": "foo",
- "ipv4": "",
- "ipv6": "fe80::1%2",
- "ssh_key": ssh_key.path(),
- "pdu": {
- "ip": pdu_ip,
- "port": pdu_port,
- },
- }, {
- "type": "AccessPoint",
- "ip": "192.168.42.11",
- "ssh_key": ssh_key.path(),
- "pdu": {
- "ip": pdu_ip,
- "port": pdu_port,
- },
- }]),
- )
- .unwrap();
-
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
- let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
- assert_matches!(got,
- Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port }))
- if ip == pdu_ip && port == pdu_port
- );
- }
-
- #[test]
- fn infra_duplicate_ip_pdu() {
- let duplicate_ip: IpAddr = "192.168.42.13".parse().unwrap();
-
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": "foo",
- "ipv4": "",
- "ipv6": "fe80::1%2",
- "ssh_key": ssh_key.path(),
- "pdu": {
- "ip": duplicate_ip,
- "port": 1,
- "device": "A",
- },
- }, {
- "type": "AccessPoint",
- "ip": "192.168.42.11",
- "ssh_key": ssh_key.path(),
- "pdu": {
- "ip": duplicate_ip,
- "port": 2,
- "device": "B",
- },
- }]),
- )
- .unwrap();
-
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
- assert_matches!(
- InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None),
- Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
- if ip == duplicate_ip
- );
- }
-
- #[test]
- fn infra_duplicate_ip_devices() {
- let duplicate_ip: IpAddr = "192.168.42.11".parse().unwrap();
-
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let testbed_config = NamedTempFile::new().unwrap();
- serde_json::to_writer_pretty(
- testbed_config.as_file(),
- &json!([{
- "type": "FuchsiaDevice",
- "nodename": "foo",
- "ipv4": duplicate_ip,
- "ipv6": "",
- "ssh_key": ssh_key.path(),
- }, {
- "type": "AccessPoint",
- "ip": duplicate_ip,
- "ssh_key": ssh_key.path(),
- }]),
- )
- .unwrap();
-
- let env = MockEnvironment {
- config: Some(testbed_config.path().to_path_buf()),
- out_dir: Some(out_dir.path().to_path_buf()),
- };
- let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
- assert_matches!(got,
- Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
- if ip == duplicate_ip
- );
- }
-
- #[test]
- fn remove_symlinks_works() {
- const SYMLINK_FILE: &'static str = "latest";
-
- let out_dir = TempDir::new().unwrap();
- let test_file = NamedTempFile::new_in(&out_dir).unwrap();
- let symlink_path = out_dir.path().join(SYMLINK_FILE);
-
- #[cfg(unix)]
- std::os::unix::fs::symlink(&test_file, &symlink_path).unwrap();
- #[cfg(windows)]
- std::os::windows::fs::symlink_file(&test_file, &symlink_path).unwrap();
-
- assert_matches!(remove_symlinks(out_dir.path()), Ok(()));
- assert_matches!(fs::symlink_metadata(symlink_path), Err(e) if e.kind() == std::io::ErrorKind::NotFound);
- assert_matches!(fs::symlink_metadata(test_file), Ok(meta) if meta.is_file());
- }
-}
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs
deleted file mode 100644
index e067034..0000000
--- a/runner/src/driver/local.rs
+++ /dev/null
@@ -1,504 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use crate::config;
-use crate::driver::Driver;
-use crate::finder::{Answer, Finder};
-use crate::net::IpAddr;
-
-use anyhow::format_err;
-use std::path::{Path, PathBuf};
-
-use anyhow::{ensure, Context, Result};
-use home::home_dir;
-
-const TESTBED_NAME: &'static str = "antlion-runner";
-
-/// Driver for running antlion locally on an emulated or hardware testbed with
-/// optional mDNS discovery when a DHCP server is not available. This is useful
-/// for testing changes locally in a development environment.
-pub(crate) struct LocalDriver {
- target: LocalTarget,
- access_point: Option<LocalAccessPoint>,
- output_dir: PathBuf,
- ssh_binary: PathBuf,
- ffx_binary: PathBuf,
- ffx_subtools_search_path: Option<PathBuf>,
-}
-
-impl LocalDriver {
- pub fn new<F>(
- finder: F,
- device: Option<String>,
- ssh_binary: PathBuf,
- ssh_key: Option<PathBuf>,
- ffx_binary: PathBuf,
- ffx_subtools_search_path: Option<PathBuf>,
- out_dir: Option<PathBuf>,
- ap_ip: Option<String>,
- ap_ssh_port: Option<u16>,
- ap_ssh_key: Option<PathBuf>,
- ) -> Result<Self>
- where
- F: Finder,
- {
- let output_dir = match out_dir {
- Some(p) => Ok(p),
- None => std::env::current_dir().context("Failed to get current working directory"),
- }?;
-
- let target = LocalTarget::new(finder, device, ssh_key)?;
-
- // If an access point IP has been provided, try to derive other AP-related parameters
- let access_point = if let Some(ip_str) = ap_ip {
- let ssh_port = ap_ssh_port.unwrap_or_else(|| {
- let default_ssh_port = 22;
- println!("AP IP provided without AP SSH port, assuming {default_ssh_port}");
- default_ssh_port
- });
- let ssh_key = match ap_ssh_key {
- Some(path) => Ok(path),
- None => match find_ap_ssh_key() {
- Ok(path) => {
- println!("Using AP SSH key found at {}", path.display());
- Ok(path)
- }
- Err(e) => Err(e),
- },
- }?;
- Some(LocalAccessPoint {
- ip: ip_str.parse::<IpAddr>().expect("Failed to parse AP IP address"),
- ssh_port: Some(ssh_port),
- ssh_key,
- })
- } else {
- None
- };
-
- Ok(Self {
- target,
- access_point,
- output_dir,
- ssh_binary,
- ffx_binary,
- ffx_subtools_search_path,
- })
- }
-}
-
-fn find_ap_ssh_key() -> Result<PathBuf> {
- // Look for the SSH key at some known paths
- let home_dir = std::env::var("HOME").map_err(|_| {
- format_err!(
- "AP IP was provided, but AP SSH key not provided and could not be automatically found"
- )
- })?;
- let home_dir = Path::new(&home_dir);
- let ssh_key_search_paths =
- [home_dir.join(".ssh/onhub_testing_rsa"), home_dir.join(".ssh/testing_rsa")];
- for path in ssh_key_search_paths.clone() {
- if path.exists() {
- return Ok(path);
- }
- }
- let ssh_key_search_paths =
- ssh_key_search_paths.map(|p| p.to_string_lossy().into_owned()).join(", ");
- return Err(format_err!("AP IP is provided, but AP SSH key was not provided, and not found in default locations: [{}]", ssh_key_search_paths));
-}
-
-impl Driver for LocalDriver {
- fn output_path(&self) -> &Path {
- self.output_dir.as_path()
- }
- fn config(&self) -> config::Config {
- let mut access_points = vec![];
- if let Some(ref ap) = self.access_point {
- access_points.push(config::AccessPoint {
- wan_interface: "eth0".to_string(),
- ssh_config: config::SshConfig {
- ssh_binary_path: self.ssh_binary.clone(),
- host: ap.ip.clone(),
- port: ap.ssh_port,
- user: "root".to_string(),
- identity_file: ap.ssh_key.clone(),
- },
- pdu_device: None,
- attenuators: None,
- });
- }
-
- config::Config {
- testbeds: vec![config::Testbed {
- name: TESTBED_NAME.to_string(),
- controllers: config::Controllers {
- fuchsia_devices: vec![config::Fuchsia {
- mdns_name: self.target.name.clone(),
- ip: self.target.ip.clone(),
- take_bug_report_on_fail: true,
- ssh_port: self.target.ssh_port.clone(),
- ssh_binary_path: self.ssh_binary.clone(),
- // TODO(http://b/244747218): Remove when ssh_config is refactored away
- ssh_config: None,
- ffx_binary_path: self.ffx_binary.clone(),
- ffx_subtools_search_path: self.ffx_subtools_search_path.clone(),
- ssh_priv_key: self.target.ssh_key.clone(),
- pdu_device: None,
- hard_reboot_on_fail: false,
- }],
- access_points: access_points,
- ..Default::default()
- },
- test_params: None,
- }],
- mobly_params: config::MoblyParams { log_path: self.output_dir.clone() },
- }
- }
- fn teardown(&self) -> Result<()> {
- println!(
- "\nView full antlion logs at {}",
- self.output_dir.join(TESTBED_NAME).join("latest").display()
- );
- Ok(())
- }
-}
-
-struct LocalAccessPoint {
- ip: IpAddr,
- ssh_port: Option<u16>,
- ssh_key: PathBuf,
-}
-
-/// LocalTarget performs best-effort discovery of target information from
-/// standard Fuchsia environment variables.
-struct LocalTarget {
- name: String,
- ip: IpAddr,
- ssh_port: Option<u16>,
- ssh_key: PathBuf,
-}
-
-impl LocalTarget {
- fn new<F: Finder>(finder: F, device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self> {
- let Answer { name, ip, ssh_port } = finder.find_device(device)?;
-
- // TODO: Move this validation out to Args
- let ssh_key = ssh_key
- .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519")))
- .context("Failed to detect the private Fuchsia SSH key")?;
-
- ensure!(
- ssh_key.try_exists().with_context(|| format!(
- "Failed to check existence of SSH key \"{}\"",
- ssh_key.display()
- ))?,
- "Cannot find SSH key \"{}\"",
- ssh_key.display()
- );
-
- Ok(LocalTarget { name, ip, ssh_port, ssh_key })
- }
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- use crate::generate_config_and_run;
- use crate::runner::{ExitStatus, Runner};
-
- use indoc::formatdoc;
- use pretty_assertions::assert_eq;
- use tempfile::{NamedTempFile, TempDir};
-
- const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
- const FUCHSIA_ADDR: &'static str = "fe80::1%eth0";
- const FUCHSIA_IP: &'static str = "fe80::1";
- const FUCHSIA_IPV4: &'static str = "127.0.0.1";
- const FUCHSIA_SSH_PORT: u16 = 5002;
- const SCOPE_ID: &'static str = "eth0";
-
- struct MockFinder;
- impl Finder for MockFinder {
- fn find_device(&self, _: Option<String>) -> Result<Answer> {
- Ok(Answer {
- name: FUCHSIA_NAME.to_string(),
- ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID.to_string())),
- ssh_port: None,
- })
- }
- }
-
- struct MockFinderWithSsh;
- impl Finder for MockFinderWithSsh {
- fn find_device(&self, _: Option<String>) -> Result<Answer> {
- Ok(Answer {
- name: FUCHSIA_NAME.to_string(),
- ip: IpAddr::V4(FUCHSIA_IPV4.parse().unwrap()),
- ssh_port: Some(FUCHSIA_SSH_PORT),
- })
- }
- }
-
- #[derive(Default)]
- struct MockRunner {
- config: std::cell::Cell<PathBuf>,
- }
- impl Runner for MockRunner {
- fn run(&self, config: PathBuf) -> Result<ExitStatus> {
- self.config.set(config);
- Ok(ExitStatus::Ok)
- }
- }
-
- #[test]
- fn local_invalid_ssh_key() {
- let ssh = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- assert!(LocalDriver::new(
- MockFinder {},
- None,
- ssh.path().to_path_buf(),
- Some(PathBuf::new()),
- ffx.path().to_path_buf(),
- None,
- Some(out_dir.path().to_path_buf()),
- None,
- None,
- None,
- )
- .is_err());
- }
-
- #[test]
- fn local() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let runner = MockRunner::default();
- let driver = LocalDriver::new(
- MockFinder {},
- None,
- ssh.path().to_path_buf(),
- Some(ssh_key.path().to_path_buf()),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- Some(out_dir.path().to_path_buf()),
- None,
- None,
- None,
- )
- .unwrap();
-
- generate_config_and_run(runner, driver, None).unwrap();
-
- let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display();
- let ssh_key_path = ssh_key.path().display();
- let ffx_path = ffx.path().display();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_ADDR}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- hard_reboot_on_fail: false
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-
- #[test]
- fn local_with_ssh_port() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let runner = MockRunner::default();
- let driver = LocalDriver::new(
- MockFinderWithSsh {},
- None,
- ssh.path().to_path_buf(),
- Some(ssh_key.path().to_path_buf()),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- Some(out_dir.path().to_path_buf()),
- None,
- None,
- None,
- )
- .unwrap();
-
- generate_config_and_run(runner, driver, None).unwrap();
-
- let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display();
- let ssh_key_path = ssh_key.path().display();
- let ffx_path = ffx.path().display();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_IPV4}
- ssh_port: {FUCHSIA_SSH_PORT}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- hard_reboot_on_fail: false
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-
- #[test]
- fn local_with_test_params() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let out_dir = TempDir::new().unwrap();
-
- let runner = MockRunner::default();
- let driver = LocalDriver::new(
- MockFinder {},
- None,
- ssh.path().to_path_buf(),
- Some(ssh_key.path().to_path_buf()),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- Some(out_dir.path().to_path_buf()),
- None,
- None,
- None,
- )
- .unwrap();
-
- let params_yaml = "
- sl4f_sanity_test_params:
- foo: bar
- ";
- let params = serde_yaml::from_str(params_yaml).unwrap();
-
- generate_config_and_run(runner, driver, Some(params)).unwrap();
-
- let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display().to_string();
- let ssh_key_path = ssh_key.path().display().to_string();
- let ffx_path = ffx.path().display().to_string();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_ADDR}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- hard_reboot_on_fail: false
- TestParams:
- sl4f_sanity_test_params:
- foo: bar
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-
- #[test]
- fn local_with_ap() {
- let ssh = NamedTempFile::new().unwrap();
- let ssh_key = NamedTempFile::new().unwrap();
- let ffx = NamedTempFile::new().unwrap();
- let ffx_subtools = TempDir::new().unwrap();
- let out_dir = TempDir::new().unwrap();
- let ap_ssh_key = NamedTempFile::new().unwrap();
- let ap_ssh_port: u16 = 1245;
- let ap_ip = "192.168.1.1".to_string();
-
- let runner = MockRunner::default();
- let driver = LocalDriver::new(
- MockFinder {},
- None,
- ssh.path().to_path_buf(),
- Some(ssh_key.path().to_path_buf()),
- ffx.path().to_path_buf(),
- Some(ffx_subtools.path().to_path_buf()),
- Some(out_dir.path().to_path_buf()),
- Some(ap_ip.clone()),
- Some(ap_ssh_port),
- Some(ap_ssh_key.path().to_path_buf()),
- )
- .unwrap();
-
- generate_config_and_run(runner, driver, None).unwrap();
-
- let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
-
- let ssh_path = ssh.path().display();
- let ssh_key_path = ssh_key.path().display();
- let ap_ssh_key_path = ap_ssh_key.path().display();
- let ffx_path = ffx.path().display();
- let ffx_subtools_path = ffx_subtools.path().display();
- let out_path = out_dir.path().display();
- let want = formatdoc! {r#"
- TestBeds:
- - Name: {TESTBED_NAME}
- Controllers:
- FuchsiaDevice:
- - mdns_name: {FUCHSIA_NAME}
- ip: {FUCHSIA_ADDR}
- take_bug_report_on_fail: true
- ssh_binary_path: {ssh_path}
- ffx_binary_path: {ffx_path}
- ffx_subtools_search_path: {ffx_subtools_path}
- ssh_priv_key: {ssh_key_path}
- hard_reboot_on_fail: false
- AccessPoint:
- - wan_interface: eth0
- ssh_config:
- ssh_binary_path: {ssh_path}
- host: {ap_ip}
- port: {ap_ssh_port}
- user: root
- identity_file: {ap_ssh_key_path}
- MoblyParams:
- LogPath: {out_path}
- "#};
-
- assert_eq!(got, want);
- }
-}
diff --git a/runner/src/driver/mod.rs b/runner/src/driver/mod.rs
deleted file mode 100644
index 35de41f..0000000
--- a/runner/src/driver/mod.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-pub(crate) mod infra;
-pub(crate) mod local;
-
-use crate::config::Config;
-
-use std::path::Path;
-
-use anyhow::Result;
-
-/// A Driver supplies the configuration and environment details needed to run
-/// an antlion test.
-pub(crate) trait Driver {
- /// Path to output directory for test artifacts.
- fn output_path(&self) -> &Path;
- /// Antlion config for use during test.
- fn config(&self) -> Config;
- /// Additional logic to run after all tests run, regardless of tests passing
- /// or failing.
- fn teardown(&self) -> Result<()>;
-}
diff --git a/runner/src/env.rs b/runner/src/env.rs
deleted file mode 100644
index ede8b74..0000000
--- a/runner/src/env.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use std::env::VarError;
-use std::ffi::OsStr;
-
-/// Inspection of the process's environment.
-pub(crate) trait Environment {
- /// Fetches the environment variable `key` from the current process.
- ///
- /// See [std::env::var] for details.
- ///
- /// [std::env::var]: https://doc.rust-lang.org/std/env/fn.var.html
- fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError>;
-}
-
-/// Query the local process's environment.
-pub(crate) struct LocalEnvironment;
-
-impl Environment for LocalEnvironment {
- fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError> {
- std::env::var(key)
- }
-}
diff --git a/runner/src/finder.rs b/runner/src/finder.rs
deleted file mode 100644
index 9cb2032..0000000
--- a/runner/src/finder.rs
+++ /dev/null
@@ -1,266 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use crate::net::IpAddr;
-
-use itertools::Itertools;
-use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket};
-use std::path::PathBuf;
-use std::process::Command;
-use std::time::{Duration, Instant};
-use std::{io, str};
-
-use anyhow::{bail, format_err, Context, Result};
-use mdns::protocol as dns;
-use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface};
-use packet::{InnerPacketBuilder, ParseBuffer};
-use socket2::{Domain, Protocol, Socket, Type};
-
-const FUCHSIA_DOMAIN: &str = "_fuchsia._udp.local";
-const MDNS_MCAST_V6: Ipv6Addr = Ipv6Addr::new(0xff02, 0, 0, 0, 0, 0, 0, 0x00fb);
-const MDNS_PORT: u16 = 5353;
-const MDNS_TIMEOUT: Duration = Duration::from_secs(10);
-
-lazy_static::lazy_static! {
- static ref MDNS_QUERY: &'static [u8] = construct_query_buf(FUCHSIA_DOMAIN);
-}
-
-/// Find Fuchsia devices.
-pub(crate) trait Finder {
- /// Find a Fuchsia device, preferring `device_name` if specified.
- fn find_device(&self, device_name: Option<String>) -> Result<Answer>;
-}
-
-/// Answer from a Finder.
-pub(crate) struct Answer {
- /// Name of the Fuchsia device.
- pub name: String,
- /// IP address of the Fuchsia device.
- pub ip: IpAddr,
- /// Port of the Fuchsia device for SSH.
- pub ssh_port: Option<u16>,
-}
-
-pub(crate) struct FfxDevice {
- pub ffx_binary: PathBuf,
-}
-pub(crate) struct MulticastDns {}
-
-impl Finder for FfxDevice {
- /// Queries FFX for a registered device
- fn find_device(&self, device_name: Option<String>) -> Result<Answer> {
- let program = self.ffx_binary.clone().into_os_string().into_string().unwrap();
- let mut args: Vec<&str> = vec!["--machine", "json"];
- if device_name.is_some() {
- args.push("-t");
- args.push(device_name.as_ref().unwrap());
- }
- args.push("target");
- args.push("show");
-
- println!("Querying FFX for device parameters: {} {}", program, args.iter().format(" "));
-
- let output = Command::new(program).args(args).output().expect("failed to execute process");
- if output.status.success() {
- let output_str = String::from_utf8(output.stdout).unwrap();
- let output_json: serde_json::Value = serde_json::from_str(&output_str).unwrap();
- let target = output_json["target"].as_object().unwrap();
- let name = target["name"].as_str().unwrap();
- let ssh_address = target["ssh_address"].as_object().unwrap();
- let host = ssh_address["host"].as_str().unwrap();
- let port = ssh_address["port"].as_u64().unwrap();
- let ip = host
- .replace("[", "") // FFX returns IPv6 addresses wrapped in brackets, which doesn't work with `.parse()`
- .replace("]", "")
- .parse()
- .context(format!("Attempting to parse string into IP address: {}", host))
- .unwrap();
-
- let answer = Answer { name: name.to_string(), ip, ssh_port: Some(port as u16) };
- println!("Device {} at {}:{:?}", answer.name, answer.ip, port);
- Ok(answer)
- } else {
- return Err(format_err!(
- "FFX exited with status {}: {} {}",
- output.status,
- String::from_utf8(output.stdout).unwrap(),
- String::from_utf8(output.stderr).unwrap()
- ));
- }
- }
-}
-
-impl Finder for MulticastDns {
- /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the
- /// first device will be used.
- fn find_device(&self, device_name: Option<String>) -> Result<Answer> {
- let interfaces =
- get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?;
- let interface_names =
- interfaces.iter().map(|i| i.name.clone()).collect::<Vec<String>>().join(", ");
- if let Some(ref d) = device_name {
- println!("Performing mDNS discovery for {d} on interfaces: {interface_names}");
- } else {
- println!("Performing mDNS discovery on interfaces: {interface_names}");
- }
-
- let socket = create_socket(interfaces.iter()).context("Failed to create mDNS socket")?;
-
- // TODO(http://b/264936590): Remove the race condition where the Fuchsia
- // device can send its answer before this socket starts listening. Add an
- // async runtime and concurrently listen for answers while sending queries.
- send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?;
- let answer = listen_for_answers(socket, device_name)?;
-
-        println!("Device {} found at {}", answer.name, answer.ip);
- Ok(Answer { name: answer.name, ip: answer.ip, ssh_port: None })
- }
-}
-
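-/// Build a one-shot mDNS PTR query for `service`, leaking the serialized
-/// buffer so it can be shared for the lifetime of the process.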
-fn construct_query_buf(service: &str) -> &'static [u8] {
- let question = dns::QuestionBuilder::new(
- dns::DomainBuilder::from_str(service).unwrap(),
- dns::Type::Ptr,
- dns::Class::In,
- true,
- );
-
- let mut message = dns::MessageBuilder::new(0, true);
- message.add_question(question);
-
- let mut buf = vec![0; message.bytes_len()];
- message.serialize(buf.as_mut_slice());
- Box::leak(buf.into_boxed_slice())
-}
-
-/// Create a socket for both sending and listening on all multicast-capable
-/// interfaces.
-fn create_socket<'a>(interfaces: impl Iterator<Item = &'a McastInterface>) -> Result<Socket> {
- let socket = Socket::new(Domain::IPV6, Type::DGRAM, Some(Protocol::UDP))?;
- let read_timeout = Duration::from_millis(100);
- socket
- .set_read_timeout(Some(read_timeout))
- .with_context(|| format!("Failed to set SO_RCVTIMEO to {}ms", read_timeout.as_millis()))?;
- socket.set_only_v6(true).context("Failed to set IPV6_V6ONLY")?;
- socket.set_reuse_address(true).context("Failed to set SO_REUSEADDR")?;
- socket.set_reuse_port(true).context("Failed to set SO_REUSEPORT")?;
-
- for interface in interfaces {
- // Listen on all multicast-enabled interfaces
- match interface.id() {
- Ok(id) => match socket.join_multicast_v6(&MDNS_MCAST_V6, id) {
- Ok(()) => {}
- Err(e) => eprintln!("Failed to join mDNS multicast group on interface {id}: {e}"),
- },
- Err(e) => eprintln!("Failed to listen on interface {}: {}", interface.name, e),
- }
- }
-
- socket
- .bind(&SocketAddrV6::new(Ipv6Addr::UNSPECIFIED, 0, 0, 0).into())
-        .context("Failed to bind to the unspecified IPv6 address")?;
-
- Ok(socket)
-}
-
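-/// Send the mDNS query out of every multicast-enabled interface, once per
-/// non-loopback, local IPv6 address.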
-fn send_queries<'a>(
- socket: &Socket,
- interfaces: impl Iterator<Item = &'a McastInterface>,
-) -> Result<()> {
- let to_addr = SocketAddrV6::new(MDNS_MCAST_V6, MDNS_PORT, 0, 0).into();
-
- for interface in interfaces {
- let id = interface
- .id()
- .with_context(|| format!("Failed to get interface ID for {}", interface.name))?;
- socket
- .set_multicast_if_v6(id)
- .with_context(|| format!("Failed to set multicast interface for {}", interface.name))?;
- for addr in &interface.addrs {
- if let SocketAddr::V6(addr_v6) = addr {
- if !addr.ip().is_local_addr() || addr.ip().is_loopback() {
- continue;
- }
- if let Err(e) = socket.send_to(&MDNS_QUERY, &to_addr) {
- eprintln!(
- "Failed to send mDNS query out {} via {}: {e}",
- interface.name,
- addr_v6.ip()
- );
- continue;
- }
- }
- }
- }
- Ok(())
-}
-
-struct MdnsAnswer {
- name: String,
- ip: IpAddr,
-}
-
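-/// Listen for mDNS answers until `MDNS_TIMEOUT` elapses, returning the first
-/// Fuchsia device whose name matches `device_name`, or the first Fuchsia
-/// device seen when no name was requested.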
-fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<MdnsAnswer> {
- let s: UdpSocket = socket.into();
- let mut buf = [0; 1500];
-
- let end = Instant::now() + MDNS_TIMEOUT;
- while Instant::now() < end {
- match s.recv_from(&mut buf) {
- Ok((packet_bytes, src_sock_addr)) => {
- if !src_sock_addr.ip().is_local_addr() {
- continue;
- }
-
- let mut packet_buf = &mut buf[..packet_bytes];
- match packet_buf.parse::<dns::Message<_>>() {
- Ok(message) => {
- if !message.answers.iter().any(|a| a.domain == FUCHSIA_DOMAIN) {
- continue;
- }
- for answer in message.additional {
- if let Some(std::net::IpAddr::V6(addr)) = answer.rdata.ip_addr() {
- if let SocketAddr::V6(src_v6) = src_sock_addr {
- let name = answer
- .domain
- .to_string()
- .trim_end_matches(".local")
- .to_string();
- let scope_id = scope_id_to_name_checked(src_v6.scope_id())?;
-
- if let Some(ref device) = device_name {
- if &name != device {
- println!("Found irrelevant device {name} at {addr}%{scope_id}");
- continue;
- }
- }
-
- return Ok(MdnsAnswer {
- name,
- ip: IpAddr::V6(addr, Some(scope_id)),
- });
- }
- }
- }
- }
- Err(err) => eprintln!("Failed to parse mDNS packet: {err:?}"),
- }
- }
- Err(err) if err.kind() == io::ErrorKind::WouldBlock => {}
- Err(err) => return Err(err.into()),
- }
- }
-
- bail!("device {device_name:?} not found")
-}
-
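-/// Convert an IPv6 scope ID into its interface name via `if_indextoname`,
-/// failing if the ID does not refer to a valid network interface.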
-fn scope_id_to_name_checked(scope_id: u32) -> Result<String> {
- let mut buf = vec![0; libc::IF_NAMESIZE];
- let res = unsafe { libc::if_indextoname(scope_id, buf.as_mut_ptr() as *mut libc::c_char) };
- if res.is_null() {
- bail!("{scope_id} is not a valid network interface ID")
- } else {
- Ok(String::from_utf8_lossy(&buf.split(|&c| c == 0u8).next().unwrap_or(&[0u8])).to_string())
- }
-}
diff --git a/runner/src/main.rs b/runner/src/main.rs
deleted file mode 100644
index d2e89dc..0000000
--- a/runner/src/main.rs
+++ /dev/null
@@ -1,225 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-mod config;
-mod driver;
-mod env;
-mod finder;
-mod net;
-mod runner;
-mod yaml;
-
-use crate::driver::infra::{InfraDriver, InfraDriverError};
-use crate::runner::ExitStatus;
-
-use std::fs;
-use std::fs::File;
-use std::path::PathBuf;
-use std::process::ExitCode;
-
-use anyhow::{Context, Result};
-use argh::FromArgs;
-use serde_yaml::Value;
-
-#[derive(FromArgs)]
-/// antlion runner with config generation
-struct Args {
- /// name of the Fuchsia device to use for testing; defaults to using mDNS
- /// discovery
- #[argh(option)]
- device: Option<String>,
-
- /// path to the SSH binary used to communicate with all devices
- #[argh(option, from_str_fn(parse_file))]
- ssh_binary: PathBuf,
-
- /// path to the SSH private key used to communicate with Fuchsia; defaults
- /// to ~/.ssh/fuchsia_ed25519
- #[argh(option, from_str_fn(parse_file))]
- ssh_key: Option<PathBuf>,
-
- /// path to the FFX binary used to communicate with Fuchsia
- #[argh(option, from_str_fn(parse_file))]
- ffx_binary: PathBuf,
-
-    /// search path for FFX subtools used to communicate with Fuchsia
- #[argh(option, from_str_fn(parse_directory))]
- ffx_subtools_search_path: Option<PathBuf>,
-
- /// path to the python interpreter binary (e.g. /bin/python3.9)
- #[argh(option)]
- python_bin: String,
-
- /// path to the antlion zipapp, ending in .pyz
- #[argh(option, from_str_fn(parse_file))]
- antlion_pyz: PathBuf,
-
- /// path to a directory for outputting artifacts; defaults to the current
- /// working directory or FUCHSIA_TEST_OUTDIR
- #[argh(option, from_str_fn(parse_directory))]
- out_dir: Option<PathBuf>,
-
- /// path to additional YAML config for this test; placed in the
- /// "test_params" key in the antlion config
- #[argh(option, from_str_fn(parse_file))]
- test_params: Option<PathBuf>,
-
- /// list of test cases to run; defaults to all test cases
- #[argh(positional)]
- test_cases: Vec<String>,
-
-    /// user-defined configuration for the test; overrides all other options related to the test
-    /// configuration. By default, a config file will be generated based on the other parameters.
- #[argh(option, from_str_fn(parse_file))]
- config_override: Option<PathBuf>,
-
- /// ip of the AP
- #[argh(option)]
- ap_ip: Option<String>,
-
- /// ssh port of the AP
- #[argh(option)]
- ap_ssh_port: Option<u16>,
-
- /// path to the SSH private key used to communicate with the AP
- #[argh(option, from_str_fn(parse_file))]
- ap_ssh_key: Option<PathBuf>,
-}
-
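-/// Parse a path argument, verifying that the file exists and can be opened.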
-fn parse_file(s: &str) -> Result<PathBuf, String> {
- let path = PathBuf::from(s);
- let _ = File::open(&path).map_err(|e| format!("Failed to open \"{s}\": {e}"))?;
- Ok(path)
-}
-
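-/// Parse a path argument, verifying that the path exists and is not a file.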
-fn parse_directory(s: &str) -> Result<PathBuf, String> {
- let path = PathBuf::from(s);
- let meta =
- std::fs::metadata(&path).map_err(|e| format!("Failed to read metadata of \"{s}\": {e}"))?;
- if meta.is_file() {
- return Err(format!("Expected a directory but found a file at \"{s}\""));
- }
- Ok(path)
-}
-
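-/// Run antlion against an existing config file, logging the outcome and
-/// converting antlion's exit status into a process exit code.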
-fn run_with_config<R>(runner: R, config_path: PathBuf) -> Result<ExitCode>
-where
- R: runner::Runner,
-{
- let exit_code = runner.run(config_path).context("Failed to run antlion")?;
- match exit_code {
- ExitStatus::Ok => println!("Antlion successfully exited"),
- ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
- ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
- ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
- };
- Ok(exit_code.into())
-}
-
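-/// Generate an antlion config from the driver, merge in any additional test
-/// params, write the config to the output directory, then run antlion and
-/// tear down the driver.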
-fn generate_config_and_run<R, D>(
- runner: R,
- driver: D,
- test_params: Option<Value>,
-) -> Result<ExitCode>
-where
- R: runner::Runner,
- D: driver::Driver,
-{
- let mut config = driver.config();
- if let Some(params) = test_params {
- config.merge_test_params(params);
- }
-
- let yaml =
- serde_yaml::to_string(&config).context("Failed to convert antlion config to YAML")?;
-
- let output_path = driver.output_path().to_path_buf();
- let config_path = output_path.join("config.yaml");
- println!("Generating config {}", config_path.display());
- println!("\n{yaml}\n");
- fs::write(&config_path, yaml).context("Failed to write config to file")?;
-
- let result = run_with_config(runner, config_path);
- driver.teardown().context("Failed to teardown environment")?;
-
- result
-}
-
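-/// Entry point: use a user-provided config when given, otherwise prefer the
-/// infra driver, then FFX-based device discovery, and finally fall back to
-/// mDNS discovery.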
-fn main() -> Result<ExitCode> {
- let args: Args = argh::from_env();
- let env = env::LocalEnvironment;
- let runner = runner::ProcessRunner {
- python_bin: args.python_bin,
- antlion_pyz: args.antlion_pyz,
- test_cases: args.test_cases,
- };
-
- let test_params = match args.test_params {
- Some(path) => {
- let text = fs::read_to_string(&path)
- .with_context(|| format!("Failed to read file \"{}\"", path.display()))?;
- let yaml = serde_yaml::from_str(&text)
- .with_context(|| format!("Failed to parse \"{text}\" as YAML"))?;
- Some(yaml)
- }
- None => None,
- };
-
- if let Some(config_path) = args.config_override {
- println!("Using config at {}", config_path.display());
- return run_with_config(runner, config_path);
- }
-
- match InfraDriver::new(
- env,
- args.ssh_binary.clone(),
- args.ffx_binary.clone(),
- args.ffx_subtools_search_path.clone(),
- ) {
- Ok(env) => return generate_config_and_run(runner, env, test_params),
- Err(InfraDriverError::NotDetected(_)) => {}
- Err(InfraDriverError::Config(e)) => {
- return Err(anyhow::Error::from(e).context("Config validation"))
- }
- Err(InfraDriverError::Other(e)) => {
- return Err(anyhow::Error::from(e).context("Unexpected infra driver error"))
- }
- };
-
- let ffx_finder = finder::FfxDevice { ffx_binary: args.ffx_binary.clone() };
- let driver_via_ffx_discovery = driver::local::LocalDriver::new(
- ffx_finder,
- args.device.clone(),
- args.ssh_binary.clone(),
- args.ssh_key.clone(),
- args.ffx_binary.clone(),
- args.ffx_subtools_search_path.clone(),
- args.out_dir.clone(),
- args.ap_ip.clone(),
- args.ap_ssh_port,
- args.ap_ssh_key.clone(),
- );
- match driver_via_ffx_discovery {
- Ok(driver) => return generate_config_and_run(runner, driver, test_params),
- Err(e) => {
- println!("Failed to generate device config via FFX: {:?}", e);
- println!("Falling back to mDNS discovery");
- }
- };
-
- let driver = driver::local::LocalDriver::new(
- finder::MulticastDns {},
- args.device.clone(),
- args.ssh_binary.clone(),
- args.ssh_key.clone(),
- args.ffx_binary.clone(),
- args.ffx_subtools_search_path.clone(),
- args.out_dir.clone(),
- args.ap_ip.clone(),
- args.ap_ssh_port,
- args.ap_ssh_key.clone(),
- )
- .context("Failed to generate config for local environment")?;
- generate_config_and_run(runner, driver, test_params)
-}
diff --git a/runner/src/net.rs b/runner/src/net.rs
deleted file mode 100644
index 35dc07a..0000000
--- a/runner/src/net.rs
+++ /dev/null
@@ -1,219 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use std::fmt::{Debug, Display};
-use std::marker::PhantomData;
-use std::net::{Ipv4Addr, Ipv6Addr};
-
-use netext::IsLocalAddr;
-use serde::{Deserialize, Serialize};
-use thiserror::Error;
-
-/// IP address with support for IPv6 scope identifiers as defined in RFC 4007.
-#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
-pub enum IpAddr {
- /// An IPv4 address.
- V4(Ipv4Addr),
- /// An IPv6 address with optional scope identifier.
- V6(Ipv6Addr, Option<String>),
-}
-
-impl Into<std::net::IpAddr> for IpAddr {
- fn into(self) -> std::net::IpAddr {
- match self {
- IpAddr::V4(ip) => std::net::IpAddr::from(ip),
- IpAddr::V6(ip, _) => std::net::IpAddr::from(ip),
- }
- }
-}
-
-impl From<Ipv6Addr> for IpAddr {
- fn from(value: Ipv6Addr) -> Self {
- IpAddr::V6(value, None)
- }
-}
-
-impl From<Ipv4Addr> for IpAddr {
- fn from(value: Ipv4Addr) -> Self {
- IpAddr::V4(value)
- }
-}
-
-impl From<std::net::IpAddr> for IpAddr {
- fn from(value: std::net::IpAddr) -> Self {
- match value {
- std::net::IpAddr::V4(ip) => IpAddr::from(ip),
- std::net::IpAddr::V6(ip) => IpAddr::from(ip),
- }
- }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Error)]
-/// An error which can be returned when parsing an IP address with optional IPv6
-/// scope ID. See [`std::net::AddrParseError`].
-pub enum AddrParseError {
- #[error(transparent)]
- IpInvalid(#[from] std::net::AddrParseError),
- #[error("no interface found with name \"{0}\"")]
- InterfaceNotFound(String),
- #[error("only IPv6 link-local may include a scope ID")]
- /// Scope IDs are only supported for IPv6 link-local addresses as per RFC
- /// 6874 Section 4.
- ScopeNotSupported,
-}
-
-impl std::str::FromStr for IpAddr {
- type Err = AddrParseError;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- let mut parts = s.splitn(2, '%');
- let addr = parts.next().unwrap(); // first element is guaranteed
- let ip = std::net::IpAddr::from_str(addr)?;
- let scope = parts.next();
- match (ip, scope) {
- (std::net::IpAddr::V4(ip), None) => Ok(IpAddr::from(ip)),
- (std::net::IpAddr::V4(_), Some(_)) => Err(AddrParseError::ScopeNotSupported),
- (std::net::IpAddr::V6(ip), None) => Ok(IpAddr::V6(ip, None)),
- (std::net::IpAddr::V6(ip), Some(scope)) => {
- if !ip.is_link_local_addr() {
- return Err(AddrParseError::ScopeNotSupported);
- }
-                if scope.is_empty() {
-                    return Err(AddrParseError::InterfaceNotFound(scope.to_string()));
-                }
- Ok(IpAddr::V6(ip, Some(scope.to_string())))
- }
- }
- }
-}
-
-impl Display for IpAddr {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- IpAddr::V4(ip) => Display::fmt(ip, f),
- IpAddr::V6(ip, None) => Display::fmt(ip, f),
- IpAddr::V6(ip, Some(scope)) => {
- Display::fmt(ip, f)?;
- write!(f, "%{}", scope)
- }
- }
- }
-}
-
-impl Debug for IpAddr {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- Display::fmt(self, f)
- }
-}
-
-impl Serialize for IpAddr {
- fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: serde::Serializer,
- {
- serializer.serialize_str(self.to_string().as_str())
- }
-}
-
-impl<'de> Deserialize<'de> for IpAddr {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: serde::Deserializer<'de>,
- {
- deserializer.deserialize_str(FromStrVisitor::new())
- }
-}
-
-struct FromStrVisitor<T> {
- ty: PhantomData<T>,
-}
-
-impl<T> FromStrVisitor<T> {
- fn new() -> Self {
- FromStrVisitor { ty: PhantomData }
- }
-}
-
-impl<'de, T> serde::de::Visitor<'de> for FromStrVisitor<T>
-where
- T: std::str::FromStr,
- T::Err: std::fmt::Display,
-{
- type Value = T;
-
- fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- formatter.write_str("IP address")
- }
-
- fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
- where
- E: serde::de::Error,
- {
- s.parse().map_err(serde::de::Error::custom)
- }
-}
-
-#[cfg(test)]
-mod test {
- use super::{AddrParseError, IpAddr};
- use assert_matches::assert_matches;
-
- #[test]
- fn parse_ip_invalid() {
- assert_matches!("".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
- assert_matches!("192.168.1.".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
- assert_matches!("fe80:".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
- }
-
- #[test]
- fn parse_ipv4() {
- assert_matches!(
- "192.168.1.1".parse::<IpAddr>(),
- Ok(IpAddr::V4(ip))
- if ip == "192.168.1.1".parse::<std::net::Ipv4Addr>().unwrap()
- );
- }
-
- #[test]
- fn parse_ipv4_with_scope() {
- assert_matches!(
- "192.168.1.1%1".parse::<IpAddr>(),
- Err(AddrParseError::ScopeNotSupported)
- );
- }
-
- #[test]
- fn parse_ipv6() {
- assert_matches!(
- "fe80::1".parse::<IpAddr>(),
- Ok(IpAddr::V6(ip, None))
- if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
- );
- }
-
- #[test]
- fn parse_ipv6_global_with_scope() {
- assert_matches!("2001::1%1".parse::<IpAddr>(), Err(AddrParseError::ScopeNotSupported));
- }
-
- #[test]
- fn parse_ipv6_link_local_with_scope() {
- assert_matches!(
- "fe80::1%1".parse::<IpAddr>(),
- Ok(IpAddr::V6(ip, Some(scope)))
- if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
- && scope == "1"
- );
- }
-
- #[test]
- fn parse_ipv6_link_local_with_scope_interface_not_found() {
- // An empty scope ID should trigger a failed lookup.
- assert_matches!(
- "fe80::1%".parse::<IpAddr>(),
- Err(AddrParseError::InterfaceNotFound(name))
- if name == ""
- );
- }
-}
diff --git a/runner/src/runner.rs b/runner/src/runner.rs
deleted file mode 100644
index 9f4519e..0000000
--- a/runner/src/runner.rs
+++ /dev/null
@@ -1,175 +0,0 @@
-// Copyright 2023 The Fuchsia Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-use signal_hook::consts::signal::{SIGINT, SIGQUIT, SIGTERM};
-#[cfg(unix)]
-use std::os::unix::process::ExitStatusExt;
-use std::path::PathBuf;
-use std::process::{Command, ExitCode};
-use std::sync::atomic::{AtomicUsize, Ordering};
-use std::sync::Arc;
-use std::time::{Duration, Instant};
-
-use anyhow::{bail, Context, Result};
-use itertools::Itertools;
-
-// Time to wait for antlion to clean up after receiving a termination signal. If
-// the process is unable to terminate within this time, the process will be
-// killed without further warning.
-const TERM_TIMEOUT_SEC: u64 = 3;
-
-// Busy-wait sleep duration between polling for antlion termination.
-const TERM_CHECK_INTERVAL_MS: u64 = 100;
-
-/// Runner for dispatching antlion.
-pub(crate) trait Runner {
-    /// Run antlion using the provided config file.
- fn run(&self, config: PathBuf) -> Result<ExitStatus>;
-}
-
-/// Executes antlion as a local process.
-pub(crate) struct ProcessRunner {
- pub python_bin: String,
- pub antlion_pyz: PathBuf,
- pub test_cases: Vec<String>,
-}
-
-// TODO(http://b/401318909): Remove this once Fuchsia Controller no longer panics during teardown.
-fn test_affected_by_b_401318909(test_name: String) -> bool {
- let test_substrings_affected_by_b_401318909 = [
- "channel_switch_test",
- "deprecated_configuration_test"
- ];
-
- for test in test_substrings_affected_by_b_401318909 {
- if test_name.contains(test) {
- return true;
- }
- }
-
- false
-}
-
-impl Runner for ProcessRunner {
- fn run(&self, config: PathBuf) -> Result<ExitStatus> {
- let mut args = vec![
- self.antlion_pyz.clone().into_os_string().into_string().unwrap(),
- "--config".to_string(),
- config.into_os_string().into_string().unwrap(),
- ];
-
- if !self.test_cases.is_empty() {
- args.push("--test_case".to_string());
- for test_case in self.test_cases.iter() {
- args.push(test_case.clone());
- }
- }
-
- println!(
- "Launching antlion to run: \"{} {}\"\n",
- &self.python_bin,
- args.iter().format(" "),
- );
-
- let mut child =
- Command::new(&self.python_bin).args(args).spawn().context("Failed to spawn antlion")?;
-
- // Start monitoring for termination signals.
- let term = Arc::new(AtomicUsize::new(0));
- signal_hook::flag::register_usize(SIGINT, term.clone(), SIGINT as usize)?;
- signal_hook::flag::register_usize(SIGTERM, term.clone(), SIGTERM as usize)?;
- signal_hook::flag::register_usize(SIGQUIT, term.clone(), SIGQUIT as usize)?;
-
- loop {
- if let Some(exit_status) =
- child.try_wait().context("Failed waiting for antlion to finish")?
- {
- if exit_status.core_dumped() {
- if test_affected_by_b_401318909(
- self.antlion_pyz.clone().into_os_string().into_string().unwrap()
- ) {
- eprintln!(
- "Received expected core dump after running test. \
- Remove this once http://b/401318909 has been resolved."
- );
- return Ok(ExitStatus::Ok);
- } else {
- bail!(
-                            "Received an unexpected core dump from a test that is not known to be \
-                            affected by http://b/401318909."
- );
- }
- }
-
- return Ok(ExitStatus::from(exit_status));
- }
-
- let signal = term.load(Ordering::Relaxed) as i32;
- if signal != 0 {
- println!("Forwarding signal {signal} to antlion");
- nix::sys::signal::kill(
- nix::unistd::Pid::from_raw(
- child.id().try_into().context("Failed to convert pid to i32")?,
- ),
- Some(signal.try_into().context("Failed to convert signal")?),
- )
- .context("Failed to forward signal to antlion")?;
-
- println!("Waiting {} seconds for antlion to terminate", TERM_TIMEOUT_SEC);
- let timeout = Instant::now() + Duration::from_secs(TERM_TIMEOUT_SEC);
- while Instant::now() < timeout {
- if let Some(_) =
- child.try_wait().context("Failed waiting for antlion to finish")?
- {
- return Ok(ExitStatus::Interrupt(Some(signal)));
- }
- std::thread::sleep(std::time::Duration::from_millis(TERM_CHECK_INTERVAL_MS));
- }
-
- eprintln!("antlion is unresponsive, killing process");
- child.kill().context("Failed to kill antlion process")?;
- return Ok(ExitStatus::Interrupt(Some(signal)));
- }
-
- std::thread::sleep(std::time::Duration::from_millis(TERM_CHECK_INTERVAL_MS));
- }
- }
-}
-
-/// Describes the result of a child process after it has terminated.
-pub(crate) enum ExitStatus {
- /// Process terminated without error.
- Ok,
- /// Process terminated with a non-zero status code.
- Err(i32),
- /// Process was interrupted by a signal.
- Interrupt(Option<i32>),
-}
-
-impl From<std::process::ExitStatus> for ExitStatus {
- fn from(status: std::process::ExitStatus) -> Self {
- match status.code() {
- Some(0) => ExitStatus::Ok,
- Some(code) => ExitStatus::Err(code),
-            None if cfg!(unix) => ExitStatus::Interrupt(status.signal()),
- None => ExitStatus::Interrupt(None),
- }
- }
-}
-
-impl Into<ExitCode> for ExitStatus {
- fn into(self) -> ExitCode {
- match self {
- ExitStatus::Ok => ExitCode::SUCCESS,
- ExitStatus::Err(code) => {
- let code = match u8::try_from(code) {
- Ok(c) => c,
- Err(_) => 1,
- };
- ExitCode::from(code)
- }
- ExitStatus::Interrupt(_) => ExitCode::FAILURE,
- }
- }
-}
diff --git a/runner/src/yaml.rs b/runner/src/yaml.rs
deleted file mode 100644
index ae972bf..0000000
--- a/runner/src/yaml.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use serde_yaml::Value;
-
-/// Merge `b` into `a`: mappings are merged recursively, sequences are
-/// appended, and all other values are overwritten.
-pub fn merge(a: &mut Value, b: Value) {
- match (a, b) {
- (Value::Mapping(ref mut a), Value::Mapping(b)) => {
- for (k, v) in b {
- if !a.contains_key(&k) {
- a.insert(k, v);
- } else {
- merge(&mut a[&k], v);
- }
- }
- }
- (Value::Sequence(ref mut a), Value::Sequence(ref mut b)) => {
- a.append(b);
- }
- (a, b) => *a = b,
- }
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- #[test]
- fn test_merge_mapping() {
- let a = "
- test_params:
- name: a
- who_called:
- was_a: true
- ";
- let mut a: Value = serde_yaml::from_str(a).unwrap();
- let b = "
- test_params:
- name: b
- who_called:
- was_b: true
- ";
- let b: Value = serde_yaml::from_str(b).unwrap();
- merge(&mut a, b);
- let want = "
- test_params:
- name: b
- who_called:
- was_a: true
- was_b: true
- ";
- let want: Value = serde_yaml::from_str(want).unwrap();
- assert_eq!(a, want);
- }
-
- #[test]
- fn test_merge_append_arrays() {
- let mut a: Value = serde_yaml::from_str(" - a").unwrap();
- let b: Value = serde_yaml::from_str(" - b").unwrap();
- merge(&mut a, b);
- let want = "
- - a
- - b
- ";
- let want: Value = serde_yaml::from_str(want).unwrap();
- assert_eq!(a, want);
- }
-
- #[test]
- fn test_merge_append_arrays_allow_duplicates() {
- let mut a: Value = serde_yaml::from_str(" - a").unwrap();
- let b: Value = serde_yaml::from_str(" - a").unwrap();
- merge(&mut a, b);
- let want = "
- - a
- - a
- ";
- let want: Value = serde_yaml::from_str(want).unwrap();
- assert_eq!(a, want);
- }
-
- #[test]
- fn test_merge_overwrite_from_null() {
- let mut a: Value = Value::Null;
- let b: Value = serde_yaml::from_str("true").unwrap();
- merge(&mut a, b.clone());
- assert_eq!(a, b);
- }
-
- #[test]
- fn test_merge_overwrite_with_null() {
- let mut a: Value = serde_yaml::from_str("true").unwrap();
- let b: Value = Value::Null;
- merge(&mut a, b.clone());
- assert_eq!(a, b);
- }
-}
diff --git a/setup.py b/setup.py
deleted file mode 100644
index f6b0241..0000000
--- a/setup.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from setuptools import find_packages, setup
-
-install_requires = [
- "mobly==1.12.2",
- "pyyaml>=5.1",
- "tenacity~=8.0",
- # TODO(b/240443856): Remove these dependencies once antlion runs in
- # Fuchsia's LUCI infrastructure. These are needed for flashing and using
- # mDNS discovery, which are unnecessary in the future infrastructure.
- "psutil",
- "zeroconf",
-]
-
-setup(
- name="antlion",
- version="0.2.0",
- description="Host-driven, hardware-agnostic Fuchsia connectivity tests",
- license="Apache-2.0",
- packages=find_packages(
- where="packages",
- ),
- package_dir={"": "packages"},
- include_package_data=True,
- tests_require=[],
- install_requires=install_requires,
- extras_require={
- "html_graphing": ["bokeh"],
- "digital_loggers_pdu": ["dlipower"],
- "android": [
- "numpy",
- "scapy",
- ],
- },
-)
diff --git a/stubs/README.md b/stubs/README.md
deleted file mode 100644
index 07ec6ae..0000000
--- a/stubs/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Python typing stubs
-
-Contains typing stubs for Python packages that do not ship type annotations of
-their own. Generated initially with [`stubgen`][stubgen], then manually
-modified to satisfy [`mypy`][mypy].
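-For example, stubs for a new package could be seeded with an invocation along
-the lines of `stubgen -p mobly -o stubs/` (a hypothetical example; substitute
-the package and output directory as needed) before hand-editing the result.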
-
-> TODO(http://b/285005406): Contribute type annotations to Mobly, bump the
-> version of Mobly, then remove these type stubs.
-
-[stubgen]: https://mypy.readthedocs.io/en/stable/stubgen.html
-[mypy]: https://mypy.readthedocs.io/en/stable/
diff --git a/stubs/mobly/__init__.pyi b/stubs/mobly/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/asserts.pyi b/stubs/mobly/asserts.pyi
deleted file mode 100644
index d72fc1f..0000000
--- a/stubs/mobly/asserts.pyi
+++ /dev/null
@@ -1,117 +0,0 @@
-from _typeshed import Incomplete
-from mobly import signals as signals
-
-def assert_equal(
- first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_not_equal(
- first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_almost_equal(
- first,
- second,
- places: Incomplete | None = ...,
- msg: Incomplete | None = ...,
- delta: Incomplete | None = ...,
- extras: Incomplete | None = ...,
-) -> None: ...
-def assert_not_almost_equal(
- first,
- second,
- places: Incomplete | None = ...,
- msg: Incomplete | None = ...,
- delta: Incomplete | None = ...,
- extras: Incomplete | None = ...,
-) -> None: ...
-def assert_in(
- member,
- container,
- msg: Incomplete | None = ...,
- extras: Incomplete | None = ...,
-) -> None: ...
-def assert_not_in(
- member,
- container,
- msg: Incomplete | None = ...,
- extras: Incomplete | None = ...,
-) -> None: ...
-def assert_is(
- expr1, expr2, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_is_not(
- expr1, expr2, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_count_equal(
- first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_less(
- a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_less_equal(
- a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_greater(
- a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_greater_equal(
- a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_is_none(
- obj, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_is_not_none(
- obj, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_is_instance(
- obj, cls, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_not_is_instance(
- obj, cls, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def assert_regex(
- text,
- expected_regex,
- msg: Incomplete | None = ...,
- extras: Incomplete | None = ...,
-) -> None: ...
-def assert_not_regex(
- text,
- unexpected_regex,
- msg: Incomplete | None = ...,
- extras: Incomplete | None = ...,
-) -> None: ...
-def assert_raises(
- expected_exception, extras: Incomplete | None = ..., *args, **kwargs
-): ...
-def assert_raises_regex(
- expected_exception,
- expected_regex,
- extras: Incomplete | None = ...,
- *args,
- **kwargs,
-): ...
-def assert_true(expr, msg, extras: Incomplete | None = ...) -> None: ...
-def assert_false(expr, msg, extras: Incomplete | None = ...) -> None: ...
-def skip(reason, extras: Incomplete | None = ...) -> None: ...
-def skip_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
-def abort_class(reason, extras: Incomplete | None = ...) -> None: ...
-def abort_class_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
-def abort_all(reason, extras: Incomplete | None = ...) -> None: ...
-def abort_all_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
-def fail(msg, extras: Incomplete | None = ...) -> None: ...
-def explicit_pass(msg, extras: Incomplete | None = ...) -> None: ...
-
-class _AssertRaisesContext:
- expected: Incomplete
- failureException: Incomplete
- expected_regexp: Incomplete
- extras: Incomplete
- def __init__(
- self,
- expected,
- expected_regexp: Incomplete | None = ...,
- extras: Incomplete | None = ...,
- ) -> None: ...
- def __enter__(self): ...
- exception: Incomplete
- def __exit__(self, exc_type, exc_value, tb): ...
diff --git a/stubs/mobly/base_instrumentation_test.pyi b/stubs/mobly/base_instrumentation_test.pyi
deleted file mode 100644
index 7f744a9..0000000
--- a/stubs/mobly/base_instrumentation_test.pyi
+++ /dev/null
@@ -1,99 +0,0 @@
-from enum import Enum
-
-from _typeshed import Incomplete
-from mobly import base_test as base_test
-from mobly import records as records
-from mobly import signals as signals
-from mobly import utils as utils
-
-class _InstrumentationStructurePrefixes:
- STATUS: str
- STATUS_CODE: str
- RESULT: str
- CODE: str
- FAILED: str
-
-class _InstrumentationKnownStatusKeys:
- CLASS: str
- ERROR: str
- STACK: str
- TEST: str
- STREAM: str
-
-class _InstrumentationStatusCodes:
- UNKNOWN: Incomplete
- OK: str
- START: str
- IN_PROGRESS: str
- ERROR: str
- FAILURE: str
- IGNORED: str
- ASSUMPTION_FAILURE: str
-
-class _InstrumentationStatusCodeCategories:
- TIMING: Incomplete
- PASS: Incomplete
- FAIL: Incomplete
- SKIPPED: Incomplete
-
-class _InstrumentationKnownResultKeys:
- LONGMSG: str
- SHORTMSG: str
-
-class _InstrumentationResultSignals:
- FAIL: str
- PASS: str
-
-class _InstrumentationBlockStates(Enum):
- UNKNOWN: int
- METHOD: int
- RESULT: int
-
-class _InstrumentationBlock:
- state: Incomplete
- prefix: Incomplete
- previous_instrumentation_block: Incomplete
- error_message: str
- status_code: Incomplete
- current_key: Incomplete
- known_keys: Incomplete
- unknown_keys: Incomplete
- begin_time: Incomplete
- def __init__(
- self,
- state=...,
- prefix: Incomplete | None = ...,
- previous_instrumentation_block: Incomplete | None = ...,
- ) -> None: ...
- @property
- def is_empty(self): ...
- def set_error_message(self, error_message) -> None: ...
- def set_status_code(self, status_code_line) -> None: ...
- def set_key(self, structure_prefix, key_line) -> None: ...
- def add_value(self, line) -> None: ...
- def transition_state(self, new_state): ...
-
-class _InstrumentationBlockFormatter:
- DEFAULT_INSTRUMENTATION_METHOD_NAME: str
- def __init__(self, instrumentation_block) -> None: ...
- def create_test_record(self, mobly_test_class): ...
- def has_completed_result_block_format(self, error_message): ...
-
-class InstrumentationTestMixin:
- DEFAULT_INSTRUMENTATION_OPTION_PREFIX: str
- DEFAULT_INSTRUMENTATION_ERROR_MESSAGE: str
- def parse_instrumentation_options(
- self, parameters: Incomplete | None = ...
- ): ...
- def run_instrumentation_test(
- self,
- device,
- package,
- options: Incomplete | None = ...,
- prefix: Incomplete | None = ...,
- runner: Incomplete | None = ...,
- ): ...
-
-class BaseInstrumentationTestClass(
- InstrumentationTestMixin, base_test.BaseTestClass
-): ...
diff --git a/stubs/mobly/base_suite.pyi b/stubs/mobly/base_suite.pyi
deleted file mode 100644
index 48912a5..0000000
--- a/stubs/mobly/base_suite.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-import abc
-
-from _typeshed import Incomplete
-
-class BaseSuite(abc.ABC, metaclass=abc.ABCMeta):
- def __init__(self, runner, config) -> None: ...
- @property
- def user_params(self): ...
- def add_test_class(
- self,
- clazz,
- config: Incomplete | None = ...,
- tests: Incomplete | None = ...,
- name_suffix: Incomplete | None = ...,
- ) -> None: ...
- @abc.abstractmethod
- def setup_suite(self, config): ...
- def teardown_suite(self) -> None: ...
diff --git a/stubs/mobly/base_test.pyi b/stubs/mobly/base_test.pyi
deleted file mode 100644
index 1f1d7d1..0000000
--- a/stubs/mobly/base_test.pyi
+++ /dev/null
@@ -1,68 +0,0 @@
-from _typeshed import Incomplete
-from mobly import controller_manager as controller_manager
-from mobly import expects as expects
-from mobly import records as records
-from mobly import runtime_test_info as runtime_test_info
-from mobly import signals as signals
-from mobly import utils as utils
-
-TEST_CASE_TOKEN: str
-RESULT_LINE_TEMPLATE: Incomplete
-TEST_STAGE_BEGIN_LOG_TEMPLATE: str
-TEST_STAGE_END_LOG_TEMPLATE: str
-STAGE_NAME_PRE_RUN: str
-STAGE_NAME_SETUP_GENERATED_TESTS: str
-STAGE_NAME_SETUP_CLASS: str
-STAGE_NAME_SETUP_TEST: str
-STAGE_NAME_TEARDOWN_TEST: str
-STAGE_NAME_TEARDOWN_CLASS: str
-STAGE_NAME_CLEAN_UP: str
-ATTR_REPEAT_CNT: str
-ATTR_MAX_RETRY_CNT: str
-ATTR_MAX_CONSEC_ERROR: str
-
-class Error(Exception): ...
-
-def repeat(count, max_consecutive_error: Incomplete | None = ...): ...
-def retry(max_count): ...
-
-class BaseTestClass:
- TAG: Incomplete
- tests: Incomplete
- root_output_path: Incomplete
- log_path: Incomplete
- test_bed_name: Incomplete
- testbed_name: Incomplete
- user_params: Incomplete
- results: Incomplete
- summary_writer: Incomplete
- controller_configs: Incomplete
- def __init__(self, configs) -> None: ...
- def unpack_userparams(
- self,
- req_param_names: Incomplete | None = ...,
- opt_param_names: Incomplete | None = ...,
- **kwargs,
- ) -> None: ...
- def register_controller(
- self, module, required: bool = ..., min_number: int = ...
- ): ...
- def pre_run(self) -> None: ...
- def setup_generated_tests(self) -> None: ...
- def setup_class(self) -> None: ...
- def teardown_class(self) -> None: ...
- def setup_test(self) -> None: ...
- def teardown_test(self) -> None: ...
- def on_fail(self, record) -> None: ...
- def on_pass(self, record) -> None: ...
- def on_skip(self, record) -> None: ...
- def record_data(self, content) -> None: ...
- current_test_info: Incomplete
- def exec_one_test(
- self, test_name, test_method, record: Incomplete | None = ...
- ): ...
- def generate_tests(
- self, test_logic, name_func, arg_sets, uid_func: Incomplete | None = ...
- ) -> None: ...
- def get_existing_test_names(self): ...
- def run(self, test_names: Incomplete | None = ...): ...
diff --git a/stubs/mobly/config_parser.pyi b/stubs/mobly/config_parser.pyi
deleted file mode 100644
index 0b7c01f..0000000
--- a/stubs/mobly/config_parser.pyi
+++ /dev/null
@@ -1,22 +0,0 @@
-from _typeshed import Incomplete
-from mobly import keys as keys
-from mobly import utils as utils
-
-ENV_MOBLY_LOGPATH: str
-
-class MoblyConfigError(Exception): ...
-
-def load_test_config_file(
- test_config_path, tb_filters: Incomplete | None = ...
-): ...
-
-class TestRunConfig:
- log_path: str
- test_bed_name: Incomplete
- testbed_name: Incomplete
- controller_configs: Incomplete
- user_params: Incomplete
- summary_writer: Incomplete
- test_class_name_suffix: Incomplete
- def __init__(self) -> None: ...
- def copy(self): ...
diff --git a/stubs/mobly/controller_manager.pyi b/stubs/mobly/controller_manager.pyi
deleted file mode 100644
index 6e59a30..0000000
--- a/stubs/mobly/controller_manager.pyi
+++ /dev/null
@@ -1,15 +0,0 @@
-from _typeshed import Incomplete
-from mobly import expects as expects
-from mobly import records as records
-from mobly import signals as signals
-
-def verify_controller_module(module) -> None: ...
-
-class ControllerManager:
- controller_configs: Incomplete
- def __init__(self, class_name, controller_configs) -> None: ...
- def register_controller(
- self, module, required: bool = ..., min_number: int = ...
- ): ...
- def unregister_controllers(self) -> None: ...
- def get_controller_info_records(self): ...
diff --git a/stubs/mobly/controllers/__init__.pyi b/stubs/mobly/controllers/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/controllers/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/controllers/android_device.pyi b/stubs/mobly/controllers/android_device.pyi
deleted file mode 100644
index b9c8866..0000000
--- a/stubs/mobly/controllers/android_device.pyi
+++ /dev/null
@@ -1,143 +0,0 @@
-import enum
-import logging
-from collections.abc import Generator
-
-from _typeshed import Incomplete
-from mobly import runtime_test_info as runtime_test_info
-from mobly import utils as utils
-from mobly.controllers.android_device_lib import adb as adb
-from mobly.controllers.android_device_lib import errors as errors
-from mobly.controllers.android_device_lib import fastboot as fastboot
-from mobly.controllers.android_device_lib import (
- service_manager as service_manager,
-)
-from mobly.controllers.android_device_lib.services import logcat as logcat
-from mobly.controllers.android_device_lib.services import (
- snippet_management_service as snippet_management_service,
-)
-
-MBS_PACKAGE: str
-MOBLY_CONTROLLER_CONFIG_NAME: str
-ANDROID_DEVICE_PICK_ALL_TOKEN: str
-ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY: str
-ANDROID_DEVICE_EMPTY_CONFIG_MSG: str
-ANDROID_DEVICE_NOT_LIST_CONFIG_MSG: str
-CACHED_SYSTEM_PROPS: Incomplete
-KEY_DEVICE_REQUIRED: str
-DEFAULT_VALUE_DEVICE_REQUIRED: bool
-KEY_SKIP_LOGCAT: str
-DEFAULT_VALUE_SKIP_LOGCAT: bool
-SERVICE_NAME_LOGCAT: str
-DEFAULT_BUG_REPORT_NAME: str
-DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND: Incomplete
-TAKE_SCREENSHOT_TIMEOUT_SECOND: int
-Error = errors.Error
-DeviceError = errors.DeviceError
-SnippetError = snippet_management_service.Error
-EMULATOR_SERIAL_REGEX: Incomplete
-
-def create(configs): ...
-def destroy(ads) -> None: ...
-def get_info(ads): ...
-def parse_device_list(device_list_str, key): ...
-def list_adb_devices(): ...
-def list_adb_devices_by_usb_id(): ...
-def list_fastboot_devices(): ...
-def get_instances(serials): ...
-def get_instances_with_configs(configs): ...
-def get_all_instances(include_fastboot: bool = ...): ...
-def filter_devices(ads, func): ...
-def get_devices(ads, **kwargs): ...
-def get_device(ads, **kwargs): ...
-def take_bug_reports(
- ads,
- test_name: Incomplete | None = ...,
- begin_time: Incomplete | None = ...,
- destination: Incomplete | None = ...,
-) -> None: ...
-
-class BuildInfoConstants(enum.Enum):
- BUILD_ID: Incomplete
- BUILD_TYPE: Incomplete
- BUILD_FINGERPRINT: Incomplete
- BUILD_VERSION_CODENAME: Incomplete
- BUILD_VERSION_INCREMENTAL: Incomplete
- BUILD_VERSION_SDK: Incomplete
- BUILD_PRODUCT: Incomplete
- BUILD_CHARACTERISTICS: Incomplete
- DEBUGGABLE: Incomplete
- PRODUCT_NAME: Incomplete
- HARDWARE: Incomplete
- build_info_key: Incomplete
- system_prop_key: Incomplete
- def __init__(self, build_info_key, system_prop_key) -> None: ...
-
-class AndroidDevice:
- log: Incomplete
- adb: Incomplete
- fastboot: Incomplete
- services: Incomplete
- def __init__(self, serial: str = ...) -> None: ...
- @property
- def adb_logcat_file_path(self): ...
- @property
- def device_info(self): ...
- def add_device_info(self, name, info) -> None: ...
- @property
- def sl4a(self): ...
- @property
- def ed(self): ...
- @property
- def debug_tag(self): ...
- @debug_tag.setter
- def debug_tag(self, tag) -> None: ...
- @property
- def has_active_service(self): ...
- @property
- def log_path(self): ...
- @log_path.setter
- def log_path(self, new_path) -> None: ...
- @property
- def serial(self): ...
- def update_serial(self, new_serial) -> None: ...
- def handle_reboot(self) -> Generator[None, None, None]: ...
- def handle_usb_disconnect(self) -> Generator[None, None, None]: ...
- @property
- def build_info(self): ...
- @property
- def is_bootloader(self): ...
- @property
- def is_adb_root(self): ...
- @property
- def is_rootable(self): ...
- @property
- def model(self): ...
- @property
- def is_emulator(self): ...
- def load_config(self, config) -> None: ...
- def root_adb(self) -> None: ...
- def load_snippet(self, name, package) -> None: ...
- def unload_snippet(self, name) -> None: ...
- def generate_filename(
- self,
- file_type,
- time_identifier: Incomplete | None = ...,
- extension_name: Incomplete | None = ...,
- ): ...
- def take_bug_report(
- self,
- test_name: Incomplete | None = ...,
- begin_time: Incomplete | None = ...,
- timeout: int = ...,
- destination: Incomplete | None = ...,
- ): ...
- def take_screenshot(self, destination, prefix: str = ...): ...
- def run_iperf_client(self, server_host, extra_args: str = ...): ...
- def wait_for_boot_completion(self, timeout=...) -> None: ...
- def is_boot_completed(self): ...
- def is_adb_detectable(self): ...
- def reboot(self) -> None: ...
- def __getattr__(self, name): ...
-
-class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
- def process(self, msg, kwargs): ...
diff --git a/stubs/mobly/controllers/android_device_lib/__init__.pyi b/stubs/mobly/controllers/android_device_lib/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/controllers/android_device_lib/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/controllers/android_device_lib/adb.pyi b/stubs/mobly/controllers/android_device_lib/adb.pyi
deleted file mode 100644
index fe38ba8..0000000
--- a/stubs/mobly/controllers/android_device_lib/adb.pyi
+++ /dev/null
@@ -1,55 +0,0 @@
-from _typeshed import Incomplete
-from mobly import utils as utils
-
-ADB: str
-ADB_PORT_LOCK: Incomplete
-ADB_ROOT_RETRY_ATTMEPTS: int
-ADB_ROOT_RETRY_ATTEMPT_INTERVAL_SEC: int
-DEFAULT_INSTRUMENTATION_RUNNER: str
-DEFAULT_GETPROP_TIMEOUT_SEC: int
-DEFAULT_GETPROPS_ATTEMPTS: int
-DEFAULT_GETPROPS_RETRY_SLEEP_SEC: int
-PATTERN_ADB_CONNECT_SUCCESS: Incomplete
-
-class Error(Exception): ...
-
-class AdbError(Error):
- cmd: Incomplete
- stdout: Incomplete
- stderr: Incomplete
- ret_code: Incomplete
- serial: Incomplete
- def __init__(
- self, cmd, stdout, stderr, ret_code, serial: str = ...
- ) -> None: ...
-
-class AdbTimeoutError(Error):
- cmd: Incomplete
- timeout: Incomplete
- serial: Incomplete
- def __init__(self, cmd, timeout, serial: str = ...) -> None: ...
-
-def is_adb_available(): ...
-def list_occupied_adb_ports(): ...
-
-class AdbProxy:
- serial: Incomplete
- def __init__(self, serial: str = ...) -> None: ...
- @property
- def current_user_id(self) -> int: ...
- def connect(self, address) -> bytes: ...
- def getprop(self, prop_name): ...
- def getprops(self, prop_names): ...
- def has_shell_command(self, command) -> bool: ...
- def forward(
- self, args: Incomplete | None = ..., shell: bool = ...
- ) -> bytes: ...
- def instrument(
- self,
- package,
- options: Incomplete | None = ...,
- runner: Incomplete | None = ...,
- handler: Incomplete | None = ...,
- ) -> bytes: ...
- def root(self) -> bytes: ...
- def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/callback_handler.pyi b/stubs/mobly/controllers/android_device_lib/callback_handler.pyi
deleted file mode 100644
index 0fb9383..0000000
--- a/stubs/mobly/controllers/android_device_lib/callback_handler.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers.android_device_lib import snippet_event as snippet_event
-from mobly.snippet import errors as errors
-
-MAX_TIMEOUT: Incomplete
-DEFAULT_TIMEOUT: int
-Error = errors.CallbackHandlerBaseError
-TimeoutError = errors.CallbackHandlerTimeoutError
-
-class CallbackHandler:
- ret_value: Incomplete
- def __init__(
- self, callback_id, event_client, ret_value, method_name, ad
- ) -> None: ...
- @property
- def callback_id(self): ...
- def waitAndGet(self, event_name, timeout=...): ...
- def waitForEvent(self, event_name, predicate, timeout=...): ...
- def getAll(self, event_name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi b/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi
deleted file mode 100644
index a24f38f..0000000
--- a/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi
+++ /dev/null
@@ -1,8 +0,0 @@
-from mobly.snippet import callback_handler_base as callback_handler_base
-from mobly.snippet import errors as errors
-
-TIMEOUT_ERROR_MESSAGE: str
-
-class CallbackHandlerV2(callback_handler_base.CallbackHandlerBase):
- def callEventWaitAndGetRpc(self, callback_id, event_name, timeout_sec): ...
- def callEventGetAllRpc(self, callback_id, event_name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/errors.pyi b/stubs/mobly/controllers/android_device_lib/errors.pyi
deleted file mode 100644
index 562da05..0000000
--- a/stubs/mobly/controllers/android_device_lib/errors.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-from _typeshed import Incomplete
-from mobly import signals as signals
-
-HIERARCHY_TOKEN: str
-
-class Error(signals.ControllerError): ...
-
-class DeviceError(Error):
- def __init__(self, ad, msg) -> None: ...
-
-class ServiceError(DeviceError):
- SERVICE_TYPE: Incomplete
- def __init__(self, device, msg) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi b/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi
deleted file mode 100644
index 6d1e397..0000000
--- a/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi
+++ /dev/null
@@ -1,38 +0,0 @@
-from _typeshed import Incomplete
-
-class EventDispatcherError(Exception): ...
-class IllegalStateError(EventDispatcherError): ...
-class DuplicateError(EventDispatcherError): ...
-
-class EventDispatcher:
- DEFAULT_TIMEOUT: int
- started: bool
- executor: Incomplete
- poller: Incomplete
- event_dict: Incomplete
- handlers: Incomplete
- lock: Incomplete
- def __init__(self, sl4a) -> None: ...
- def poll_events(self) -> None: ...
- def register_handler(self, handler, event_name, args) -> None: ...
- def start(self) -> None: ...
- def clean_up(self) -> None: ...
- def pop_event(self, event_name, timeout=...): ...
- def wait_for_event(
- self, event_name, predicate, timeout=..., *args, **kwargs
- ): ...
- def pop_events(self, regex_pattern, timeout): ...
- def get_event_q(self, event_name): ...
- def handle_subscribed_event(self, event_obj, event_name) -> None: ...
- def handle_event(
- self,
- event_handler,
- event_name,
- user_args,
- event_timeout: Incomplete | None = ...,
- cond: Incomplete | None = ...,
- cond_timeout: Incomplete | None = ...,
- ): ...
- def pop_all(self, event_name): ...
- def clear_events(self, event_name) -> None: ...
- def clear_all_events(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/fastboot.pyi b/stubs/mobly/controllers/android_device_lib/fastboot.pyi
deleted file mode 100644
index e734c1a..0000000
--- a/stubs/mobly/controllers/android_device_lib/fastboot.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-from _typeshed import Incomplete
-
-def exe_cmd(*cmds): ...
-
-class FastbootProxy:
- serial: Incomplete
- fastboot_str: Incomplete
- def __init__(self, serial: str = ...) -> None: ...
- def args(self, *args): ...
- def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi b/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi
deleted file mode 100644
index e3d2155..0000000
--- a/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi
+++ /dev/null
@@ -1,38 +0,0 @@
-import abc
-
-from _typeshed import Incomplete
-from mobly.controllers.android_device_lib import (
- callback_handler as callback_handler,
-)
-from mobly.snippet import errors as errors
-
-UNKNOWN_UID: int
-Error = errors.Error
-AppStartError = errors.ServerStartError
-AppRestoreConnectionError = errors.ServerRestoreConnectionError
-ApiError = errors.ApiError
-ProtocolError = errors.ProtocolError
-
-class JsonRpcCommand:
- INIT: str
- CONTINUE: str
-
-class JsonRpcClientBase(abc.ABC):
- host_port: Incomplete
- device_port: Incomplete
- app_name: Incomplete
- log: Incomplete
- uid: Incomplete
- verbose_logging: bool
- def __init__(self, app_name, ad) -> None: ...
- def __del__(self) -> None: ...
- def start_app_and_connect(self) -> None: ...
- def stop_app(self) -> None: ...
- def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
- def connect(self, uid=..., cmd=...) -> None: ...
- def disconnect(self) -> None: ...
- def close_socket_connection(self) -> None: ...
- def clear_host_port(self) -> None: ...
- def disable_hidden_api_blacklist(self) -> None: ...
- def __getattr__(self, name): ...
- def set_snippet_client_verbose_logging(self, verbose) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi b/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi
deleted file mode 100644
index 6033e90..0000000
--- a/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi
+++ /dev/null
@@ -1,9 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers import android_device as android_device
-
-class Error(Exception): ...
-
-class JsonRpcShellBase:
- def load_device(self, serial: Incomplete | None = ...) -> None: ...
- def start_console(self) -> None: ...
- def main(self, serial: Incomplete | None = ...) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/service_manager.pyi b/stubs/mobly/controllers/android_device_lib/service_manager.pyi
deleted file mode 100644
index e39ade4..0000000
--- a/stubs/mobly/controllers/android_device_lib/service_manager.pyi
+++ /dev/null
@@ -1,33 +0,0 @@
-from _typeshed import Incomplete
-from mobly import expects as expects
-from mobly.controllers.android_device_lib import errors as errors
-from mobly.controllers.android_device_lib.services import (
- base_service as base_service,
-)
-
-class Error(errors.DeviceError): ...
-
-class ServiceManager:
- def __init__(self, device) -> None: ...
- def has_service_by_name(self, name): ...
- @property
- def is_any_alive(self): ...
- def register(
- self,
- alias,
- service_class,
- configs: Incomplete | None = ...,
- start_service: bool = ...,
- ) -> None: ...
- def unregister(self, alias) -> None: ...
- def for_each(self, func) -> None: ...
- def list_live_services(self): ...
- def create_output_excerpts_all(self, test_info): ...
- def unregister_all(self) -> None: ...
- def start_all(self) -> None: ...
- def start_services(self, service_alises) -> None: ...
- def stop_all(self) -> None: ...
- def pause_all(self) -> None: ...
- def resume_all(self) -> None: ...
- def resume_services(self, service_alises) -> None: ...
- def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/__init__.pyi b/stubs/mobly/controllers/android_device_lib/services/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/controllers/android_device_lib/services/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/controllers/android_device_lib/services/base_service.pyi b/stubs/mobly/controllers/android_device_lib/services/base_service.pyi
deleted file mode 100644
index c99f0e7..0000000
--- a/stubs/mobly/controllers/android_device_lib/services/base_service.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-import abc
-
-from _typeshed import Incomplete
-
-class BaseService(abc.ABC):
- def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
- @property
- def alias(self): ...
- @alias.setter
- def alias(self, alias) -> None: ...
- @property
- def is_alive(self) -> None: ...
- def start(self) -> None: ...
- def stop(self) -> None: ...
- def pause(self) -> None: ...
- def resume(self) -> None: ...
- def create_output_excerpts(self, test_info): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/logcat.pyi b/stubs/mobly/controllers/android_device_lib/services/logcat.pyi
deleted file mode 100644
index d6b41b3..0000000
--- a/stubs/mobly/controllers/android_device_lib/services/logcat.pyi
+++ /dev/null
@@ -1,39 +0,0 @@
-from _typeshed import Incomplete
-from mobly import utils as utils
-from mobly.controllers.android_device_lib import adb as adb
-from mobly.controllers.android_device_lib import errors as errors
-from mobly.controllers.android_device_lib.services import (
- base_service as base_service,
-)
-
-CREATE_LOGCAT_FILE_TIMEOUT_SEC: int
-
-class Error(errors.ServiceError):
- SERVICE_TYPE: str
-
-class Config:
- clear_log: Incomplete
- logcat_params: Incomplete
- output_file_path: Incomplete
- def __init__(
- self,
- logcat_params: Incomplete | None = ...,
- clear_log: bool = ...,
- output_file_path: Incomplete | None = ...,
- ) -> None: ...
-
-class Logcat(base_service.BaseService):
- OUTPUT_FILE_TYPE: str
- adb_logcat_file_path: Incomplete
- def __init__(
- self, android_device, configs: Incomplete | None = ...
- ) -> None: ...
- def create_output_excerpts(self, test_info): ...
- @property
- def is_alive(self): ...
- def clear_adb_log(self) -> None: ...
- def update_config(self, new_config) -> None: ...
- def start(self) -> None: ...
- def stop(self) -> None: ...
- def pause(self) -> None: ...
- def resume(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi b/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi
deleted file mode 100644
index dd6cf7a..0000000
--- a/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi
+++ /dev/null
@@ -1,15 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers.android_device_lib import sl4a_client as sl4a_client
-from mobly.controllers.android_device_lib.services import (
- base_service as base_service,
-)
-
-class Sl4aService(base_service.BaseService):
- def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
- @property
- def is_alive(self): ...
- def start(self) -> None: ...
- def stop(self) -> None: ...
- def pause(self) -> None: ...
- def resume(self) -> None: ...
- def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi b/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi
deleted file mode 100644
index 9eae0bf..0000000
--- a/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers.android_device_lib import errors as errors
-from mobly.controllers.android_device_lib import (
- snippet_client_v2 as snippet_client_v2,
-)
-from mobly.controllers.android_device_lib.services import (
- base_service as base_service,
-)
-
-MISSING_SNIPPET_CLIENT_MSG: str
-
-class Error(errors.ServiceError):
- SERVICE_TYPE: str
-
-class SnippetManagementService(base_service.BaseService):
- def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
- @property
- def is_alive(self): ...
- def get_snippet_client(self, name): ...
- def add_snippet_client(self, name, package) -> None: ...
- def remove_snippet_client(self, name) -> None: ...
- def start(self) -> None: ...
- def stop(self) -> None: ...
- def pause(self) -> None: ...
- def resume(self) -> None: ...
- def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi b/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi
deleted file mode 100644
index e2195f8..0000000
--- a/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-from _typeshed import Incomplete
-from mobly import utils as utils
-from mobly.controllers.android_device_lib import (
- event_dispatcher as event_dispatcher,
-)
-from mobly.controllers.android_device_lib import (
- jsonrpc_client_base as jsonrpc_client_base,
-)
-
-class Sl4aClient(jsonrpc_client_base.JsonRpcClientBase):
- ed: Incomplete
- def __init__(self, ad) -> None: ...
- device_port: Incomplete
- def start_app_and_connect(self) -> None: ...
- host_port: Incomplete
- def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
- def stop_app(self) -> None: ...
- def stop_event_dispatcher(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_client.pyi b/stubs/mobly/controllers/android_device_lib/snippet_client.pyi
deleted file mode 100644
index 96f0a88..0000000
--- a/stubs/mobly/controllers/android_device_lib/snippet_client.pyi
+++ /dev/null
@@ -1,24 +0,0 @@
-from _typeshed import Incomplete
-from mobly import utils as utils
-from mobly.controllers.android_device_lib import adb as adb
-from mobly.controllers.android_device_lib import errors as errors
-from mobly.controllers.android_device_lib import (
- jsonrpc_client_base as jsonrpc_client_base,
-)
-from mobly.snippet import errors as snippet_errors
-
-AppStartPreCheckError = snippet_errors.ServerStartPreCheckError
-ProtocolVersionError = snippet_errors.ServerStartProtocolError
-
-class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
- package: Incomplete
- def __init__(self, package, ad) -> None: ...
- @property
- def is_alive(self): ...
- @property
- def user_id(self): ...
- def start_app_and_connect(self) -> None: ...
- host_port: Incomplete
- def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
- def stop_app(self) -> None: ...
- def help(self, print_output: bool = ...): ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi b/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi
deleted file mode 100644
index 658740b..0000000
--- a/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi
+++ /dev/null
@@ -1,43 +0,0 @@
-import enum
-
-from _typeshed import Incomplete
-from mobly import utils as utils
-from mobly.controllers.android_device_lib import adb as adb
-from mobly.controllers.android_device_lib import (
- callback_handler_v2 as callback_handler_v2,
-)
-from mobly.snippet import client_base as client_base
-from mobly.snippet import errors as errors
-
-UNKNOWN_UID: int
-
-class ConnectionHandshakeCommand(enum.Enum):
- INIT: str
- CONTINUE: str
-
-class SnippetClientV2(client_base.ClientBase):
- host_port: Incomplete
- device_port: Incomplete
- uid: Incomplete
- def __init__(self, package, ad) -> None: ...
- @property
- def user_id(self): ...
- @property
- def is_alive(self): ...
- def before_starting_server(self) -> None: ...
- def start_server(self) -> None: ...
- def make_connection(self) -> None: ...
- def create_socket_connection(self) -> None: ...
- def send_handshake_request(self, uid=..., cmd=...) -> None: ...
- def check_server_proc_running(self) -> None: ...
- def send_rpc_request(self, request): ...
- def handle_callback(self, callback_id, ret_value, rpc_func_name): ...
- def make_connection_with_forwarded_port(
- self, host_port, device_port, uid=..., cmd=...
- ) -> None: ...
- def stop(self) -> None: ...
- def close_connection(self) -> None: ...
- def restore_server_connection(
- self, port: Incomplete | None = ...
- ) -> None: ...
- def help(self, print_output: bool = ...): ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_event.pyi b/stubs/mobly/controllers/android_device_lib/snippet_event.pyi
deleted file mode 100644
index 5d99106..0000000
--- a/stubs/mobly/controllers/android_device_lib/snippet_event.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-from _typeshed import Incomplete
-
-def from_dict(event_dict): ...
-
-class SnippetEvent:
- callback_id: Incomplete
- name: Incomplete
- creation_time: Incomplete
- data: Incomplete
- def __init__(self, callback_id, name, creation_time, data) -> None: ...
diff --git a/stubs/mobly/controllers/attenuator.pyi b/stubs/mobly/controllers/attenuator.pyi
deleted file mode 100644
index e07d7b9..0000000
--- a/stubs/mobly/controllers/attenuator.pyi
+++ /dev/null
@@ -1,24 +0,0 @@
-from _typeshed import Incomplete
-
-MOBLY_CONTROLLER_CONFIG_NAME: str
-KEY_ADDRESS: str
-KEY_PORT: str
-KEY_MODEL: str
-KEY_PATHS: str
-PACKAGE_PATH_TEMPLATE: str
-
-def create(configs): ...
-def destroy(objs) -> None: ...
-
-class Error(Exception): ...
-
-class AttenuatorPath:
- model: Incomplete
- attenuation_device: Incomplete
- idx: Incomplete
- def __init__(
- self, attenuation_device, idx: int = ..., name: Incomplete | None = ...
- ) -> None: ...
- def set_atten(self, value) -> None: ...
- def get_atten(self): ...
- def get_max_atten(self): ...
diff --git a/stubs/mobly/controllers/attenuator_lib/__init__.pyi b/stubs/mobly/controllers/attenuator_lib/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/controllers/attenuator_lib/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi b/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi
deleted file mode 100644
index 9f3dfca..0000000
--- a/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers import attenuator as attenuator
-from mobly.controllers.attenuator_lib import (
- telnet_scpi_client as telnet_scpi_client,
-)
-
-class AttenuatorDevice:
- path_count: Incomplete
- def __init__(self, path_count: int = ...) -> None: ...
- @property
- def is_open(self): ...
- properties: Incomplete
- max_atten: Incomplete
- def open(self, host, port: int = ...) -> None: ...
- def close(self) -> None: ...
- def set_atten(self, idx, value) -> None: ...
- def get_atten(self, idx: int = ...): ...
diff --git a/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi b/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi
deleted file mode 100644
index 3ebb042..0000000
--- a/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi
+++ /dev/null
@@ -1,20 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers import attenuator as attenuator
-
-class TelnetScpiClient:
- tx_cmd_separator: Incomplete
- rx_cmd_separator: Incomplete
- prompt: Incomplete
- host: Incomplete
- port: Incomplete
- def __init__(
- self,
- tx_cmd_separator: str = ...,
- rx_cmd_separator: str = ...,
- prompt: str = ...,
- ) -> None: ...
- def open(self, host, port: int = ...) -> None: ...
- @property
- def is_open(self): ...
- def close(self) -> None: ...
- def cmd(self, cmd_str, wait_ret: bool = ...): ...
diff --git a/stubs/mobly/controllers/iperf_server.pyi b/stubs/mobly/controllers/iperf_server.pyi
deleted file mode 100644
index 29fd940..0000000
--- a/stubs/mobly/controllers/iperf_server.pyi
+++ /dev/null
@@ -1,31 +0,0 @@
-from _typeshed import Incomplete
-from mobly import utils as utils
-
-MOBLY_CONTROLLER_CONFIG_NAME: str
-
-def create(configs): ...
-def destroy(objs) -> None: ...
-
-class IPerfResult:
- result: Incomplete
- def __init__(self, result_path) -> None: ...
- def get_json(self): ...
- @property
- def error(self): ...
- @property
- def avg_rate(self): ...
- @property
- def avg_receive_rate(self): ...
- @property
- def avg_send_rate(self): ...
-
-class IPerfServer:
- port: Incomplete
- log_path: Incomplete
- iperf_str: Incomplete
- iperf_process: Incomplete
- log_files: Incomplete
- started: bool
- def __init__(self, port, log_path) -> None: ...
- def start(self, extra_args: str = ..., tag: str = ...) -> None: ...
- def stop(self) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer.pyi b/stubs/mobly/controllers/sniffer.pyi
deleted file mode 100644
index 7ee9062..0000000
--- a/stubs/mobly/controllers/sniffer.pyi
+++ /dev/null
@@ -1,36 +0,0 @@
-from _typeshed import Incomplete
-
-MOBLY_CONTROLLER_CONFIG_NAME: str
-
-def create(configs): ...
-def destroy(objs) -> None: ...
-
-class SnifferError(Exception): ...
-class InvalidDataError(Exception): ...
-class ExecutionError(SnifferError): ...
-class InvalidOperationError(SnifferError): ...
-
-class Sniffer:
- CONFIG_KEY_CHANNEL: str
- def __init__(
- self, interface, logger, base_configs: Incomplete | None = ...
- ) -> None: ...
- def get_descriptor(self) -> None: ...
- def get_type(self) -> None: ...
- def get_subtype(self) -> None: ...
- def get_interface(self) -> None: ...
- def get_capture_file(self) -> None: ...
- def start_capture(
- self,
- override_configs: Incomplete | None = ...,
- additional_args: Incomplete | None = ...,
- duration: Incomplete | None = ...,
- packet_count: Incomplete | None = ...,
- ) -> None: ...
- def stop_capture(self) -> None: ...
- def wait_for_capture(self, timeout: Incomplete | None = ...) -> None: ...
-
-class ActiveCaptureContext:
- def __init__(self, sniffer, timeout: Incomplete | None = ...) -> None: ...
- def __enter__(self) -> None: ...
- def __exit__(self, type, value, traceback) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer_lib/__init__.pyi b/stubs/mobly/controllers/sniffer_lib/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/controllers/sniffer_lib/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/controllers/sniffer_lib/local/__init__.pyi b/stubs/mobly/controllers/sniffer_lib/local/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/controllers/sniffer_lib/local/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi b/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi
deleted file mode 100644
index 4e56926..0000000
--- a/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi
+++ /dev/null
@@ -1,21 +0,0 @@
-from _typeshed import Incomplete
-from mobly import logger as logger
-from mobly import utils as utils
-from mobly.controllers import sniffer as sniffer
-
-class SnifferLocalBase(sniffer.Sniffer):
- def __init__(
- self, interface, logger, base_configs: Incomplete | None = ...
- ) -> None: ...
- def get_interface(self): ...
- def get_type(self): ...
- def get_capture_file(self): ...
- def start_capture(
- self,
- override_configs: Incomplete | None = ...,
- additional_args: Incomplete | None = ...,
- duration: Incomplete | None = ...,
- packet_count: Incomplete | None = ...,
- ): ...
- def stop_capture(self) -> None: ...
- def wait_for_capture(self, timeout: Incomplete | None = ...) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi b/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi
deleted file mode 100644
index 2cc12b3..0000000
--- a/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers import sniffer as sniffer
-from mobly.controllers.sniffer_lib.local import local_base as local_base
-
-class Sniffer(local_base.SnifferLocalBase):
- def __init__(
- self, config_path, logger, base_configs: Incomplete | None = ...
- ) -> None: ...
- def get_descriptor(self): ...
- def get_subtype(self): ...
diff --git a/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi b/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi
deleted file mode 100644
index 2cc12b3..0000000
--- a/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-from _typeshed import Incomplete
-from mobly.controllers import sniffer as sniffer
-from mobly.controllers.sniffer_lib.local import local_base as local_base
-
-class Sniffer(local_base.SnifferLocalBase):
- def __init__(
- self, config_path, logger, base_configs: Incomplete | None = ...
- ) -> None: ...
- def get_descriptor(self): ...
- def get_subtype(self): ...
diff --git a/stubs/mobly/expects.pyi b/stubs/mobly/expects.pyi
deleted file mode 100644
index e1de6f3..0000000
--- a/stubs/mobly/expects.pyi
+++ /dev/null
@@ -1,30 +0,0 @@
-from collections.abc import Generator
-
-from _typeshed import Incomplete
-from mobly import asserts as asserts
-from mobly import records as records
-from mobly import signals as signals
-
-DEFAULT_TEST_RESULT_RECORD: Incomplete
-
-class _ExpectErrorRecorder:
- def __init__(self, record: Incomplete | None = ...) -> None: ...
- def reset_internal_states(
- self, record: Incomplete | None = ...
- ) -> None: ...
- @property
- def has_error(self): ...
- @property
- def error_count(self): ...
- def add_error(self, error) -> None: ...
-
-def expect_true(condition, msg, extras: Incomplete | None = ...) -> None: ...
-def expect_false(condition, msg, extras: Incomplete | None = ...) -> None: ...
-def expect_equal(
- first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> None: ...
-def expect_no_raises(
- message: Incomplete | None = ..., extras: Incomplete | None = ...
-) -> Generator[None, None, None]: ...
-
-recorder: Incomplete
diff --git a/stubs/mobly/keys.pyi b/stubs/mobly/keys.pyi
deleted file mode 100644
index 9007329..0000000
--- a/stubs/mobly/keys.pyi
+++ /dev/null
@@ -1,9 +0,0 @@
-import enum
-
-class Config(enum.Enum):
- key_mobly_params: str
- key_log_path: str
- key_testbed: str
- key_testbed_name: str
- key_testbed_controllers: str
- key_testbed_test_params: str
diff --git a/stubs/mobly/logger.pyi b/stubs/mobly/logger.pyi
deleted file mode 100644
index 2122ade..0000000
--- a/stubs/mobly/logger.pyi
+++ /dev/null
@@ -1,42 +0,0 @@
-import logging
-
-from _typeshed import Incomplete
-from mobly import records as records
-from mobly import utils as utils
-
-LINUX_MAX_FILENAME_LENGTH: int
-WINDOWS_MAX_FILENAME_LENGTH: int
-WINDOWS_RESERVED_CHARACTERS_REPLACEMENTS: Incomplete
-WINDOWS_RESERVED_FILENAME_REGEX: Incomplete
-WINDOWS_RESERVED_FILENAME_PREFIX: str
-log_line_format: str
-log_line_time_format: str
-log_line_timestamp_len: int
-logline_timestamp_re: Incomplete
-
-def is_valid_logline_timestamp(timestamp): ...
-def logline_timestamp_comparator(t1, t2): ...
-def epoch_to_log_line_timestamp(
- epoch_time, time_zone: Incomplete | None = ...
-): ...
-def get_log_line_timestamp(delta: Incomplete | None = ...): ...
-def get_log_file_timestamp(delta: Incomplete | None = ...): ...
-def kill_test_logger(logger) -> None: ...
-def create_latest_log_alias(actual_path, alias) -> None: ...
-def setup_test_logger(
- log_path,
- prefix: Incomplete | None = ...,
- alias: str = ...,
- console_level=...,
-) -> None: ...
-def sanitize_filename(filename): ...
-def normalize_log_line_timestamp(log_line_timestamp): ...
-
-class PrefixLoggerAdapter(logging.LoggerAdapter):
- EXTRA_KEY_LOG_PREFIX: str
- _KWARGS_TYPE: Incomplete
- _PROCESS_RETURN_TYPE: Incomplete
- extra: _KWARGS_TYPE
- def process(
- self, msg: str, kwargs: _KWARGS_TYPE
- ) -> _PROCESS_RETURN_TYPE: ...
diff --git a/stubs/mobly/records.pyi b/stubs/mobly/records.pyi
deleted file mode 100644
index 2ae6905..0000000
--- a/stubs/mobly/records.pyi
+++ /dev/null
@@ -1,118 +0,0 @@
-import enum
-
-from _typeshed import Incomplete
-from mobly import signals as signals
-from mobly import utils as utils
-
-OUTPUT_FILE_INFO_LOG: str
-OUTPUT_FILE_DEBUG_LOG: str
-OUTPUT_FILE_SUMMARY: str
-
-class Error(Exception): ...
-
-def uid(uid): ...
-
-class TestSummaryEntryType(enum.Enum):
- TEST_NAME_LIST: str
- RECORD: str
- SUMMARY: str
- CONTROLLER_INFO: str
- USER_DATA: str
-
-class TestSummaryWriter:
- def __init__(self, path) -> None: ...
- def __copy__(self): ...
- def __deepcopy__(self, *args): ...
- def dump(self, content, entry_type) -> None: ...
-
-class TestResultEnums:
- RECORD_NAME: str
- RECORD_CLASS: str
- RECORD_BEGIN_TIME: str
- RECORD_END_TIME: str
- RECORD_RESULT: str
- RECORD_UID: str
- RECORD_EXTRAS: str
- RECORD_EXTRA_ERRORS: str
- RECORD_DETAILS: str
- RECORD_TERMINATION_SIGNAL_TYPE: str
- RECORD_STACKTRACE: str
- RECORD_SIGNATURE: str
- RECORD_RETRY_PARENT: str
- RECORD_POSITION: str
- TEST_RESULT_PASS: str
- TEST_RESULT_FAIL: str
- TEST_RESULT_SKIP: str
- TEST_RESULT_ERROR: str
-
-class ControllerInfoRecord:
- KEY_TEST_CLASS: Incomplete
- KEY_CONTROLLER_NAME: str
- KEY_CONTROLLER_INFO: str
- KEY_TIMESTAMP: str
- test_class: Incomplete
- controller_name: Incomplete
- controller_info: Incomplete
- timestamp: Incomplete
- def __init__(self, test_class, controller_name, info) -> None: ...
- def to_dict(self): ...
-
-class ExceptionRecord:
- exception: Incomplete
- type: Incomplete
- stacktrace: Incomplete
- extras: Incomplete
- position: Incomplete
- is_test_signal: Incomplete
- def __init__(self, e, position: Incomplete | None = ...) -> None: ...
- def to_dict(self): ...
- def __deepcopy__(self, memo): ...
-
-class TestResultRecord:
- test_name: Incomplete
- test_class: Incomplete
- begin_time: Incomplete
- end_time: Incomplete
- uid: Incomplete
- signature: Incomplete
- retry_parent: Incomplete
- termination_signal: Incomplete
- extra_errors: Incomplete
- result: Incomplete
- def __init__(self, t_name, t_class: Incomplete | None = ...) -> None: ...
- @property
- def details(self): ...
- @property
- def termination_signal_type(self): ...
- @property
- def stacktrace(self): ...
- @property
- def extras(self): ...
- def test_begin(self) -> None: ...
- def update_record(self) -> None: ...
- def test_pass(self, e: Incomplete | None = ...) -> None: ...
- def test_fail(self, e: Incomplete | None = ...) -> None: ...
- def test_skip(self, e: Incomplete | None = ...) -> None: ...
- def test_error(self, e: Incomplete | None = ...) -> None: ...
- def add_error(self, position, e) -> None: ...
- def to_dict(self): ...
-
-class TestResult:
- requested: Incomplete
- failed: Incomplete
- executed: Incomplete
- passed: Incomplete
- skipped: Incomplete
- error: Incomplete
- controller_info: Incomplete
- def __init__(self) -> None: ...
- def __add__(self, r): ...
- def add_record(self, record) -> None: ...
- def add_controller_info_record(self, controller_info_record) -> None: ...
- def add_class_error(self, test_record) -> None: ...
- def is_test_executed(self, test_name): ...
- @property
- def is_all_pass(self): ...
- def requested_test_names_dict(self): ...
- def summary_str(self): ...
- def summary_dict(self): ...
diff --git a/stubs/mobly/runtime_test_info.pyi b/stubs/mobly/runtime_test_info.pyi
deleted file mode 100644
index 9dd1733..0000000
--- a/stubs/mobly/runtime_test_info.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-from mobly import utils as utils
-
-class RuntimeTestInfo:
- def __init__(self, test_name, log_path, record) -> None: ...
- @property
- def name(self): ...
- @property
- def signature(self): ...
- @property
- def record(self): ...
- @property
- def output_path(self): ...
diff --git a/stubs/mobly/signals.pyi b/stubs/mobly/signals.pyi
deleted file mode 100644
index f4fbe53..0000000
--- a/stubs/mobly/signals.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-from _typeshed import Incomplete
-
-class TestSignalError(Exception): ...
-
-class TestSignal(Exception):
- details: Incomplete
- extras: Incomplete
- def __init__(self, details, extras: Incomplete | None = ...) -> None: ...
-
-class TestError(TestSignal): ...
-class TestFailure(TestSignal): ...
-class TestPass(TestSignal): ...
-class TestSkip(TestSignal): ...
-class TestAbortSignal(TestSignal): ...
-class TestAbortClass(TestAbortSignal): ...
-class TestAbortAll(TestAbortSignal): ...
-class ControllerError(Exception): ...
diff --git a/stubs/mobly/snippet/__init__.pyi b/stubs/mobly/snippet/__init__.pyi
deleted file mode 100644
index e69de29..0000000
--- a/stubs/mobly/snippet/__init__.pyi
+++ /dev/null
diff --git a/stubs/mobly/snippet/callback_event.pyi b/stubs/mobly/snippet/callback_event.pyi
deleted file mode 100644
index f3dfbb9..0000000
--- a/stubs/mobly/snippet/callback_event.pyi
+++ /dev/null
@@ -1,10 +0,0 @@
-from _typeshed import Incomplete
-
-def from_dict(event_dict): ...
-
-class CallbackEvent:
- callback_id: Incomplete
- name: Incomplete
- creation_time: Incomplete
- data: Incomplete
- def __init__(self, callback_id, name, creation_time, data) -> None: ...
diff --git a/stubs/mobly/snippet/callback_handler_base.pyi b/stubs/mobly/snippet/callback_handler_base.pyi
deleted file mode 100644
index a95e016..0000000
--- a/stubs/mobly/snippet/callback_handler_base.pyi
+++ /dev/null
@@ -1,33 +0,0 @@
-import abc
-
-from _typeshed import Incomplete
-from mobly.snippet import callback_event as callback_event
-from mobly.snippet import errors as errors
-
-class CallbackHandlerBase(abc.ABC, metaclass=abc.ABCMeta):
- ret_value: Incomplete
- def __init__(
- self,
- callback_id,
- event_client,
- ret_value,
- method_name,
- device,
- rpc_max_timeout_sec,
- default_timeout_sec: int = ...,
- ) -> None: ...
- @property
- def rpc_max_timeout_sec(self): ...
- @property
- def default_timeout_sec(self): ...
- @property
- def callback_id(self): ...
- @abc.abstractmethod
- def callEventWaitAndGetRpc(self, callback_id, event_name, timeout_sec): ...
- @abc.abstractmethod
- def callEventGetAllRpc(self, callback_id, event_name): ...
- def waitAndGet(self, event_name, timeout: Incomplete | None = ...): ...
- def waitForEvent(
- self, event_name, predicate, timeout: Incomplete | None = ...
- ): ...
- def getAll(self, event_name): ...
diff --git a/stubs/mobly/snippet/client_base.pyi b/stubs/mobly/snippet/client_base.pyi
deleted file mode 100644
index 92d4b9c..0000000
--- a/stubs/mobly/snippet/client_base.pyi
+++ /dev/null
@@ -1,34 +0,0 @@
-import abc
-
-from _typeshed import Incomplete
-from mobly.snippet import errors as errors
-
-RPC_RESPONSE_REQUIRED_FIELDS: Incomplete
-
-class ClientBase(abc.ABC, metaclass=abc.ABCMeta):
- package: Incomplete
- log: Incomplete
- verbose_logging: bool
- def __init__(self, package, device) -> None: ...
- def __del__(self) -> None: ...
- def initialize(self) -> None: ...
- @abc.abstractmethod
- def before_starting_server(self): ...
- @abc.abstractmethod
- def start_server(self): ...
- @abc.abstractmethod
- def make_connection(self): ...
- def __getattr__(self, name): ...
- def set_snippet_client_verbose_logging(self, verbose) -> None: ...
- @abc.abstractmethod
- def restore_server_connection(self, port: Incomplete | None = ...): ...
- @abc.abstractmethod
- def check_server_proc_running(self): ...
- @abc.abstractmethod
- def send_rpc_request(self, request): ...
- @abc.abstractmethod
- def handle_callback(self, callback_id, ret_value, rpc_func_name): ...
- @abc.abstractmethod
- def stop(self): ...
- @abc.abstractmethod
- def close_connection(self): ...
diff --git a/stubs/mobly/snippet/errors.pyi b/stubs/mobly/snippet/errors.pyi
deleted file mode 100644
index 2c2ac2a..0000000
--- a/stubs/mobly/snippet/errors.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-from mobly.controllers.android_device_lib import errors as errors
-
-class Error(errors.DeviceError): ...
-class ServerRestoreConnectionError(Error): ...
-class ServerStartError(Error): ...
-class ServerStartProtocolError(ServerStartError): ...
-class ServerStartPreCheckError(Error): ...
-class ApiError(Error): ...
-
-class ProtocolError(Error):
- NO_RESPONSE_FROM_HANDSHAKE: str
- NO_RESPONSE_FROM_SERVER: str
- MISMATCHED_API_ID: str
- RESPONSE_MISSING_FIELD: str
-
-class ServerDiedError(Error): ...
-class CallbackHandlerBaseError(errors.DeviceError): ...
-class CallbackHandlerTimeoutError(Error): ...
diff --git a/stubs/mobly/suite_runner.pyi b/stubs/mobly/suite_runner.pyi
deleted file mode 100644
index 415ed32..0000000
--- a/stubs/mobly/suite_runner.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-from _typeshed import Incomplete
-from mobly import base_suite as base_suite
-from mobly import base_test as base_test
-from mobly import config_parser as config_parser
-from mobly import signals as signals
-from mobly import test_runner as test_runner
-
-class Error(Exception): ...
-
-def run_suite_class(argv: Incomplete | None = ...) -> None: ...
-def run_suite(test_classes, argv: Incomplete | None = ...) -> None: ...
-def compute_selected_tests(test_classes, selected_tests): ...
diff --git a/stubs/mobly/test_runner.pyi b/stubs/mobly/test_runner.pyi
deleted file mode 100644
index f2aee14..0000000
--- a/stubs/mobly/test_runner.pyi
+++ /dev/null
@@ -1,53 +0,0 @@
-from collections.abc import Generator
-
-from _typeshed import Incomplete
-from mobly import base_test as base_test
-from mobly import config_parser as config_parser
-from mobly import logger as logger
-from mobly import records as records
-from mobly import signals as signals
-from mobly import utils as utils
-
-class Error(Exception): ...
-
-def main(argv: Incomplete | None = ...) -> None: ...
-def parse_mobly_cli_args(argv): ...
-
-class TestRunner:
- class _TestRunInfo:
- config: Incomplete
- test_class: Incomplete
- test_class_name_suffix: Incomplete
- tests: Incomplete
- def __init__(
- self,
- config,
- test_class,
- tests: Incomplete | None = ...,
- test_class_name_suffix: Incomplete | None = ...,
- ) -> None: ...
-
- class _TestRunMetaData:
- root_output_path: Incomplete
- def __init__(self, log_dir, testbed_name) -> None: ...
- def generate_test_run_log_path(self): ...
- def set_start_point(self) -> None: ...
- def set_end_point(self) -> None: ...
- @property
- def run_id(self): ...
- @property
- def time_elapsed_sec(self): ...
-
- results: Incomplete
- def __init__(self, log_dir, testbed_name) -> None: ...
- def mobly_logger(
- self, alias: str = ..., console_level=...
- ) -> Generator[Incomplete, None, None]: ...
- def add_test_class(
- self,
- config,
- test_class,
- tests: Incomplete | None = ...,
- name_suffix: Incomplete | None = ...,
- ) -> None: ...
- def run(self) -> None: ...
diff --git a/stubs/mobly/utils.pyi b/stubs/mobly/utils.pyi
deleted file mode 100644
index 05e8699..0000000
--- a/stubs/mobly/utils.pyi
+++ /dev/null
@@ -1,47 +0,0 @@
-from _typeshed import Incomplete
-
-MAX_FILENAME_LEN: int
-MAX_PORT_ALLOCATION_RETRY: int
-ascii_letters_and_digits: Incomplete
-valid_filename_chars: Incomplete
-GMT_to_olson: Incomplete
-
-class Error(Exception): ...
-
-def abs_path(path): ...
-def create_dir(path) -> None: ...
-def create_alias(target_path, alias_path) -> None: ...
-def get_current_epoch_time(): ...
-def get_current_human_time(): ...
-def epoch_to_human_time(epoch_time): ...
-def get_timezone_olson_id(): ...
-def find_files(paths, file_predicate): ...
-def load_file_to_base64_str(f_path): ...
-def find_field(item_list, cond, comparator, target_field): ...
-def rand_ascii_str(length): ...
-def concurrent_exec(
- func, param_list, max_workers: int = ..., raise_on_exception: bool = ...
-): ...
-def run_command(
- cmd,
- stdout=...,
- stderr=...,
- shell=...,
- timeout=...,
- cwd=...,
- env=...,
- universal_newlines: bool = ...,
-) -> tuple[int, bytes, bytes] | tuple[int, str, str]: ...
-def start_standing_subprocess(
- cmd, shell: bool = ..., env: Incomplete | None = ...
-): ...
-def stop_standing_subprocess(proc) -> None: ...
-def wait_for_standing_subprocess(
- proc, timeout: Incomplete | None = ...
-) -> None: ...
-def get_available_host_port(): ...
-def grep(regex, output): ...
-def cli_cmd_to_string(args): ...
-def get_settable_properties(cls): ...
-def find_subclasses_in_module(base_classes, module): ...
-def find_subclass_in_module(base_class, module): ...
diff --git a/tests/BUILD.gn b/tests/BUILD.gn
deleted file mode 100644
index e17a202..0000000
--- a/tests/BUILD.gn
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_host, "antlion tests only supported for host testing")
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- "dhcp:e2e_tests",
- "examples:e2e_tests",
- "wlan:e2e_tests",
- "wlan_policy:e2e_tests",
- ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [
- "examples:e2e_tests_quick",
- "wlan:e2e_tests_quick",
- "wlan_policy:e2e_tests_quick",
- ]
-}
-
-group("e2e_tests_manual") {
- testonly = true
- public_deps = [ "wlan:e2e_tests_manual" ]
-}
diff --git a/tests/dhcp/BUILD.gn b/tests/dhcp/BUILD.gn
deleted file mode 100644
index adf78e0..0000000
--- a/tests/dhcp/BUILD.gn
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("dhcpv4_duplicate_address_test") {
- main_source = "Dhcpv4DuplicateAddressTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("dhcpv4_interop_basic_test") {
- main_source = "Dhcpv4InteropBasicTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("dhcpv4_interop_combinatorial_options_test") {
- main_source = "Dhcpv4InteropCombinatorialOptionsTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("dhcpv4_interop_fixture_test") {
- main_source = "Dhcpv4InteropFixtureTest.py"
- environments = display_ap_envs
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":dhcpv4_duplicate_address_test",
- ":dhcpv4_interop_basic_test",
- ":dhcpv4_interop_combinatorial_options_test",
- ":dhcpv4_interop_fixture_test",
- ]
-}
diff --git a/tests/dhcp/Dhcpv4DuplicateAddressTest.py b/tests/dhcp/Dhcpv4DuplicateAddressTest.py
deleted file mode 100644
index 0889603..0000000
--- a/tests/dhcp/Dhcpv4DuplicateAddressTest.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-from ipaddress import IPv4Address
-
-from mobly import asserts, signals, test_runner
-
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.utils_lib.commands import ip
-from antlion.test_utils.dhcp import base_test
-
-
-class Dhcpv4DuplicateAddressTest(base_test.Dhcpv4InteropFixture):
- def setup_test(self) -> None:
- super().setup_test()
- self.extra_addresses: list[IPv4Address] = []
- self.ap_params = self.setup_ap()
- self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)
-
- def teardown_test(self) -> None:
- super().teardown_test()
- for ip in self.extra_addresses:
- self.ap_ip_cmd.remove_ipv4_address(self.ap_params.id, ip)
-
- def test_duplicate_address_assignment(self) -> None:
- """It's possible for a DHCP server to assign an address that already exists on the network.
- DHCP clients are expected to perform a "gratuitous ARP" probe of the to-be-assigned address and,
- if they find the address already in use, decline that address. Clients should also recover by
- asking for a different address.
- """
- # Modify subnet to hold fewer addresses.
- # A '/29' has 8 addresses (6 usable, excluding the network and broadcast addresses)
- subnet = next(self.ap_params.network.subnets(new_prefix=29))
- subnet_conf = dhcp_config.Subnet(
- subnet=subnet,
- router=self.ap_params.ip,
- # When the DHCP server is considering dynamically allocating an IP address to a client,
- # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
- # for a second, and if no ICMP Echo response has been heard, it assigns the address.
- # If a response is heard, the lease is abandoned, and the server does not respond to
- # the client.
- # The ping-check configuration parameter can be used to control checking - if its value
- # is false, no ping check is done.
- additional_parameters={"ping-check": "false"},
- )
- dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
- self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
- # Add each of the usable IPs as an alias for the router's interface, such that the router
- # will respond to any pings on it.
- for ip in subnet.hosts():
- self.ap_ip_cmd.add_ipv4_address(
- self.ap_params.id,
- ipaddress.IPv4Interface(f"{ip}/{ip.max_prefixlen}"),
- )
- # Ensure we remove the address in self.teardown_test() even if the test fails
- self.extra_addresses.append(ip)
-
- self.connect(ap_params=self.ap_params)
- with asserts.assert_raises(ConnectionError):
- self.get_device_ipv4_addr()
-
- # Per spec, the flow should be:
- # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
- dhcp_logs = self.access_point.get_dhcp_logs()
- if dhcp_logs is None:
- raise signals.TestError(
- "DHCP logs not found; was the DHCP server started?"
- )
-
- for expected_message in [
- r"DHCPDISCOVER from \S+",
- r"DHCPOFFER on [0-9.]+ to \S+",
- r"DHCPREQUEST for [0-9.]+",
- r"DHCPACK on [0-9.]+",
- r"DHCPDECLINE of [0-9.]+ from \S+ via .*: abandoned",
- r"Abandoning IP address [0-9.]+: declined",
- ]:
- asserts.assert_true(
- re.search(expected_message, dhcp_logs),
- f"Did not find expected message ({expected_message}) in dhcp logs: {dhcp_logs}"
- + "\n",
- )
-
- # Remove each of the IP aliases.
- # Note: this also removes the router's address (e.g. 192.168.1.1), so pinging the
- # router after this will not work.
- while self.extra_addresses:
- self.ap_ip_cmd.remove_ipv4_address(
- self.ap_params.id, self.extra_addresses.pop()
- )
-
- # Now, we should get an address successfully
- ip = self.get_device_ipv4_addr()
- dhcp_logs = self.access_point.get_dhcp_logs()
- if dhcp_logs is None:
- raise signals.TestError(
- "DHCP logs not found; was the DHCP server started?"
- )
-
- expected_string = f"DHCPREQUEST for {ip}"
- asserts.assert_true(
- dhcp_logs.count(expected_string) >= 1,
- f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
- + dhcp_logs
- + "\n",
- )
-
- expected_string = f"DHCPACK on {ip}"
- asserts.assert_true(
- dhcp_logs.count(expected_string) >= 1,
- f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
- + dhcp_logs
- + "\n",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
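
A minimal sketch of the subnet arithmetic the deleted test relies on, using Python's standard ipaddress module and an assumed 192.168.9.0/24 network (the real test derives its network from the AP configuration):

    import ipaddress

    # Assumed example network; the test obtains the real one from the AP.
    network = ipaddress.IPv4Network("192.168.9.0/24")

    # The first /29 inside it holds 8 addresses in total.
    subnet = next(network.subnets(new_prefix=29))

    # hosts() excludes the network and broadcast addresses, leaving 6 usable
    # ones; the test aliases every one of them onto the AP interface so the
    # client's conflict check always finds the offered address in use.
    usable = list(subnet.hosts())
    print(subnet, len(usable))  # 192.168.9.0/29 6
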
diff --git a/tests/dhcp/Dhcpv4InteropBasicTest.py b/tests/dhcp/Dhcpv4InteropBasicTest.py
deleted file mode 100644
index 079d105..0000000
--- a/tests/dhcp/Dhcpv4InteropBasicTest.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-import time
-
-from mobly import asserts, signals, test_runner
-
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.test_utils.dhcp import base_test
-
-
-class Dhcpv4InteropBasicTest(base_test.Dhcpv4InteropFixture):
- """DhcpV4 tests which validate basic DHCP client/server interactions."""
-
- def test_basic_dhcp_assignment(self) -> None:
- self.run_test_case_expect_dhcp_success(
- dhcp_options={},
- dhcp_parameters={},
- )
-
- def test_pool_allows_unknown_clients(self) -> None:
- self.run_test_case_expect_dhcp_success(
- dhcp_options={},
- dhcp_parameters={"allow": "unknown-clients"},
- )
-
- def test_pool_disallows_unknown_clients(self) -> None:
- ap_params = self.setup_ap()
- subnet_conf = dhcp_config.Subnet(
- subnet=ap_params.network,
- router=ap_params.ip,
- additional_parameters={"deny": "unknown-clients"},
- )
- dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
- self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
- self.connect(ap_params=ap_params)
- with asserts.assert_raises(ConnectionError):
- self.get_device_ipv4_addr()
-
- dhcp_logs = self.access_point.get_dhcp_logs()
- if dhcp_logs is None:
- raise signals.TestError(
- "DHCP logs not found; was the DHCP server started?"
- )
-
- asserts.assert_true(
- re.search(r"DHCPDISCOVER from .*no free leases", dhcp_logs),
- "Did not find expected message in dhcp logs: " + dhcp_logs + "\n",
- )
-
- def test_lease_renewal(self) -> None:
- """Validates that a client renews their DHCP lease."""
- LEASE_TIME = 30
- ap_params = self.setup_ap()
- subnet_conf = dhcp_config.Subnet(
- subnet=ap_params.network, router=ap_params.ip
- )
- dhcp_conf = dhcp_config.DhcpConfig(
- subnets=[subnet_conf],
- default_lease_time=LEASE_TIME,
- max_lease_time=LEASE_TIME,
- )
- self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
- self.connect(ap_params=ap_params)
- ip = self.get_device_ipv4_addr()
-
- SLEEP_TIME = LEASE_TIME + 3
- self.log.info(f"Sleeping {SLEEP_TIME}s to await DHCP renewal")
- time.sleep(SLEEP_TIME)
-
- dhcp_logs = self.access_point.get_dhcp_logs()
- if dhcp_logs is None:
- raise signals.TestError(
- "DHCP logs not found; was the DHCP server started?"
- )
-
- # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs.
- # The log lines look like:
- # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from 01:23:45:67:89:ab via wlan1
- # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
- expected_string = f"DHCPREQUEST for {ip}"
- asserts.assert_true(
- dhcp_logs.count(expected_string) >= 2,
- f'Not enough DHCP renewals ("{expected_string}") in logs: '
- + dhcp_logs
- + "\n",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
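
A quick sketch of the timing expectation behind test_lease_renewal, assuming the conventional renewal point T1 = lease / 2 (the behavior the comment above attributes to Fuchsia):

    # Values mirror the deleted test; the arithmetic is illustrative only.
    LEASE_TIME = 30                 # seconds, as configured on the DHCP server
    SLEEP_TIME = LEASE_TIME + 3     # how long the test sleeps

    t1 = LEASE_TIME / 2                               # renewal point, 15 s
    renewals_during_sleep = int(SLEEP_TIME // t1)     # 2 renewals in a 33 s window
    total_requests = 1 + renewals_during_sleep        # initial DHCPREQUEST plus renewals
    assert total_requests >= 2                        # matches the test's log assertion
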
diff --git a/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py b/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
deleted file mode 100644
index 9b87a89..0000000
--- a/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-from dataclasses import dataclass
-
-from mobly import asserts, test_runner
-
-from antlion.test_utils.dhcp import base_test
-
-OPT_NUM_DOMAIN_SEARCH = 119
-OPT_NUM_DOMAIN_NAME = 15
-
-
-@dataclass
-class Test:
- name: str
- dhcp_options: dict[str, int | str]
- dhcp_parameters: dict[str, str]
-
-
-class Dhcpv4InteropCombinatorialOptionsTest(base_test.Dhcpv4InteropFixture):
- """DhcpV4 tests which validate combinations of DHCP options."""
-
- def pre_run(self) -> None:
- def test_logic(t: Test) -> None:
- self.run_test_case_expect_dhcp_success(
- t.dhcp_parameters, t.dhcp_options
- )
-
- def name_func(t: Test) -> str:
- return f"test_{t.name}"
-
- self.generate_tests(
- test_logic=test_logic,
- name_func=name_func,
- arg_sets=[
- (t,)
- for t in [
- Test(
- name="domain_name_valid",
- dhcp_options={
- "domain-name": '"example.test"',
- "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
- },
- dhcp_parameters={},
- ),
- Test(
- name="domain_name_invalid",
- dhcp_options={
- "domain-name": '"example.invalid"',
- "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
- },
- dhcp_parameters={},
- ),
- Test(
- name="domain_search_valid",
- dhcp_options={
- "domain-name": '"example.test"',
- "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
- },
- dhcp_parameters={},
- ),
- Test(
- name="domain_search_invalid",
- dhcp_options={
- "domain-name": '"example.invalid"',
- "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
- },
- dhcp_parameters={},
- ),
- Test(
- name="max_sized_message",
- dhcp_options=self._generate_max_sized_message_dhcp_options(),
- dhcp_parameters={},
- ),
- ]
- ],
- )
-
- def _generate_max_sized_message_dhcp_options(self) -> dict[str, int | str]:
- """Generates the DHCP options for max sized message test.
-
- The RFC limits DHCP payloads to 576 bytes unless the client signals it
- can handle larger payloads, which it does by sending DHCP option 57,
- "Maximum DHCP Message Size". Despite being able to accept larger
- payloads, clients typically don't advertise this. The test verifies that
- the client accepts a large message split across multiple ethernet
- domain-search option, which is of unbounded length (though it is
- compressed per RFC 1035 section 4.1.4).
- compressed per RFC1035 section 4.1.4).
-
- Returns:
- A dict of DHCP options.
- """
- typical_ethernet_mtu = 1500
-
- long_dns_setting = ", ".join(
- f'"ns{num}.example"'
- for num in random.sample(range(100_000, 1_000_000), 250)
- )
- # RFC1035 compression means any shared suffix ('.example' in this case)
- # will be deduplicated. Calculate approximate length by removing that
- # suffix.
- long_dns_setting_len = len(
- long_dns_setting.replace(", ", "")
- .replace('"', "")
- .replace(".example", "")
- .encode("utf-8")
- )
- asserts.assert_true(
- long_dns_setting_len > typical_ethernet_mtu,
- "Expected to generate message greater than ethernet mtu",
- )
-
- return {
- "dhcp-max-message-size": long_dns_setting_len * 2,
- "domain-search": long_dns_setting,
- "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
- }
-
-
-if __name__ == "__main__":
- test_runner.main()
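
A back-of-the-envelope check of the size claim in _generate_max_sized_message_dhcp_options, using the same name shape the deleted helper generates (illustrative arithmetic, not a measurement of the actual DHCP wire encoding):

    # Each name looks like "ns123456.example". RFC 1035 compression stores the
    # shared ".example" suffix once, so roughly 8 significant bytes remain per
    # name ("ns" plus six digits).
    names = 250
    bytes_per_name = len("ns123456")          # 8
    approx_payload = names * bytes_per_name   # ~2000 bytes
    typical_ethernet_mtu = 1500
    assert approx_payload > typical_ethernet_mtu
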
diff --git a/tests/dhcp/Dhcpv4InteropFixtureTest.py b/tests/dhcp/Dhcpv4InteropFixtureTest.py
deleted file mode 100644
index 7303052..0000000
--- a/tests/dhcp/Dhcpv4InteropFixtureTest.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import asserts, test_runner
-
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.test_utils.dhcp import base_test
-
-
-class Dhcpv4InteropFixtureTest(base_test.Dhcpv4InteropFixture):
- """Tests which validate the behavior of the Dhcpv4InteropFixture.
-
- In theory, these are more similar to unit tests than ACTS tests, but
- since they interact with hardware (specifically, the AP), we have to
- write and run them like the rest of the ACTS tests."""
-
- def test_invalid_options_not_accepted(self) -> None:
- """Ensures the DHCP server doesn't accept invalid options"""
- ap_params = self.setup_ap()
- subnet_conf = dhcp_config.Subnet(
- subnet=ap_params.network,
- router=ap_params.ip,
- additional_options={"foo": "bar"},
- )
- dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
- with asserts.assert_raises_regex(Exception, r"failed to start"):
- self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
- def test_invalid_parameters_not_accepted(self) -> None:
- """Ensures the DHCP server doesn't accept invalid parameters"""
- ap_params = self.setup_ap()
- subnet_conf = dhcp_config.Subnet(
- subnet=ap_params.network,
- router=ap_params.ip,
- additional_parameters={"foo": "bar"},
- )
- dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
- with asserts.assert_raises_regex(Exception, r"failed to start"):
- self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
- def test_no_dhcp_server_started(self) -> None:
- """Validates that the test fixture does not start a DHCP server."""
- ap_params = self.setup_ap()
- self.connect(ap_params=ap_params)
- with asserts.assert_raises(ConnectionError):
- self.get_device_ipv4_addr()
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/examples/BUILD.gn b/tests/examples/BUILD.gn
deleted file mode 100644
index f42d767..0000000
--- a/tests/examples/BUILD.gn
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("sl4f_sanity_test") {
- main_source = "Sl4fSanityTest.py"
- environments = display_envs + [ qemu_env ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [ ":sl4f_sanity_test" ]
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [ ":sl4f_sanity_test" ]
-}
diff --git a/tests/examples/Sl4fSanityTest.py b/tests/examples/Sl4fSanityTest.py
deleted file mode 100644
index cfd37e3..0000000
--- a/tests/examples/Sl4fSanityTest.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test to verify SL4F is running on a Fuchsia device and can communicate with
-antlion successfully.
-"""
-
-import logging
-
-from mobly import asserts, test_runner
-
-from antlion import base_test
-from antlion.controllers import fuchsia_device
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-
-
-class Sl4fSanityTest(base_test.AntlionBaseTest):
- def setup_class(self) -> None:
- self.log = logging.getLogger()
- self.fuchsia_devices: list[FuchsiaDevice] = self.register_controller(
- fuchsia_device
- )
-
- asserts.abort_class_if(
- len(self.fuchsia_devices) == 0,
- "Requires at least one Fuchsia device",
- )
-
- def test_example(self) -> None:
- for fuchsia_device in self.fuchsia_devices:
- res = fuchsia_device.honeydew_fd.netstack.list_interfaces()
- self.log.info(res)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/BUILD.gn b/tests/wlan/BUILD.gn
deleted file mode 100644
index a79ca51..0000000
--- a/tests/wlan/BUILD.gn
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-assert(is_host, "antlion tests only supported for host testing")
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- "compliance:e2e_tests",
- "facade:e2e_tests",
- "functional:e2e_tests",
- "misc:e2e_tests",
- "performance:e2e_tests",
- ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [
- "compliance:e2e_tests_quick",
- "functional:e2e_tests_quick",
- "performance:e2e_tests_quick",
- ]
-}
-
-group("e2e_tests_manual") {
- testonly = true
- public_deps = [
- "compliance:e2e_tests_manual",
- "functional:e2e_tests_manual",
- "performance:e2e_tests_manual",
- ]
-}
diff --git a/tests/wlan/compliance/BUILD.gn b/tests/wlan/compliance/BUILD.gn
deleted file mode 100644
index da0c7ab..0000000
--- a/tests/wlan/compliance/BUILD.gn
+++ /dev/null
@@ -1,611 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-# wlan phy n compliance tests
-antlion_host_test("wlan_phy_compliance_11n_2g_ht20_open_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_2\.4GHz_HT20_open_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_2g_ht40lower_open_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_2\.4GHz_HT40Lower_open_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_2g_ht40upper_open_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_2\.4GHz_HT40Upper_open_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_5g_ht20_open_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_5GHz_HT20_open_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_5g_ht40lower_open_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_5GHz_HT40Lower_open_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_5g_ht40upper_open_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_5GHz_HT40Upper_open_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_2g_ht20_wpa2_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_2\.4GHz_HT20_wpa2_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_2g_ht40lower_wpa2_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_2\.4GHz_HT40Lower_wpa2_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_2g_ht40upper_wpa2_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_2\.4GHz_HT40Upper_wpa2_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_5g_ht20_wpa2_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_5GHz_HT20_wpa2_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_5g_ht40lower_wpa2_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_5GHz_HT40Lower_wpa2_.*" ]
-}
-
-antlion_host_test("wlan_phy_compliance_11n_5g_ht40upper_wpa2_test") {
- main_source = "WlanPhyCompliance11NTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11n_5GHz_HT40Upper_wpa2_.*" ]
-}
-
-# wlan phy ac compliance tests
-# 20mhz open
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp0_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp1_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp2_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp3_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp4_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp5_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp6_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp7_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_disabled_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_open((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
-}
-
-# 20mhz wpa2
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp0_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp1_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp2_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp3_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp4_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp5_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp6_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp7_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_disabled_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_20mhz_wpa2((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
-}
-
-# 40mhz open
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp0_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp1_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp2_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp3_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp4_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp5_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp6_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp7_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_disabled_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_open((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
-}
-
-# 40mhz wpa2
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp0_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp1_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp2_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp3_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp4_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp5_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp6_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp7_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_disabled_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_40mhz_wpa2((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
-}
-
-# 80mhz open
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp0_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp1_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp2_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp3_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp4_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp5_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp6_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp7_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_disabled_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_open((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
-}
-
-# 80mhz wpa2
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp0_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp1_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp2_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp3_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp4_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp5_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp6_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp7_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
-}
-
-antlion_host_test(
- "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_disabled_test") {
- main_source = "WlanPhyCompliance11ACTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test_11ac_80mhz_wpa2((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
-}
-
-antlion_host_test("wlan_security_compliance_11a_test") {
- main_source = "WlanSecurityComplianceABGTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.*11a.*" ]
- timeout_secs = 1800
-}
-
-antlion_host_test("wlan_security_compliance_11bg_test") {
- main_source = "WlanSecurityComplianceABGTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.*11bg.*" ]
- timeout_secs = 1800
-}
-
-antlion_host_test("wlan_phy_compliance_abg_test") {
- main_source = "WlanPhyComplianceABGTest.py"
- environments = display_ap_envs
- timeout_secs = 1200
-}
-
-antlion_host_test("regulatory_compliance_test") {
- main_source = "RegulatoryComplianceTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("vape_interop_test") {
- main_source = "VapeInteropTest.py"
- environments = display_ap_envs
- timeout_secs = 900
-}
-
-antlion_host_test("wlan_security_compliance_abg_test_quick") {
- main_source = "WlanSecurityComplianceABGTest.py"
- environments = display_ap_envs
- test_cases = [
- "test_associate_11bg_sec_open_wep_26_hex_ptk_none",
- "test_associate_11bg_sec_wpa_psk_ptk_tkip",
- "test_associate_11bg_sec_wpa_psk_ptk_ccmp",
- "test_associate_11bg_sec_wpa2_psk_ptk_tkip",
- "test_associate_11bg_sec_wpa2_psk_ptk_ccmp",
- "test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp",
- "test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip",
- "test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp",
- "test_associate_11bg_sec_wpa3_sae_ptk_ccmp",
- "test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp",
- ]
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":vape_interop_test",
- ":wlan_phy_compliance_11n_2g_ht20_open_test",
- ":wlan_phy_compliance_11n_2g_ht20_wpa2_test",
- ":wlan_phy_compliance_11n_2g_ht40lower_open_test",
- ":wlan_phy_compliance_11n_2g_ht40lower_wpa2_test",
- ":wlan_phy_compliance_11n_2g_ht40upper_open_test",
- ":wlan_phy_compliance_11n_2g_ht40upper_wpa2_test",
- ":wlan_phy_compliance_11n_5g_ht20_open_test",
- ":wlan_phy_compliance_11n_5g_ht20_wpa2_test",
- ":wlan_phy_compliance_11n_5g_ht40lower_open_test",
- ":wlan_phy_compliance_11n_5g_ht40lower_wpa2_test",
- ":wlan_phy_compliance_11n_5g_ht40upper_open_test",
- ":wlan_phy_compliance_11n_5g_ht40upper_wpa2_test",
- ":wlan_phy_compliance_abg_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_disabled_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp0_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp1_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp2_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp3_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp4_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp5_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp6_test",
- ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp7_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_disabled_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp0_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp1_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp2_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp3_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp4_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp5_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp6_test",
- ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp7_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_disabled_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp0_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp1_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp2_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp3_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp4_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp5_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp6_test",
- ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp7_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_disabled_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp0_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp1_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp2_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp3_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp4_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp5_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp6_test",
- ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp7_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_disabled_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp0_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp1_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp2_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp3_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp4_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp5_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp6_test",
- ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp7_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_disabled_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp0_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp1_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp2_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp3_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp4_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp5_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp6_test",
- ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp7_test",
- ":wlan_security_compliance_11a_test",
- ":wlan_security_compliance_11bg_test",
- ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [ ":wlan_security_compliance_abg_test_quick" ]
-}
-
-group("e2e_tests_manual") {
- testonly = true
- public_deps = [
- # Running RegulatoryComplianceTest is usually only necessary when verifying
- # new WLAN firmware patches. Take it out of automation; it takes too long
- # otherwise.
- ":regulatory_compliance_test",
- ]
-}
diff --git a/tests/wlan/compliance/RegulatoryComplianceTest.py b/tests/wlan/compliance/RegulatoryComplianceTest.py
deleted file mode 100644
index 7dc52c9..0000000
--- a/tests/wlan/compliance/RegulatoryComplianceTest.py
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import NamedTuple
-
-from honeydew.affordances.connectivity.wlan.utils.types import CountryCode
-from mobly import asserts, test_runner
-from mobly.config_parser import TestRunConfig
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import SecurityMode
-from antlion.controllers.ap_lib.regulatory_channels import (
- COUNTRY_CHANNELS,
- TEST_CHANNELS,
-)
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-N_CAPABILITIES_DEFAULT = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
-]
-
-MAX_2_4_CHANNEL = 14
-
-
-class RegulatoryTest(NamedTuple):
- country_code: str
- channel: int
- channel_bandwidth: int
- expect_association: bool
-
-
-class RegulatoryComplianceTest(base_test.WifiBaseTest):
- """Tests regulatory compliance.
-
- Testbed Requirement:
- * 1 x Fuchsia device (dut)
- * 1 x access point
- """
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- self.access_point = self.access_points[0]
- self.access_point.stop_all_aps()
-
- self.regulatory_results = [
- "====CountryCode,Channel,Frequency,ChannelBandwith,Connected/Not-Connected===="
- ]
-
- def pre_run(self) -> None:
- tests: list[RegulatoryTest] = []
- for country in COUNTRY_CHANNELS.values():
- for channel, bandwidths in TEST_CHANNELS.items():
- for bandwidth in bandwidths:
- tests.append(
- RegulatoryTest(
- country_code=country.country_code,
- channel=channel,
- channel_bandwidth=bandwidth,
- expect_association=(
- channel in country.allowed_channels
- and bandwidth
- in country.allowed_channels[channel]
- ),
- )
- )
-
- def generate_test_name(
- country_code: str,
- channel: int,
- channel_bandwidth: int,
- _expect_association: bool,
- ) -> str:
- return (
- f"test_{country_code}_channel_{channel}_{channel_bandwidth}mhz"
- )
-
- self.generate_tests(
- self.verify_channel_compliance, generate_test_name, tests
- )
-
- def teardown_class(self) -> None:
- super().teardown_class()
-
- regulatory_save_path = f"{self.log_path}/regulatory_results.txt"
- with open(regulatory_save_path, "w", encoding="utf-8") as file:
- file.write("\n".join(self.regulatory_results))
-
- def setup_test(self) -> None:
- super().setup_test()
- self.access_point.stop_all_aps()
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
- self.dut.disconnect()
-
- def teardown_test(self) -> None:
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def setup_ap(
- self,
- channel: int,
- channel_bandwidth: int,
- ) -> str:
- """Start network on AP with basic configuration.
-
- Args:
-            channel: channel to use for the network
-            channel_bandwidth: channel bandwidth in MHz to use for the network
-
- Returns:
- SSID of the newly created and running network
-
- Raises:
- ConnectionError if network is not started successfully.
- """
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- try:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=channel,
- force_wmm=True,
- ssid=ssid,
- vht_bandwidth=channel_bandwidth,
- setup_bridge=True,
- )
- self.log.info(
- f"Network (ssid: {ssid}) up on channel {channel} "
- f"w/ channel bandwidth {channel_bandwidth} MHz"
- )
- return ssid
- except Exception as err:
- raise ConnectionError(
- f"Failed to setup ap on channel: {channel}, "
- f"channel bandwidth: {channel_bandwidth} MHz. "
- ) from err
-
- def verify_channel_compliance(
- self,
- country_code: str,
- channel: int,
- channel_bandwidth: int,
- expect_association: bool,
- ) -> None:
- """Verify device complies with provided regulatory requirements for a
- specific channel and channel bandwidth. Run with generated test cases
- in the verify_regulatory_compliance parent test.
- """
- self.fuchsia_device.wlan_controller.set_country_code(
- CountryCode(country_code)
- )
-
- ssid = self.setup_ap(channel, channel_bandwidth)
-
- self.log.info(
- f'Attempting to associate to network "{ssid}" on channel '
- f"{channel} @ {channel_bandwidth}mhz"
- )
-
- associated = self.dut.associate(ssid, SecurityMode.OPEN)
-
- channel_ghz = "2.4" if channel < 36 else "5"
- association_code = "c" if associated else "nc"
- regulatory_result = f"REGTRACKER: {country_code},{channel},{channel_ghz},{channel_bandwidth},{association_code}"
- self.regulatory_results.append(regulatory_result)
- self.log.info(regulatory_result)
-
- asserts.assert_true(
- associated == expect_association,
- f"Expected device to{'' if expect_association else ' NOT'} "
- f"associate using country code {country_code} for channel "
- f"{channel} with channel bandwidth {channel_bandwidth} MHz.",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/compliance/VapeInteropTest.py b/tests/wlan/compliance/VapeInteropTest.py
deleted file mode 100644
index 2d63070..0000000
--- a/tests/wlan/compliance/VapeInteropTest.py
+++ /dev/null
@@ -1,981 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-class VapeInteropTest(base_test.WifiBaseTest):
- """Tests interoperability with mock third party AP profiles.
-
- Test Bed Requirement:
- * One Android or Fuchsia Device
- * One Whirlwind Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
-
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- # Same for both 2g and 5g
- self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- self.password = utils.rand_ascii_str(
- hostapd_constants.AP_PASSPHRASE_LENGTH_2G
- )
- self.security_profile_wpa2 = Security(
- security_mode=SecurityMode.WPA2,
- password=self.password,
- wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
- )
-
- self.access_point.stop_all_aps()
-
- def setup_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
-
- def teardown_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- def test_associate_actiontec_pk5000_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="actiontec_pk5000",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_actiontec_pk5000_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="actiontec_pk5000",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_actiontec_mi424wr_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="actiontec_mi424wr",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_actiontec_mi424wr_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="actiontec_mi424wr",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac66u_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac66u_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac66u_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac66u_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac86u_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac86u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac86u_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac86u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac86u_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac86u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac86u_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac86u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac5300_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac5300",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac5300_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac5300",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac5300_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac5300",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtac5300_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtac5300",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn56u_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn56u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn56u_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn56u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn56u_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn56u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn56u_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn56u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn66u_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn66u_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn66u_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_asus_rtn66u_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="asus_rtn66u",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_belkin_f9k1001v5_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="belkin_f9k1001v5",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_belkin_f9k1001v5_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="belkin_f9k1001v5",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea4500_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea4500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea4500_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea4500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea4500_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea4500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea4500_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea4500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea9500_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea9500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea9500_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea9500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea9500_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea9500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_linksys_ea9500_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_ea9500",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_linksys_wrt1900acv2_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_wrt1900acv2",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_linksys_wrt1900acv2_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_wrt1900acv2",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_linksys_wrt1900acv2_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_wrt1900acv2",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_linksys_wrt1900acv2_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="linksys_wrt1900acv2",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_netgear_r7000_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_r7000",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_netgear_r7000_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_r7000",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_netgear_r7000_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_r7000",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_netgear_r7000_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_r7000",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_netgear_wndr3400_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_wndr3400",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_netgear_wndr3400_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_wndr3400",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_netgear_wndr3400_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_wndr3400",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_netgear_wndr3400_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="netgear_wndr3400",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_securifi_almond_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="securifi_almond",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_securifi_almond_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="securifi_almond",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc5_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc5",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc5_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc5",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc5_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc5",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc5_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc5",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc7_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc7",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc7_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc7",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc7_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc7",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_archerc7_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_archerc7",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_c1200_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_c1200",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_c1200_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_c1200",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_c1200_5ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_c1200",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_c1200_5ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_c1200",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
- def test_associate_tplink_tlwr940n_24ghz_open(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_tlwr940n",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- )
- asserts.assert_true(
- self.dut.associate(self.ssid, SecurityMode.OPEN),
- "Failed to connect.",
- )
-
- def test_associate_tplink_tlwr940n_24ghz_wpa2(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="tplink_tlwr940n",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile_wpa2,
- )
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- SecurityMode.WPA2,
- target_pwd=self.password,
- ),
- "Failed to connect.",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/compliance/WlanPhyCompliance11ACTest.py b/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
deleted file mode 100644
index e38b2ff..0000000
--- a/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
+++ /dev/null
@@ -1,310 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Any
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-# AC Capabilities
-"""
-Capabilities Not Supported on Whirlwind:
- - Supported Channel Width ([VHT160], [VHT160-80PLUS80]): 160mhz and 80+80
- unsupported
- - SU Beamformer [SU-BEAMFORMER]
- - SU Beamformee [SU-BEAMFORMEE]
- - MU Beamformer [MU-BEAMFORMER]
- - MU Beamformee [MU-BEAMFORMEE]
- - BF Antenna ([BF-ANTENNA-2], [BF-ANTENNA-3], [BF-ANTENNA-4])
- - Rx STBC 2, 3, & 4 ([RX-STBC-12],[RX-STBC-123],[RX-STBC-124])
- - VHT Link Adaptation ([VHT-LINK-ADAPT2],[VHT-LINK-ADAPT3])
- - VHT TXOP Power Save [VHT-TXOP-PS]
- - HTC-VHT [HTC-VHT]
-"""
-VHT_MAX_MPDU_LEN = [
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_7991,
- hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
- "",
-]
-RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, ""]
-SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, ""]
-TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, ""]
-RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, ""]
-MAX_A_MPDU = [
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6,
- hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
- "",
-]
-RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, ""]
-TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, ""]
-
-# Default 11N Capabilities
-N_CAPABS_40MHZ = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_SGI40,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- hostapd_constants.N_CAPABILITY_HT40_PLUS,
-]
-
-N_CAPABS_20MHZ = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
- hostapd_constants.N_CAPABILITY_HT20,
-]
-
-SECURITY_MODES: list[SecurityMode] = [SecurityMode.OPEN, SecurityMode.WPA2]
-
-
-@dataclass
-class TestParams:
- security_mode: SecurityMode
- vht_bandwidth_mhz: int
- # TODO(http://b/290396383): Type AP capabilities as enums
- n_capabilities: list[Any]
- ac_capabilities: list[Any]
-
-
-# 6912 test cases
-class WlanPhyCompliance11ACTest(base_test.WifiBaseTest):
- """Tests for validating 11ac PHYS.
-
- Test Bed Requirement:
- * One Android device or Fuchsia device
- * One Access Point
- """
-
- def pre_run(self) -> None:
- test_args: list[tuple[TestParams]] = (
- self._generate_20mhz_test_args()
- + self._generate_40mhz_test_args()
- + self._generate_80mhz_test_args()
- )
-
- def generate_test_name(test: TestParams) -> str:
- ret = []
- for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
- if cap in test.ac_capabilities:
- ret.append(
- hostapd_constants.AC_CAPABILITIES_MAPPING[cap]
- .replace("[", "_")
- .replace("]", "")
- )
- return f"test_11ac_{test.vht_bandwidth_mhz}mhz_{test.security_mode}{''.join(ret)}"
-
- self.generate_tests(
- test_logic=self.setup_and_connect,
- name_func=generate_test_name,
- arg_sets=test_args,
- )
-
- def setup_class(self) -> None:
- super().setup_class()
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass(
- "At least one access point is required"
- )
- self.access_point = self.access_points[0]
-
- self.dut = self.get_dut(AssociationMode.POLICY)
- self.access_point.stop_all_aps()
-
- def setup_test(self) -> None:
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
-
- def teardown_test(self) -> None:
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- def setup_and_connect(self, settings: TestParams) -> None:
- """Setup the AP and then attempt to associate a DUT.
-
- Args:
- settings: Test parameters
- """
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- security: Security | None = None
- password: str | None = None
-
- match settings.security_mode:
- case SecurityMode.OPEN:
- pass
- case SecurityMode.WPA2:
- password = generate_random_password(
- security_mode=SecurityMode.WPA2
- )
- security = Security(
- security_mode=SecurityMode.WPA2,
- password=password,
- wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
- wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
- )
- case _:
- raise signals.TestError(
- f"unsupported security_mode {settings.security_mode}"
- )
-
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- mode=hostapd_constants.Mode.MODE_11AC_MIXED,
- channel=36,
- n_capabilities=settings.n_capabilities,
- ac_capabilities=settings.ac_capabilities,
- force_wmm=True,
- ssid=ssid,
- security=security,
- vht_bandwidth=settings.vht_bandwidth_mhz,
- )
-
- with self.access_point.tcpdump.start(
- self.access_point.wlan_5g, Path(self.log_path)
- ):
- asserts.assert_true(
- self.dut.associate(
- ssid,
- target_pwd=password,
- target_security=settings.security_mode,
- ),
- "Failed to associate.",
- )
-
- # 1728 tests
- def _generate_20mhz_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
-
- # 864 test cases for open security
- # 864 test cases for wpa2 security
- for combination in itertools.product(
- SECURITY_MODES,
- VHT_MAX_MPDU_LEN,
- RXLDPC,
- RX_STBC,
- TX_STBC,
- MAX_A_MPDU,
- RX_ANTENNA,
- TX_ANTENNA,
- ):
- test_args.append(
- (
- TestParams(
- security_mode=combination[0],
- vht_bandwidth_mhz=20,
- n_capabilities=N_CAPABS_20MHZ,
- ac_capabilities=list(combination[1:]),
- ),
- )
- )
-
- return test_args
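Each TestParams is wrapped in a one-element tuple so that the test runner, which unpacks every argument set into positional arguments, passes exactly one object to setup_and_connect. The per-mode case counts quoted in the comments then follow from the product of the option-list lengths, since itertools.product enumerates every combination. A small illustrative check (the option lists and their sizes here are hypothetical, not the antlion values):

    import itertools

    security = ["open", "wpa2"]          # 2 options
    option_a = ["cap_a", ""]             # 2 options; "" means the capability is omitted
    option_b = ["cap_b0", "cap_b1", ""]  # 3 options

    combos = list(itertools.product(security, option_a, option_b))
    assert len(combos) == 2 * 2 * 3  # total cases = product of the option counts

    # Wrap each combination in a one-element tuple so the runner sees a single
    # argument per generated test case.
    arg_sets = [(combo,) for combo in combos]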
-
- # 1728 tests
- def _generate_40mhz_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
-
- # 864 test cases for open security
- # 864 test cases for wpa2 security
- for combination in itertools.product(
- SECURITY_MODES,
- VHT_MAX_MPDU_LEN,
- RXLDPC,
- RX_STBC,
- TX_STBC,
- MAX_A_MPDU,
- RX_ANTENNA,
- TX_ANTENNA,
- ):
- test_args.append(
- (
- TestParams(
- security_mode=combination[0],
- vht_bandwidth_mhz=40,
- n_capabilities=N_CAPABS_40MHZ,
- ac_capabilities=list(combination[1:]),
- ),
- )
- )
-
- return test_args
-
- # 3456 tests
- def _generate_80mhz_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
-
- # 1728 test cases for open security
- # 1728 test cases for wpa2 security
- for combination in itertools.product(
- SECURITY_MODES,
- VHT_MAX_MPDU_LEN,
- RXLDPC,
- SHORT_GI_80,
- RX_STBC,
- TX_STBC,
- MAX_A_MPDU,
- RX_ANTENNA,
- TX_ANTENNA,
- ):
- test_args.append(
- (
- TestParams(
- security_mode=combination[0],
- vht_bandwidth_mhz=80,
- n_capabilities=N_CAPABS_40MHZ,
- ac_capabilities=list(combination[1:]),
- ),
- )
- )
- return test_args
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/compliance/WlanPhyCompliance11NTest.py b/tests/wlan/compliance/WlanPhyCompliance11NTest.py
deleted file mode 100644
index 2f63f1c..0000000
--- a/tests/wlan/compliance/WlanPhyCompliance11NTest.py
+++ /dev/null
@@ -1,529 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-import logging
-from dataclasses import dataclass
-from typing import Any
-
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_config, hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-FREQUENCY_24: str = "2.4GHz"
-FREQUENCY_5: str = "5GHz"
-CHANNEL_BANDWIDTH_20: str = "HT20"
-CHANNEL_BANDWIDTH_40_LOWER: str = "HT40-"
-CHANNEL_BANDWIDTH_40_UPPER: str = "HT40+"
-SECURITY_OPEN = "open"
-SECURITY_WPA2 = "wpa2"
-N_MODE = [
- hostapd_constants.Mode.MODE_11N_PURE,
- hostapd_constants.Mode.MODE_11N_MIXED,
-]
-LDPC = [hostapd_constants.N_CAPABILITY_LDPC, ""]
-TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, ""]
-RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, ""]
-SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, ""]
-SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, ""]
-DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, ""]
-INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, ""]
-MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, ""]
-SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, ""]
-
-
-@dataclass
-class TestParams:
- frequency: str
- chbw: str
- n_mode: str
- security: SecurityMode
- # TODO(http://b/290396383): Type AP capabilities as enums
- n_capabilities: list[Any]
-
-
-class WlanPhyCompliance11NTest(base_test.WifiBaseTest):
-    """Tests for validating 11n PHYs.
-
- Test Bed Requirement:
- * One Android device or Fuchsia device
- * One Access Point
- """
-
- def __init__(self, config: TestRunConfig) -> None:
- super().__init__(config)
-
- def pre_run(self) -> None:
- test_args: list[tuple[TestParams]] = (
- self._generate_24_HT20_test_args()
- + self._generate_24_HT40_lower_test_args()
- + self._generate_24_HT40_upper_test_args()
- + self._generate_5_HT20_test_args()
- + self._generate_5_HT40_lower_test_args()
- + self._generate_5_HT40_upper_test_args()
- + self._generate_24_HT20_wpa2_test_args()
- + self._generate_24_HT40_lower_wpa2_test_args()
- + self._generate_24_HT40_upper_wpa2_test_args()
- + self._generate_5_HT20_wpa2_test_args()
- + self._generate_5_HT40_lower_wpa2_test_args()
- + self._generate_5_HT40_upper_wpa2_test_args()
- )
-
- def generate_test_name(test: TestParams) -> str:
- ret = []
- for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
- if cap in test.n_capabilities:
- ret.append(
- hostapd_constants.N_CAPABILITIES_MAPPING[cap]
- .replace("[", "_")
- .replace("]", "")
- )
-            # '+' is treated by Mobile Harness as a special character, so don't use it in test names.
- if test.chbw == "HT40-":
- chbw = "HT40Lower"
- elif test.chbw == "HT40+":
- chbw = "HT40Upper"
- else:
- chbw = test.chbw
- return f"test_11n_{test.frequency}_{chbw}_{test.security}_{test.n_mode}{''.join(ret)}"
-
- self.generate_tests(
- test_logic=self.setup_and_connect,
- name_func=generate_test_name,
- arg_sets=test_args,
- )
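The naming helper above has one extra job compared to the 11ac variant: it rewrites the channel-bandwidth label so that generated names contain no '+' characters, in addition to stripping the '[' and ']' that wrap hostapd capability strings. A simplified stand-alone version of that sanitization, omitting the n_mode component (the capability strings in the usage comment are made up for illustration):

    def sanitize_test_name(
        frequency: str, chbw: str, security: str, caps: list[str]
    ) -> str:
        """Build a test name containing no '[', ']', or '+' characters."""
        chbw = {"HT40-": "HT40Lower", "HT40+": "HT40Upper"}.get(chbw, chbw)
        suffix = "".join(c.replace("[", "_").replace("]", "") for c in caps)
        return f"test_11n_{frequency}_{chbw}_{security}{suffix}"

    # sanitize_test_name("2.4GHz", "HT40+", "open", ["[LDPC]", "[SHORT-GI-20]"])
    # -> "test_11n_2.4GHz_HT40Upper_open_LDPC_SHORT-GI-20"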
-
- def setup_class(self) -> None:
- super().setup_class()
-
- if len(self.access_points) < 1:
- logging.error("At least one access point is required for this test")
- raise signals.TestAbortClass(
- "At least one access point is required"
- )
-
- self.dut = self.get_dut(AssociationMode.POLICY)
-
-        self.access_point = self.access_points[0]
- self.access_point.stop_all_aps()
-
- def setup_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
-
- def teardown_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- def setup_and_connect(self, test: TestParams) -> None:
-        """Start hostapd and associate the DUT.
-
-        Args:
-            test: Test parameters, including frequency, channel bandwidth,
-                security mode, and the hostapd n_capabilities to enable.
-        """
- ssid = utils.rand_ascii_str(20)
- security_profile = Security()
- password: str | None = None
- n_capabilities = []
- for n_capability in test.n_capabilities:
- if n_capability in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
- n_capabilities.append(n_capability)
-
- if test.chbw == "HT20" or test.chbw == "HT40+":
- if test.frequency == "2.4GHz":
- channel = 1
- elif test.frequency == "5GHz":
- channel = 36
- else:
-                raise ValueError(f"Invalid frequency: {test.frequency}")
-
- elif test.chbw == "HT40-":
- if test.frequency == "2.4GHz":
- channel = 11
- elif test.frequency == "5GHz":
- channel = 60
- else:
- raise ValueError(f"Invalid frequency: {test.frequency}")
-
- else:
- raise ValueError(f"Invalid channel bandwidth: {test.chbw}")
-
- if test.chbw == "HT40-" or test.chbw == "HT40+":
- if hostapd_config.ht40_plus_allowed(channel):
- extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
- elif hostapd_config.ht40_minus_allowed(channel):
- extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
- else:
- raise ValueError(f"Invalid channel: {channel}")
- n_capabilities.append(extended_channel)
-
- if test.security is SecurityMode.WPA2:
- security_profile = Security(
- security_mode=SecurityMode.WPA2,
- password=generate_random_password(length=20),
- wpa_cipher="CCMP",
- wpa2_cipher="CCMP",
- )
- password = security_profile.password
-
- if test.n_mode not in N_MODE:
- raise ValueError(f"Invalid n-mode: {test.n_mode}")
-
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- mode=test.n_mode,
- channel=channel,
- n_capabilities=n_capabilities,
- ac_capabilities=[],
- force_wmm=True,
- ssid=ssid,
- security=security_profile,
- )
- asserts.assert_true(
- self.dut.associate(
- ssid,
- target_pwd=password,
- target_security=test.security,
- ),
- "Failed to connect.",
- )
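The channel selection in setup_and_connect boils down to a small lookup keyed on band and channel bandwidth; the extended-channel capability (HT40+ or HT40-) is then chosen by the hostapd_config helpers based on what the primary channel allows. The primary-channel part of that decision can be written as a table using the same channel numbers as the branches above (a sketch of the logic, not a replacement for it):

    # (frequency, channel bandwidth) -> primary channel, mirroring the branches above.
    PRIMARY_CHANNEL: dict[tuple[str, str], int] = {
        ("2.4GHz", "HT20"): 1,
        ("2.4GHz", "HT40+"): 1,
        ("2.4GHz", "HT40-"): 11,
        ("5GHz", "HT20"): 36,
        ("5GHz", "HT40+"): 36,
        ("5GHz", "HT40-"): 60,
    }

    def pick_channel(frequency: str, chbw: str) -> int:
        try:
            return PRIMARY_CHANNEL[(frequency, chbw)]
        except KeyError:
            raise ValueError(f"Unsupported combination: {frequency} {chbw}") from None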
-
- def _generate_24_HT20_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- N_MODE,
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- INTOLERANT_40,
- MAX_AMPDU_7935,
- SMPS,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_24,
- chbw=CHANNEL_BANDWIDTH_20,
- n_mode=combination[0],
- security=SecurityMode.OPEN,
- n_capabilities=list(combination[1:]),
- ),
- )
- )
- return test_args
-
- def _generate_24_HT40_lower_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_24,
- chbw=CHANNEL_BANDWIDTH_40_LOWER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.OPEN,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_24_HT40_upper_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_24,
- chbw=CHANNEL_BANDWIDTH_40_UPPER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.OPEN,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_5_HT20_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- INTOLERANT_40,
- MAX_AMPDU_7935,
- SMPS,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_5,
- chbw=CHANNEL_BANDWIDTH_20,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.OPEN,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_5_HT40_lower_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_5,
- chbw=CHANNEL_BANDWIDTH_40_LOWER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.OPEN,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_5_HT40_upper_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- N_MODE,
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_5,
- chbw=CHANNEL_BANDWIDTH_40_UPPER,
- n_mode=combination[0],
- security=SecurityMode.OPEN,
- n_capabilities=list(combination[1:]),
- ),
- )
- )
- return test_args
-
- def _generate_24_HT20_wpa2_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- INTOLERANT_40,
- MAX_AMPDU_7935,
- SMPS,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_24,
- chbw=CHANNEL_BANDWIDTH_20,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.WPA2,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_24_HT40_lower_wpa2_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_24,
- chbw=CHANNEL_BANDWIDTH_40_LOWER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.WPA2,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_24_HT40_upper_wpa2_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_24,
- chbw=CHANNEL_BANDWIDTH_40_UPPER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.WPA2,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_5_HT20_wpa2_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- INTOLERANT_40,
- MAX_AMPDU_7935,
- SMPS,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_5,
- chbw=CHANNEL_BANDWIDTH_20,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.WPA2,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_5_HT40_lower_wpa2_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_5,
- chbw=CHANNEL_BANDWIDTH_40_LOWER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.WPA2,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
- def _generate_5_HT40_upper_wpa2_test_args(self) -> list[tuple[TestParams]]:
- test_args: list[tuple[TestParams]] = []
- for combination in itertools.product(
- LDPC,
- TX_STBC,
- RX_STBC,
- SGI_20,
- SGI_40,
- MAX_AMPDU_7935,
- SMPS,
- DSSS_CCK,
- ):
- test_args.append(
- (
- TestParams(
- frequency=FREQUENCY_5,
- chbw=CHANNEL_BANDWIDTH_40_UPPER,
- n_mode=hostapd_constants.Mode.MODE_11N_MIXED,
- security=SecurityMode.WPA2,
- n_capabilities=list(combination),
- ),
- )
- )
- return test_args
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/compliance/WlanPhyComplianceABGTest.py b/tests/wlan/compliance/WlanPhyComplianceABGTest.py
deleted file mode 100644
index 5d7465d..0000000
--- a/tests/wlan/compliance/WlanPhyComplianceABGTest.py
+++ /dev/null
@@ -1,2093 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import SecurityMode
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-class WlanPhyComplianceABGTest(base_test.WifiBaseTest):
-    """Tests for validating 11a, 11b, and 11g PHYs.
-
- Test Bed Requirement:
- * One Android device or Fuchsia device
- * One Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
-
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
- open_network = self.get_open_network(False, [])
- open_network_min_len = self.get_open_network(
- False,
- [],
- ssid_length_2g=hostapd_constants.AP_SSID_MIN_LENGTH_2G,
- ssid_length_5g=hostapd_constants.AP_SSID_MIN_LENGTH_5G,
- )
- open_network_max_len = self.get_open_network(
- False,
- [],
- ssid_length_2g=hostapd_constants.AP_SSID_MAX_LENGTH_2G,
- ssid_length_5g=hostapd_constants.AP_SSID_MAX_LENGTH_5G,
- )
- self.open_network_2g = open_network["2g"]
- self.open_network_5g = open_network["5g"]
- self.open_network_max_len_2g = open_network_max_len["2g"]
- self.open_network_max_len_2g["SSID"] = self.open_network_max_len_2g[
- "SSID"
- ][3:]
- self.open_network_max_len_5g = open_network_max_len["5g"]
- self.open_network_max_len_5g["SSID"] = self.open_network_max_len_5g[
- "SSID"
- ][3:]
- self.open_network_min_len_2g = open_network_min_len["2g"]
- self.open_network_min_len_2g["SSID"] = self.open_network_min_len_2g[
- "SSID"
- ][3:]
- self.open_network_min_len_5g = open_network_min_len["5g"]
- self.open_network_min_len_5g["SSID"] = self.open_network_min_len_5g[
- "SSID"
- ][3:]
-
- self.utf8_ssid_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
- self.utf8_ssid_5g = "5𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
-
- self.utf8_ssid_2g_french = "Château du Feÿ"
- self.utf8_password_2g_french = "du Feÿ Château"
-
- self.utf8_ssid_2g_german = "Rat für Straßenatlas"
- self.utf8_password_2g_german = "für Straßenatlas Rat"
-
- self.utf8_ssid_2g_dutch = "Die niet óúd, is níéuw!"
- self.utf8_password_2g_dutch = "niet óúd, is níéuw! Die"
-
- self.utf8_ssid_2g_swedish = "Det är femtioåtta"
- self.utf8_password_2g_swedish = "femtioåtta Det är"
-
- self.utf8_ssid_2g_norwegian = "Curaçao ØÆ æ å å å"
- self.utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å"
-
-        # Danish and Norwegian have the same alphabet
- self.utf8_ssid_2g_danish = self.utf8_ssid_2g_norwegian
- self.utf8_password_2g_danish = self.utf8_password_2g_norwegian
-
- self.utf8_ssid_2g_japanese = "あなた はお母さん"
- self.utf8_password_2g_japanese = "そっくりね。あな"
-
- self.utf8_ssid_2g_spanish = "¡No á,é,í,ó,ú,ü,ñ,¿,¡"
- self.utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No"
-
- self.utf8_ssid_2g_italian = "caffè Pinocchio è italiano?"
- self.utf8_password_2g_italian = "Pinocchio è italiano? caffè"
-
- self.utf8_ssid_2g_korean = "ㅘㅙㅚㅛㅜㅝㅞㅟㅠ"
- self.utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ"
-
- self.access_point.stop_all_aps()
-
- def setup_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
-
- def teardown_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- def test_associate_11b_only_long_preamble(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- preamble=False,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_short_preamble(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- preamble=True,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_minimal_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- beacon_interval=15,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_maximum_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- beacon_interval=1024,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_frag_threshold_430(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- frag_threshold=430,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_rts_threshold_256(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- rts_threshold=256,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_rts_256_frag_430(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- rts_threshold=256,
- frag_threshold=430,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_high_dtim_low_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- dtim_period=3,
- beacon_interval=100,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_low_dtim_high_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- dtim_period=1,
- beacon_interval=300,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_with_default_values(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_with_non_default_values(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_BK(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
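Each of these WMM ACM variants builds its hostapd parameter set the same way: the default WMM parameter dictionary is overlaid with one or more ACM dictionaries using Python's dict union operator, where keys from the right-hand operand win. A minimal sketch with stand-in dictionaries (the key names here are illustrative; the real dictionaries live in hostapd_constants):

    # Hypothetical stand-ins for hostapd_constants.WMM_* dictionaries.
    wmm_defaults = {"wmm_enabled": "1", "wmm_ac_bk_acm": "0", "wmm_ac_be_acm": "0"}
    acm_bk = {"wmm_ac_bk_acm": "1"}
    acm_be = {"wmm_ac_be_acm": "1"}

    # dict union (Python 3.9+): later operands override matching keys.
    params = wmm_defaults | acm_bk | acm_be
    assert params == {"wmm_enabled": "1", "wmm_ac_bk_acm": "1", "wmm_ac_be_acm": "1"}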
-
- def test_associate_11b_only_with_WMM_ACM_on_BE(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_VI(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VI
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VI(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_BK_VI_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_WMM_ACM_on_BE_VI_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_11B_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_country_code(self) -> None:
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_non_country_code(self) -> None:
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_hidden_ssid(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- hidden=True,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_vendor_ie_in_beacon_correct_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_vendor_ie_in_beacon_zero_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_vendor_ie_in_assoc_correct_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_association_response"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11b_only_with_vendor_ie_in_assoc_zero_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
-                "zero_length_association_response_without_data"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_long_preamble(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- preamble=False,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_short_preamble(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- preamble=True,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_minimal_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- beacon_interval=15,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_maximum_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- beacon_interval=1024,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_frag_threshold_430(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- frag_threshold=430,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_rts_threshold_256(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- rts_threshold=256,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_rts_256_frag_430(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- rts_threshold=256,
- frag_threshold=430,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_high_dtim_low_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- dtim_period=3,
- beacon_interval=100,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_low_dtim_high_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- dtim_period=1,
- beacon_interval=300,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_with_default_values(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_with_non_default_values(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_BK(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_BE(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_VI(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VI
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VI(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_BK_VI_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_WMM_ACM_on_BE_VI_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_country_code(self) -> None:
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_non_country_code(self) -> None:
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_hidden_ssid(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- hidden=True,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_vendor_ie_in_beacon_correct_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_vendor_ie_in_beacon_zero_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_vendor_ie_in_assoc_correct_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_association_response"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11a_only_with_vendor_ie_in_assoc_zero_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
-                "zero_length_association_response_without_data"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_5g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_long_preamble(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- preamble=False,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_short_preamble(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- preamble=True,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_minimal_beacon_interval(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- beacon_interval=15,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_maximum_beacon_interval(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- beacon_interval=1024,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_frag_threshold_430(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- frag_threshold=430,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_rts_threshold_256(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- rts_threshold=256,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_rts_256_frag_430(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- rts_threshold=256,
- frag_threshold=430,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_high_dtim_low_beacon_interval(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- dtim_period=3,
- beacon_interval=100,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_low_dtim_high_beacon_interval(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- dtim_period=1,
- beacon_interval=300,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_with_default_values(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- | hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_with_non_default_values(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- | hostapd_constants.WMM_NON_DEFAULT_PARAMS
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_BK(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_BE(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_VI(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VI
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_VO(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VO
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VI(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VO(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VO
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_BK_VI_VO(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_WMM_ACM_on_BE_VI_VO(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_country_code(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_non_country_code(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
- | data_rates
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_hidden_ssid(self) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- hidden=True,
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_vendor_ie_in_beacon_correct_length(
- self,
- ) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- | hostapd_constants.VENDOR_IE["correct_length_beacon"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_vendor_ie_in_beacon_zero_length(
- self,
- ) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- | hostapd_constants.VENDOR_IE["zero_length_beacon_without_data"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(
- self,
- ) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- | hostapd_constants.VENDOR_IE["correct_length_association_response"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(
- self,
- ) -> None:
- data_rates = (
- hostapd_constants.OFDM_DATA_RATES
- | hostapd_constants.OFDM_ONLY_BASIC_RATES
- | hostapd_constants.VENDOR_IE["correct_length_association_response"]
- | hostapd_constants.VENDOR_IE[
- "zero_length_association_" "response_without_data"
- ]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=data_rates,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_only_long_preamble(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- preamble=False,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_short_preamble(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- preamble=True,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_minimal_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- beacon_interval=15,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_maximum_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- beacon_interval=1024,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_frag_threshold_430(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- frag_threshold=430,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_rts_threshold_256(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- rts_threshold=256,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_rts_256_frag_430(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- rts_threshold=256,
- frag_threshold=430,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_high_dtim_low_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- dtim_period=3,
- beacon_interval=100,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_low_dtim_high_beacon_interval(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- dtim_period=1,
- beacon_interval=300,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_with_default_values(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_with_non_default_values(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_BK(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_BE(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_VI(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VI
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_BK_BE_VI(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_BK_BE_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_BK_VI_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BK
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_WMM_ACM_on_BE_VI_VO(self) -> None:
- wmm_acm_bits_enabled = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_BE
- | hostapd_constants.WMM_ACM_VI
- | hostapd_constants.WMM_ACM_VO
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- force_wmm=True,
- additional_ap_parameters=wmm_acm_bits_enabled,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_country_code(self) -> None:
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_non_country_code(self) -> None:
- country_info = (
- hostapd_constants.ENABLE_IEEE80211D
- | hostapd_constants.COUNTRY_STRING["ALL"]
- | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=country_info,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_only_with_hidden_ssid(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- hidden=True,
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ag_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- )
- asserts.assert_true(
- self.dut.associate(self.open_network_2g["SSID"], SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_minimum_ssid_length_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_min_len_2g["SSID"],
- )
- asserts.assert_true(
- self.dut.associate(
- self.open_network_min_len_2g["SSID"], SecurityMode.OPEN
- ),
- "Failed to associate.",
- )
-
- def test_minimum_ssid_length_5g_11ac_80mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_min_len_5g["SSID"],
- )
- asserts.assert_true(
- self.dut.associate(
- self.open_network_min_len_5g["SSID"], SecurityMode.OPEN
- ),
- "Failed to associate.",
- )
-
- def test_maximum_ssid_length_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_max_len_2g["SSID"],
- )
- asserts.assert_true(
- self.dut.associate(
- self.open_network_max_len_2g["SSID"], SecurityMode.OPEN
- ),
- "Failed to associate.",
- )
-
- def test_maximum_ssid_length_5g_11ac_80mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_max_len_5g["SSID"],
- )
- asserts.assert_true(
- self.dut.associate(
- self.open_network_max_len_5g["SSID"], SecurityMode.OPEN
- ),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_5g_11ac_80mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.utf8_ssid_5g,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_5g, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_french_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_french,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_french, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_german_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_german,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_german, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_dutch_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_dutch,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_dutch, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_swedish_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_swedish,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_swedish, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_norwegian_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_norwegian,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_norwegian, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_danish_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_danish,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_danish, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_japanese_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_japanese,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_japanese, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_spanish_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_spanish,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_spanish, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_italian_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_italian,
- )
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_italian, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_ssid_with_UTF8_characters_korean_2g_11n_20mhz(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind_11ab_legacy",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.utf8_ssid_2g_korean,
- )
-
- asserts.assert_true(
- self.dut.associate(self.utf8_ssid_2g_korean, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/compliance/WlanSecurityComplianceABGTest.py b/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
deleted file mode 100644
index 672a942..0000000
--- a/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
+++ /dev/null
@@ -1,8258 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import annotations
-
-import re
-from functools import wraps
-from typing import Callable
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy"
-SSID_LENGTH_DEFAULT = 15
-
-
-def create_security_profile(
- test_func: Callable[[WlanSecurityComplianceABGTest], None]
-) -> Callable[[WlanSecurityComplianceABGTest], None]:
- """Decorator for generating hostapd security profile object based on the
- test name.
- Args:
- test_func: The test function
- Returns:
- security_profile_generator: The function that generates the security
- profile object
- """
-
- @wraps(test_func)
- def security_profile_generator(self: WlanSecurityComplianceABGTest) -> None:
- """Function that looks at the name of the function and determines what
- the security profile should be based on what items are in the name
-
- Example: A function with the name sec_wpa_wpa2_ptk_ccmp_tkip would
- return a security profile that has wpa and wpa2 configure with a
- ptk cipher of ccmp or tkip. Removing one of those options would
- drop it from the config.
-
- Args:
- *args: args that were sent to the original test function
- **kwargs: kwargs that were sent to the original test function
- Returns:
- The original function that was called
- """
- utf8_password_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
- utf8_password_2g_french = "du Feÿ Château"
- utf8_password_2g_german = "für Straßenatlas Rat"
- utf8_password_2g_dutch = "niet óúd, is níéuw! Die"
- utf8_password_2g_swedish = "femtioåtta Det är"
- utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å"
-        # Danish and Norwegian have the same alphabet
- utf8_password_2g_danish = utf8_password_2g_norwegian
- utf8_password_2g_japanese = "そっくりね。あな"
- utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No"
- utf8_password_2g_italian = "Pinocchio è italiano? caffè"
- utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ"
-
- security = re.search(r"sec(.*?)ptk_(.*)", test_func.__name__)
- if security is None:
- raise TypeError(
- f'Test name does not match expected pattern: "{test_func.__name__}"'
- )
-
- security_mode_raw = security.group(1)
- ptk_type = security.group(2)
- wpa_cipher: str | None = None
- wpa2_cipher: str | None = None
-
- if "_wpa_wpa2_wpa3_" in security_mode_raw:
- security_mode = SecurityMode.WPA_WPA2_WPA3
- elif "_wpa_wpa2_" in security_mode_raw:
- security_mode = SecurityMode.WPA_WPA2
- elif "_wpa2_wpa3_" in security_mode_raw:
- security_mode = SecurityMode.WPA2_WPA3
- elif "_wep_" in security_mode_raw:
- if self.dut.has_wep_support:
- security_mode = SecurityMode.WEP
- else:
- raise signals.TestSkip("DUT does not support WEP security")
- elif "_wpa_" in security_mode_raw:
- if self.dut.has_wpa_support:
- security_mode = SecurityMode.WPA
- else:
- raise signals.TestSkip("DUT does not support WPA security")
- elif "_wpa2_" in security_mode_raw:
- security_mode = SecurityMode.WPA2
- elif "_wpa3_" in security_mode_raw:
- security_mode = SecurityMode.WPA3
- else:
- raise TypeError(
- f'Security mode "{security_mode_raw}" not supported'
- )
-
- if "tkip" in ptk_type and "ccmp" in ptk_type:
- wpa_cipher = "TKIP CCMP"
- wpa2_cipher = "TKIP CCMP"
- elif "tkip" in ptk_type:
- wpa_cipher = "TKIP"
- wpa2_cipher = "TKIP"
- elif "ccmp" in ptk_type:
- wpa_cipher = "CCMP"
- wpa2_cipher = "CCMP"
- if "max_length_password" in test_func.__name__:
- password = generate_random_password(
- length=hostapd_constants.MAX_WPA_PASSWORD_LENGTH
- )
- elif "max_length_psk" in test_func.__name__:
- password = str(
- generate_random_password(
- length=hostapd_constants.MAX_WPA_PSK_LENGTH, hex=True
- )
- ).lower()
- elif "wep_5_chars" in test_func.__name__:
- password = generate_random_password(length=5)
- elif "wep_13_chars" in test_func.__name__:
- password = generate_random_password(length=13)
- elif "wep_10_hex" in test_func.__name__:
- password = str(
- generate_random_password(length=10, hex=True)
- ).lower()
- elif "wep_26_hex" in test_func.__name__:
- password = str(
- generate_random_password(length=26, hex=True)
- ).lower()
- elif "utf8" in test_func.__name__:
- if "french" in test_func.__name__:
- password = utf8_password_2g_french
- elif "german" in test_func.__name__:
- password = utf8_password_2g_german
- elif "dutch" in test_func.__name__:
- password = utf8_password_2g_dutch
- elif "swedish" in test_func.__name__:
- password = utf8_password_2g_swedish
- elif "norwegian" in test_func.__name__:
- password = utf8_password_2g_norwegian
- elif "danish" in test_func.__name__:
- password = utf8_password_2g_danish
- elif "japanese" in test_func.__name__:
- password = utf8_password_2g_japanese
- elif "spanish" in test_func.__name__:
- password = utf8_password_2g_spanish
- elif "italian" in test_func.__name__:
- password = utf8_password_2g_italian
- elif "korean" in test_func.__name__:
- password = utf8_password_2g_korean
- else:
- password = utf8_password_2g
- else:
- password = generate_random_password()
-
- self.security_profile = Security(
- security_mode=security_mode,
- password=password,
- wpa_cipher=wpa_cipher,
- wpa2_cipher=wpa2_cipher,
- )
- self.client_password = password
- self.target_security = security_mode
- self.ssid = utils.rand_ascii_str(SSID_LENGTH_DEFAULT)
-
- test_func(self)
-
- return security_profile_generator
-
-
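For reference, the decorator deleted above derives the entire hostapd security
configuration from the test method's name. The following is a rough, standalone
sketch of that parsing step; the helper name parse_security_from_test_name and
the simplified mode strings it returns are illustrative only, not antlion APIs,
though the regex and the order of the mode checks mirror the deleted code.

import re


def parse_security_from_test_name(name: str) -> tuple[str, str | None]:
    """Extract the security mode and PTK cipher hints encoded in a test name.

    The text between "sec" and "ptk_" selects the mode; the remainder selects
    the cipher(s).
    """
    match = re.search(r"sec(.*?)ptk_(.*)", name)
    if match is None:
        raise TypeError(f"Test name does not match expected pattern: {name!r}")
    mode_raw, ptk_raw = match.group(1), match.group(2)

    # Check the longest mode combinations first so "_wpa_wpa2_" is not
    # mistaken for plain WPA or WPA2.
    if "_wpa_wpa2_wpa3_" in mode_raw:
        mode = "WPA/WPA2/WPA3"
    elif "_wpa_wpa2_" in mode_raw:
        mode = "WPA/WPA2"
    elif "_wpa2_wpa3_" in mode_raw:
        mode = "WPA2/WPA3"
    elif "_wep_" in mode_raw:
        mode = "WEP"
    elif "_wpa_" in mode_raw:
        mode = "WPA"
    elif "_wpa2_" in mode_raw:
        mode = "WPA2"
    elif "_wpa3_" in mode_raw:
        mode = "WPA3"
    else:
        raise TypeError(f'Security mode "{mode_raw}" not supported')

    # Both substrings may appear, in which case hostapd gets "TKIP CCMP".
    if "tkip" in ptk_raw and "ccmp" in ptk_raw:
        cipher: str | None = "TKIP CCMP"
    elif "tkip" in ptk_raw:
        cipher = "TKIP"
    elif "ccmp" in ptk_raw:
        cipher = "CCMP"
    else:
        cipher = None
    return mode, cipher


# The example from the docstring above:
assert parse_security_from_test_name(
    "test_associate_11a_sec_wpa_wpa2_ptk_ccmp_tkip"
) == ("WPA/WPA2", "TKIP CCMP")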
-class WlanSecurityComplianceABGTest(base_test.WifiBaseTest):
- """Tests for validating 11a, 11b, and 11g PHYS.
-
- Test Bed Requirement:
- * One Android device or Fuchsia device
- * One Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- self.ssid: str
- self.target_security: SecurityMode
- self.security_profile: Security
- self.client_password: str
-
- self.access_point.stop_all_aps()
-
- def setup_test(self) -> None:
- super().setup_test()
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
-
- def teardown_test(self) -> None:
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- @create_security_profile
- def test_associate_11a_sec_open_wep_5_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_open_wep_13_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_open_wep_10_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_open_wep_26_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_shared_wep_5_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_shared_wep_13_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_shared_wep_10_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_shared_wep_26_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
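-    # PMF (802.11w) variants of the WPA2-PSK tests. The TKIP-only cases expect
-    # association to fail because Fuchsia does not support TKIP together with
-    # PMF; the CCMP and TKIP-or-CCMP cases expect association to succeed.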
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
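-    # Mixed WPA/WPA2-PSK variants of the 11a association tests (thresholds,
-    # DTIM/beacon interval, WMM, and vendor IE configurations).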
- @create_security_profile
- def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
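-    # WPA3-SAE variants. Only CCMP and TKIP-or-CCMP pairwise suites are
-    # exercised here.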
- @create_security_profile
- def test_associate_11a_sec_wpa3_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa3_sae_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa3_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa3_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
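-    # Mixed WPA2/WPA3 (PSK and SAE) variants, with and without PMF.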
- @create_security_profile
- def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
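-    # Mixed WPA/WPA2/WPA3 (PSK and SAE) variants of the 11a association tests.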
- @create_security_profile
- def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_open_wep_5_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_open_wep_13_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_open_wep_10_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_open_wep_26_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_shared_wep_5_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_shared_wep_13_chars_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_shared_wep_10_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_shared_wep_26_hex_ptk_none(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
-            dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_false(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Expected failure to associate. This device must support TKIP and "
- "PMF, which is not supported on Fuchsia. If this device is a "
- "mainstream device, we need to reconsider adding support for TKIP "
- "and PMF on Fuchsia.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
-            dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa3_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa3_sae_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- rts_threshold=256,
- frag_threshold=430,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.HIGH_DTIM,
- beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- dtim_period=hostapd_constants.LOW_DTIM,
- beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- force_wmm=True,
- additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "correct_length_beacon"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "zero_length_beacon_without_data"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- additional_ap_parameters=hostapd_constants.VENDOR_IE[
- "simliar_to_wpa"
- ],
- security=self.security_profile,
- pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_password_11bg_sec_wpa2_psk_ptk_ccmp(self) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_french_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_german_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_dutch_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_swedish_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_norwegian_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_danish_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_japanese_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_spanish_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_italian_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
- @create_security_profile
- def test_associate_utf8_korean_password_11bg_sec_wpa2_psk_ptk_ccmp(
- self,
- ) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name=AP_11ABG_PROFILE_NAME,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- security=self.security_profile,
- force_wmm=False,
- )
-
- asserts.assert_true(
- self.dut.associate(
- self.ssid,
- target_security=self.target_security,
- target_pwd=self.client_password,
- ),
- "Failed to associate.",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/facade/BUILD.gn b/tests/wlan/facade/BUILD.gn
deleted file mode 100644
index 7bf2919..0000000
--- a/tests/wlan/facade/BUILD.gn
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("wlan_deprecated_configuration_test") {
- main_source = "WlanDeprecatedConfigurationTest.py"
- environments = display_envs
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":wlan_deprecated_configuration_test",
- ]
-}
diff --git a/tests/wlan/facade/WlanDeprecatedConfigurationTest.py b/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
deleted file mode 100644
index a0d258c..0000000
--- a/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-import fidl_fuchsia_wlan_common as f_wlan_common
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectivityMode,
- OperatingBand,
- SecurityType,
-)
-from mobly import asserts, test_runner
-from mobly.config_parser import TestRunConfig
-
-from antlion import utils
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-AP_ROLE = "Ap"
-DEFAULT_SSID = "testssid"
-TEST_MAC_ADDR = "12:34:56:78:9a:bc"
-TEST_MAC_ADDR_SECONDARY = "bc:9a:78:56:34:12"
-
-
-class WlanDeprecatedConfigurationTest(base_test.WifiBaseTest):
- """Tests for WlanDeprecatedConfigurationFacade"""
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- def setup_test(self) -> None:
- super().setup_test()
- self._stop_soft_aps()
-
- def teardown_test(self) -> None:
- self._stop_soft_aps()
- super().teardown_test()
-
- def _get_ap_interface_mac_address(self) -> str:
- """Retrieves mac address from wlan interface with role ap
-
- Returns:
- string, the mac address of the AP interface
-
- Raises:
- ConnectionError, if SL4F calls fail
- AttributeError, if no interface has role 'Ap'
- """
- for wlan_iface in self.dut.get_wlan_interface_id_list():
- result = self.fuchsia_device.honeydew_fd.wlan_core.query_iface(
- wlan_iface
- )
- if result.role is f_wlan_common.WlanMacRole.AP:
- return utils.mac_address_list_to_str(bytes(result.sta_addr))
- raise AttributeError(
- "Failed to get ap interface mac address. No AP interface found."
- )
-
- def _start_soft_ap(self) -> None:
- """Starts SoftAP on DUT.
-
- Raises:
- ConnectionError, if SL4F call fails.
- """
- self.log.info("Starting SoftAP on device %s", self.dut.identifier)
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.start(
- DEFAULT_SSID,
- SecurityType.NONE,
- None,
- ConnectivityMode.LOCAL_ONLY,
- OperatingBand.ANY,
- )
-
- def _stop_soft_aps(self) -> None:
- """Stops SoftAP on DUT.
-
- Raises:
- ConnectionError, if SL4F call fails.
- """
- self.log.info("Stopping SoftAP.")
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.stop_all()
-
- def _suggest_ap_mac_addr(self, mac_addr: str) -> None:
- """Suggests mac address for AP interface.
- Args:
- mac_addr: string, mac address to suggest.
-
- Raises:
- TestFailure, if SL4F call fails.
- """
- self.log.info(
- "Suggesting AP mac addr (%s) via wlan_deprecated_configuration_lib.",
- mac_addr,
- )
- response = self.fuchsia_device.sl4f.wlan_deprecated_configuration_lib.wlanSuggestAccessPointMacAddress(
- mac_addr
- )
- if response.get("error"):
- asserts.fail(
- f"Failed to suggest AP mac address ({mac_addr}): {response['error']}"
- )
-
- def _verify_mac_addr(self, expected_addr: str) -> None:
- """Verifies mac address of ap interface is set to expected mac address.
-
-        Args:
-            expected_addr: string, expected mac address
-
- Raises:
- TestFailure, if actual mac address is not expected mac address.
- """
- set_mac_addr = self._get_ap_interface_mac_address()
- if set_mac_addr != expected_addr:
- asserts.fail(
- f"Failed to set AP mac address via wlan_deprecated_configuration_lib. "
- f"Expected mac addr: {expected_addr}, Actual mac addr: {set_mac_addr}"
- )
- else:
- self.log.info(f"AP mac address successfully set to {expected_addr}")
-
- def test_suggest_ap_mac_address(self) -> None:
- """Tests suggest ap mac address SL4F call
-
- 1. Get initial mac address
- 2. Suggest new mac address
- 3. Verify new mac address is set successfully
- 4. Reset to initial mac address
- 5. Verify initial mac address is reset successfully
-
-        Raises:
-            TestFailure, if wlanSuggestAccessPointMacAddress call fails or
-                if the mac address is not the suggested value
- ConnectionError, if other SL4F calls fail
- """
- # Retrieve initial ap mac address
- self._start_soft_ap()
-
- self.log.info("Getting initial mac address.")
- initial_mac_addr = self._get_ap_interface_mac_address()
- self.log.info(f"Initial mac address: {initial_mac_addr}")
-
- if initial_mac_addr != TEST_MAC_ADDR:
- suggested_mac_addr = TEST_MAC_ADDR
- else:
- suggested_mac_addr = TEST_MAC_ADDR_SECONDARY
-
- self._stop_soft_aps()
-
- # Suggest and verify new mac address
- self._suggest_ap_mac_addr(suggested_mac_addr)
-
- self._start_soft_ap()
-
- self._verify_mac_addr(suggested_mac_addr)
-
- self._stop_soft_aps()
-
- # Reset to initial mac address and verify
- self.log.info(f"Resetting to initial mac address ({initial_mac_addr}).")
- self._suggest_ap_mac_addr(initial_mac_addr)
-
- self._start_soft_ap()
-
- self._verify_mac_addr(initial_mac_addr)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/BUILD.gn b/tests/wlan/functional/BUILD.gn
deleted file mode 100644
index 2922af9..0000000
--- a/tests/wlan/functional/BUILD.gn
+++ /dev/null
@@ -1,180 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("beacon_loss_test") {
- main_source = "BeaconLossTest.py"
- environments = display_ap_envs
- timeout_secs = 900
-}
-
-antlion_host_test("channel_switch_test") {
- main_source = "ChannelSwitchTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("connection_stress_test") {
- main_source = "ConnectionStressTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("ping_stress_test") {
- main_source = "PingStressTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("soft_ap_test") {
- main_source = "SoftApTest.py"
-
-  # Requires one Fuchsia device and one Android device. There are no
- # infra-hosted environments to run this test on. Will likely remain an at-desk
- # test for as long as it requires an Android device.
- environments = []
-}
-
-antlion_host_test("wlan_driver_restart_test") {
- main_source = "WlanDriverRestartTest.py"
- environments = [
- nuc7_env,
- nuc11_env,
- ]
- test_data_deps =
- [ "//src/developer/ffx/tools/driver:ffx_driver_tool_test_data" ]
-}
-
-antlion_host_test("wlan_policy_initiated_roam_test") {
- main_source = "WlanPolicyInitiatedRoamTest.py"
- environments = display_ap_envs
-}
-
-# wlan reboot tests
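-# Each target below runs WlanRebootTest.py with a test_cases filter. The "re:"
-# entries appear to be regular-expression selectors; for example,
-# "re:test.+ap.+open.+" matches names such as test_soft_reboot_ap_5g_open_ipv4,
-# the case the quick target below lists explicitly.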
-# ap tests
-antlion_host_test("wlan_reboot_ap_open_test") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.+ap.+open.+" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_reboot_ap_wpa2_test") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.+ap.+wpa2.+" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_reboot_ap_wpa3_test") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.+ap.+wpa3.+" ]
- deps = [ "//third_party/iperf" ]
-}
-
-# dut tests
-antlion_host_test("wlan_reboot_dut_open_test") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.+dut.+open.+" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_reboot_dut_wpa2_test") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.+dut.+wpa2.+" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_reboot_dut_wpa3_test") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "re:test.+dut.+wpa3.+" ]
- deps = [ "//third_party/iperf" ]
-}
-
-# quick tests
-antlion_host_test("wlan_reboot_ap_test_quick") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "test_soft_reboot_ap_5g_open_ipv4" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_reboot_dut_test_quick") {
- main_source = "WlanRebootTest.py"
- environments = display_ap_envs
- test_cases = [ "test_soft_reboot_dut_5g_open_ipv4" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_scan_test") {
- main_source = "WlanScanTest.py"
- environments = display_ap_envs
-}
-
-# iwlwifi AX201 does not support WPA2 yet.
-# TODO(b/328494216): Remove this target, then add nuc11_env to wlan_scan_test.
-antlion_host_test("wlan_scan_test_without_wpa2") {
- main_source = "WlanScanTest.py"
- environments = [ nuc11_ap_env ]
- test_cases = [
- "test_basic_scan_request",
- "test_scan_while_connected_open_network_2g",
- "test_scan_while_connected_open_network_5g",
- ]
-}
-
-antlion_host_test("wlan_target_security_test") {
- main_source = "WlanTargetSecurityTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("wlan_wireless_network_management_test") {
- main_source = "WlanWirelessNetworkManagementTest.py"
- environments = display_ap_envs
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":beacon_loss_test",
- ":channel_switch_test",
- ":ping_stress_test",
- ":wlan_reboot_ap_open_test",
- ":wlan_reboot_ap_test_quick",
- ":wlan_reboot_ap_wpa2_test",
- ":wlan_reboot_ap_wpa3_test",
- ":wlan_reboot_dut_open_test",
- ":wlan_reboot_dut_wpa2_test",
- ":wlan_reboot_dut_wpa3_test",
- ":wlan_scan_test",
- ":wlan_scan_test_without_wpa2",
- ":wlan_target_security_test",
- ":wlan_wireless_network_management_test",
- ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [
- ":ping_stress_test",
- ":wlan_driver_restart_test",
- ":wlan_reboot_ap_test_quick",
- ":wlan_reboot_dut_test_quick",
- ":wlan_scan_test_without_wpa2",
- ]
-}
-
-# Tests that are disabled in automation
-group("e2e_tests_manual") {
- testonly = true
- public_deps = [
- ":soft_ap_test",
- ":wlan_policy_initiated_roam_test",
- ]
-}
diff --git a/tests/wlan/functional/BeaconLossTest.py b/tests/wlan/functional/BeaconLossTest.py
deleted file mode 100644
index d1176c3..0000000
--- a/tests/wlan/functional/BeaconLossTest.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing WiFi recovery after beacon loss, simulated by toggling the
-AP radio off and on.
-
-Override default number of iterations using the following
-parameter in the test config file.
-
-"beacon_loss_test_iterations": "5"
-"""
-
-import logging
-import time
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import SecurityMode
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-
-class BeaconLossTest(base_test.WifiBaseTest):
- # Default number of test iterations here.
- # Override using parameter in config file.
- # Eg: "beacon_loss_test_iterations": "10"
- num_of_iterations = 5
-
-    # Time to wait for the AP to start up
- wait_ap_startup_s = 15
-
-    # Time to wait after the AP radio turns back on before reconnecting
- wait_to_connect_after_ap_txon_s = 5
-
-    # Time to wait for device to disconnect after AP radio off
- wait_after_ap_txoff_s = 15
-
- # Time to wait for device to complete connection setup after
- # given an associate command
- wait_client_connection_setup_s = 15
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- self.ssid = rand_ascii_str(10)
-
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- self.num_of_iterations = int(
- self.user_params.get(
- "beacon_loss_test_iterations", self.num_of_iterations
- )
- )
- self.in_use_interface: str | None = None
-
- def teardown_test(self) -> None:
- self.dut.disconnect()
- self.dut.reset_wifi()
- # ensure radio is on, in case the test failed while the radio was off
- if self.in_use_interface:
- self.access_point.iwconfig.ap_iwconfig(
- self.in_use_interface, "txpower on"
- )
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- def beacon_loss(self, channel: int) -> None:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=channel,
- ssid=self.ssid,
- )
- time.sleep(self.wait_ap_startup_s)
- if channel > 14:
- self.in_use_interface = self.access_point.wlan_5g
- else:
- self.in_use_interface = self.access_point.wlan_2g
-
- # TODO(b/144505723): [ACTS] update BeaconLossTest.py to handle client
- # roaming, saved networks, etc.
- self.log.info("sending associate command for ssid %s", self.ssid)
- self.dut.associate(self.ssid, SecurityMode.OPEN)
-
- asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
-
- time.sleep(self.wait_client_connection_setup_s)
-
- for _ in range(0, self.num_of_iterations):
- # Turn off AP radio
- self.log.info("turning off radio")
- self.access_point.iwconfig.ap_iwconfig(
- self.in_use_interface, "txpower off"
- )
- time.sleep(self.wait_after_ap_txoff_s)
-
- # Did we disconnect from AP?
- asserts.assert_false(
- self.dut.is_connected(), "Failed to disconnect."
- )
-
- # Turn on AP radio
- self.log.info("turning on radio")
- self.access_point.iwconfig.ap_iwconfig(
- self.in_use_interface, "txpower on"
- )
- time.sleep(self.wait_to_connect_after_ap_txon_s)
-
- # Tell the client to connect
- self.log.info(f"sending associate command for ssid {self.ssid}")
- self.dut.associate(self.ssid, SecurityMode.OPEN)
- time.sleep(self.wait_client_connection_setup_s)
-
- # Did we connect back to WiFi?
- asserts.assert_true(
- self.dut.is_connected(), "Failed to connect back."
- )
-
- def test_beacon_loss_2g(self) -> None:
- self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G)
-
- def test_beacon_loss_5g(self) -> None:
- self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/ChannelSwitchTest.py b/tests/wlan/functional/ChannelSwitchTest.py
deleted file mode 100644
index cc1e8c3..0000000
--- a/tests/wlan/functional/ChannelSwitchTest.py
+++ /dev/null
@@ -1,416 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Tests STA handling of channel switch announcements.
-"""
-
-import logging
-import random
-import time
-from typing import Sequence
-
-import fidl_fuchsia_wlan_common as f_wlan_common
-from honeydew.affordances.connectivity.wlan.utils.errors import (
- HoneydewWlanError,
-)
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ClientStatusConnected,
- ConnectivityMode,
- OperatingBand,
- SecurityType,
-)
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import SecurityMode
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-# Number of channel switch announcement beacons to send.
-CSA_BEACON_COUNT = 10
-
-# Beacon interval in unit of kus.
-BEACON_INTERVAL_KUS = 100
-
-# 1 kus = 1.024ms.
-SEC_PER_KUS = 0.001024
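-
-# For reference: with the defaults above, the channel-switch deadline computed
-# below (BEACON_INTERVAL_KUS * SEC_PER_KUS * CSA_BEACON_COUNT) works out to
-# 100 * 0.001024 * 10 = 1.024 seconds.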
-
-
-class ChannelSwitchTest(base_test.WifiBaseTest):
- # Time to wait between issuing channel switches
- WAIT_BETWEEN_CHANNEL_SWITCHES_S = 15
-
- # For operating class 115 tests.
- GLOBAL_OPERATING_CLASS_115_CHANNELS = [36, 40, 44, 48]
- # A channel outside the operating class.
- NON_GLOBAL_OPERATING_CLASS_115_CHANNEL = 52
-
- # For operating class 124 tests.
- GLOBAL_OPERATING_CLASS_124_CHANNELS = [149, 153, 157, 161]
- # A channel outside the operating class.
- NON_GLOBAL_OPERATING_CLASS_124_CHANNEL = 52
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.ssid = rand_ascii_str(10)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
-
- self.access_point = self.access_points[0]
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.stop_all()
-
- def setup_class(self) -> None:
- super().setup_class()
-
- def teardown_test(self) -> None:
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def channel_switch(
- self,
- band: hostapd_constants.BandType,
- starting_channel: int,
- channel_switches: Sequence[int],
- test_with_soft_ap: bool = False,
- ) -> None:
- """Setup and run a channel switch test with the given parameters.
-
- Creates an AP, associates to it, and then issues channel switches
- through the provided channels. After each channel switch, the test
- checks that the DUT is connected for a period of time before considering
- the channel switch successful. If directed to start a SoftAP, the test
- will also check that the SoftAP is on the expected channel after each
- channel switch.
-
- Args:
- band: band that AP will use
- starting_channel: channel number that AP will use at startup
- channel_switches: ordered list of channels that the test will
- attempt to switch to
- test_with_soft_ap: whether to start a SoftAP before beginning the
- channel switches (default is False); note that if a SoftAP is
- started, the test will also check that the SoftAP handles
- channel switches correctly
- """
- current_channel = starting_channel
-
- match band:
- case hostapd_constants.BandType.BAND_2G:
- ap_iface = self.access_point.wlan_2g
- case hostapd_constants.BandType.BAND_5G:
- ap_iface = self.access_point.wlan_5g
-
- asserts.assert_true(
- self._channels_valid_for_band([current_channel], band),
- (
- f"starting channel {current_channel} not a valid channel for band {band}"
- ),
- )
-
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=current_channel,
- ssid=self.ssid,
- beacon_interval=BEACON_INTERVAL_KUS,
- # Antlion channel_switch currently only supports 20 MHz.
- vht_bandwidth=20,
- )
- if test_with_soft_ap:
- self._start_soft_ap()
- self.log.info("sending associate command for ssid %s", self.ssid)
- self.dut.associate(self.ssid, SecurityMode.OPEN)
- asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
-
- asserts.assert_true(
- channel_switches, "Cannot run test, no channels to switch to"
- )
- asserts.assert_true(
- self._channels_valid_for_band(channel_switches, band),
- (
- f"channel_switches {channel_switches} includes invalid channels "
- f"for band {band}"
- ),
- )
-
- for channel_num in channel_switches:
- if channel_num == current_channel:
- continue
- self.log.info(f"channel switch: {current_channel} -> {channel_num}")
- self.access_point.channel_switch(
- ap_iface, channel_num, CSA_BEACON_COUNT
- )
- channel_num_after_switch = self.access_point.get_current_channel(
- ap_iface
- )
- asserts.assert_equal(
- channel_num_after_switch,
- channel_num,
- "AP failed to channel switch",
- )
- previous_channel = current_channel
- current_channel = channel_num
-
- # Check periodically to see if DUT stays connected. Sometimes
- # CSA-induced disconnects occur seconds after last channel switch.
-
- change_channel_after = (
- time.time() + self.WAIT_BETWEEN_CHANNEL_SWITCHES_S
- )
- must_change_channel_within = (
- BEACON_INTERVAL_KUS * SEC_PER_KUS * CSA_BEACON_COUNT
- )
- must_change_channel_by = time.time() + must_change_channel_within
-
- while time.time() < change_channel_after:
- status = self.fuchsia_device.honeydew_fd.wlan_core.status()
- if not isinstance(status, ClientStatusConnected):
- raise signals.TestFailure(
- f"want ClientStatusConnected, got {type(status)} after "
- f"switching from channel {previous_channel} to "
- f"channel {current_channel}"
- )
-
- got_channel = status.channel.primary
-
- if got_channel == previous_channel:
- asserts.assert_less(
- time.time(),
- must_change_channel_by,
- "expected channel to switch from channel "
- f"{previous_channel} to {current_channel} "
- f"within {must_change_channel_within:.2}s",
- )
- time.sleep(0.1)
- continue
-
- asserts.assert_equal(
- got_channel,
- current_channel,
- f"want channel={current_channel}, got {got_channel}",
- )
- if test_with_soft_ap:
- soft_ap_channel = self._soft_ap_channel()
- asserts.assert_equal(
- soft_ap_channel,
- channel_num,
- f"SoftAP interface on wrong channel ({soft_ap_channel})",
- )
- time.sleep(1)
-
- def test_channel_switch_2g(self) -> None:
- """Channel switch through all (US only) channels in the 2 GHz band."""
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_2G,
- starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- channel_switches=hostapd_constants.US_CHANNELS_2G,
- )
-
- def test_channel_switch_2g_with_soft_ap(self) -> None:
- """Channel switch through (US only) 2 Ghz channels with SoftAP up."""
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_2G,
- starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- channel_switches=hostapd_constants.US_CHANNELS_2G,
- test_with_soft_ap=True,
- )
-
- def test_channel_switch_2g_shuffled_with_soft_ap(self) -> None:
- """Switch through shuffled (US only) 2 Ghz channels with SoftAP up."""
- channels = hostapd_constants.US_CHANNELS_2G
- random.shuffle(channels)
- self.log.info(f"Shuffled channel switch sequence: {channels}")
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_2G,
- starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- channel_switches=channels,
- test_with_soft_ap=True,
- )
-
- def test_channel_switch_5g(self) -> None:
- """Channel switch through all (US only) channels in the 5 GHz band."""
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- channel_switches=hostapd_constants.US_CHANNELS_5G,
- )
-
- def test_channel_switch_5g_with_soft_ap(self) -> None:
- """Channel switch through (US only) 5 GHz channels with SoftAP up."""
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- channel_switches=hostapd_constants.US_CHANNELS_5G,
- test_with_soft_ap=True,
- )
-
- def test_channel_switch_5g_shuffled_with_soft_ap(self) -> None:
- """Switch through shuffled (US only) 5 Ghz channels with SoftAP up."""
- channels = hostapd_constants.US_CHANNELS_5G
- random.shuffle(channels)
- self.log.info(f"Shuffled channel switch sequence: {channels}")
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- channel_switches=channels,
- test_with_soft_ap=True,
- )
-
- def test_channel_switch_regression_global_operating_class_115(self) -> None:
- """Channel switch into, through, and out of global op. class 115 channels.
-
- Global operating class 115 is described in IEEE 802.11-2016 Table E-4.
- Regression test for fxbug.dev/42165602.
- """
- channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
- self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
- ]
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
- channel_switches=channels,
- )
-
- def test_channel_switch_regression_global_operating_class_115_with_soft_ap(
- self,
- ) -> None:
- """Test global operating class 124 channel switches, with SoftAP.
-
- Regression test for fxbug.dev/42165602.
- """
- channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
- self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
- ]
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
- channel_switches=channels,
- test_with_soft_ap=True,
- )
-
- def test_channel_switch_regression_global_operating_class_124(self) -> None:
- """Switch into, through, and out of global op. class 124 channels.
-
- Global operating class 124 is described in IEEE 802.11-2016 Table E-4.
- Regression test for fxbug.dev/42142868.
- """
- channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
- self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
- ]
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
- channel_switches=channels,
- )
-
- def test_channel_switch_regression_global_operating_class_124_with_soft_ap(
- self,
- ) -> None:
- """Test global operating class 124 channel switches, with SoftAP.
-
- Regression test for fxbug.dev/42142868.
- """
- channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
- self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
- ]
- self.channel_switch(
- band=hostapd_constants.BandType.BAND_5G,
- starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
- channel_switches=channels,
- test_with_soft_ap=True,
- )
-
- def _channels_valid_for_band(
- self, channels: Sequence[int], band: hostapd_constants.BandType
- ) -> bool:
- """Determine if the channels are valid for the band (US only).
-
- Args:
- channels: channel numbers
- band: a valid band
- """
- channels_set = frozenset(channels)
- match band:
- case hostapd_constants.BandType.BAND_2G:
- band_channels = frozenset(hostapd_constants.US_CHANNELS_2G)
- case hostapd_constants.BandType.BAND_5G:
- band_channels = frozenset(hostapd_constants.US_CHANNELS_5G)
- return channels_set <= band_channels
-
- def _start_soft_ap(self) -> None:
- """Start a SoftAP on the DUT.
-
- Raises:
- EnvironmentError: if the SoftAP does not start
- """
- ssid = rand_ascii_str(10)
- self.log.info(f'Starting SoftAP on DUT with ssid "{ssid}"')
-
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.start(
- ssid,
- SecurityType.NONE,
- None,
- ConnectivityMode.LOCAL_ONLY,
- OperatingBand.ANY,
- )
- self.log.info(f"SoftAp network ({ssid}) is up.")
-
- def _soft_ap_channel(self) -> int:
- """Determine the channel of the DUT SoftAP interface.
-
- If the interface is not connected, the method will assert a test
- failure.
-
- Returns: channel number
-
- Raises:
- EnvironmentError: if SoftAP interface channel cannot be determined.
- signals.TestFailure: when the SoftAP interface is not connected.
- """
- iface_ids = self.dut.get_wlan_interface_id_list()
- for iface_id in iface_ids:
- try:
- result = self.fuchsia_device.honeydew_fd.wlan_core.query_iface(
- iface_id
- )
- except HoneydewWlanError as e:
- self.log.warning(f"Query iface {iface_id} failed: {e}")
- continue
- if result.role is f_wlan_common.WlanMacRole.AP:
- status = self.fuchsia_device.honeydew_fd.wlan_core.status()
- if not isinstance(status, ClientStatusConnected):
- raise signals.TestFailure(
- f"want ClientStatusConnected, got {type(status)}"
- )
- return status.channel.primary
- raise EnvironmentError("Could not determine SoftAP channel")
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/ConnectionStressTest.py b/tests/wlan/functional/ConnectionStressTest.py
deleted file mode 100644
index 484793e..0000000
--- a/tests/wlan/functional/ConnectionStressTest.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing WiFi connection and disconnection in a loop.
-"""
-
-import logging
-import time
-from dataclasses import dataclass
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib.hostapd_constants import (
- AP_DEFAULT_CHANNEL_2G,
- AP_DEFAULT_CHANNEL_5G,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-
-@dataclass
-class TestParams:
- profile: str
- channel: int
- security_mode: SecurityMode
- ap_ssid: str
- ap_password: str | None
- dut_ssid: str
- dut_password: str | None
- expect_associated: bool
-
-
-class ConnectionStressTest(base_test.WifiBaseTest):
- # Default number of test iterations here.
- # Override using parameter in config file.
- # Eg: "connection_stress_test_iterations": "50"
- num_of_iterations = 10
-
- def pre_run(self) -> None:
- tests: list[TestParams] = []
-
- # Successful associate
- for profile in [
- "whirlwind",
- "whirlwind_11ab_legacy",
- "whirlwind_11ag_legacy",
- ]:
- for channel in [AP_DEFAULT_CHANNEL_2G, AP_DEFAULT_CHANNEL_5G]:
- ssid = rand_ascii_str(10)
- tests.append(
- TestParams(
- profile=profile,
- channel=channel,
- security_mode=SecurityMode.OPEN,
- ap_ssid=ssid,
- ap_password=None,
- dut_ssid=ssid,
- dut_password=None,
- expect_associated=True,
- )
- )
-
- # Wrong SSID
- for channel in [AP_DEFAULT_CHANNEL_2G, AP_DEFAULT_CHANNEL_5G]:
- ssid = rand_ascii_str(10)
- tests.append(
- TestParams(
- profile="whirlwind",
- channel=channel,
- security_mode=SecurityMode.OPEN,
- ap_ssid=ssid,
- ap_password=None,
- dut_ssid=f"wrong_{ssid}",
- dut_password=None,
- expect_associated=False,
- )
- )
-
- # Wrong password
- for channel in [AP_DEFAULT_CHANNEL_2G, AP_DEFAULT_CHANNEL_5G]:
- ssid = rand_ascii_str(10)
- password = rand_ascii_str(20)
- tests.append(
- TestParams(
- profile="whirlwind",
- channel=channel,
- security_mode=SecurityMode.WPA2,
- ap_ssid=ssid,
- ap_password=password,
- dut_ssid=ssid,
- dut_password=f"wrong_{password}",
- expect_associated=False,
- )
- )
-
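-        # Generated names look like "test_whirlwind_2g",
-        # "test_whirlwind_5g_wrong_ssid", or "test_whirlwind_2g_wrong_password".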
- def test_name(test: TestParams) -> str:
- channel = "2g" if test.channel == AP_DEFAULT_CHANNEL_2G else "5g"
- if test.expect_associated:
- return f"test_{test.profile}_{channel}"
- if test.ap_ssid != test.dut_ssid:
- return f"test_{test.profile}_{channel}_wrong_ssid"
- if test.ap_password != test.dut_password:
- return f"test_{test.profile}_{channel}_wrong_password"
- raise TypeError(f"Unknown name for {test}")
-
- self.generate_tests(
- self.connect_disconnect, test_name, [(t,) for t in tests]
- )
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- self.ssid = rand_ascii_str(10)
-
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- self.num_of_iterations = int(
- self.user_params.get(
- "connection_stress_test_iterations", self.num_of_iterations
- )
- )
- self.log.info(f"iterations: {self.num_of_iterations}")
-
- def teardown_test(self) -> None:
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.stop_all_aps()
-
- def connect_disconnect(self, test: TestParams) -> None:
- """Helper to start an AP, connect DUT to it and disconnect
-
- Args:
- ap_config: Dictionary containing profile name and channel
- ssid: ssid to connect to
- password: password for the ssid to connect to
- """
- setup_ap(
- access_point=self.access_point,
- profile_name=test.profile,
- channel=test.channel,
- ssid=test.ap_ssid,
- security=Security(
- security_mode=test.security_mode, password=test.ap_password
- ),
- )
-
- for iteration in range(0, self.num_of_iterations):
- associated = self.dut.associate(
- test.dut_ssid,
- target_pwd=test.dut_password,
- target_security=test.security_mode,
- )
- asserts.assert_equal(
- associated,
- test.expect_associated,
- (
- f"Attempt {iteration}/{self.num_of_iterations}: "
- f"associated={associated}, want {test.expect_associated}"
- ),
- )
-
- self.dut.disconnect()
-
- # Wait a second before trying again
- time.sleep(1)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/PingStressTest.py b/tests/wlan/functional/PingStressTest.py
deleted file mode 100644
index 40c83c6..0000000
--- a/tests/wlan/functional/PingStressTest.py
+++ /dev/null
@@ -1,277 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-PingStressTest exercises sending ICMP and ICMPv6 pings to a wireless access
-router and another device behind the AP. Note, this does not reach out to the
-internet. The DUT is only responsible for sending a routable packet; any
-communication past the first-hop is not the responsibility of the DUT.
-"""
-
-import logging
-import multiprocessing
-from typing import Callable, NamedTuple
-
-from mobly import asserts, signals, test_runner
-
-from antlion import utils
-from antlion.controllers.access_point import AccessPoint, setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import SecurityMode
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-from antlion.utils import PingResult, rand_ascii_str
-
-LOOPBACK_IPV4 = "127.0.0.1"
-LOOPBACK_IPV6 = "::1"
-PING_RESULT_TIMEOUT_SEC = 60 * 5
-
-
-class Addrs(NamedTuple):
- gateway_ipv4: str
- gateway_ipv6: str
-
-
-class Test(NamedTuple):
- name: str
- dest_ip: str | Callable[[Addrs], str]
- packet_count: int = 3
- interval: int = 1000
- timeout: int = 1000
- size: int = 25
-
-
-class PingStressTest(base_test.WifiBaseTest):
- def pre_run(self) -> None:
- self.generate_tests(
- self.send_ping,
- lambda test_name, *_: f"test_{test_name}",
- [
- Test("loopback_ipv4", LOOPBACK_IPV4),
- Test("loopback_ipv6", LOOPBACK_IPV6),
- Test("gateway_ipv4", lambda addrs: addrs.gateway_ipv4),
- Test("gateway_ipv6", lambda addrs: addrs.gateway_ipv6),
- Test(
- "gateway_ipv4_small_packet",
- lambda addrs: addrs.gateway_ipv4,
- ),
- Test(
- "gateway_ipv6_small_packet",
- lambda addrs: addrs.gateway_ipv6,
- ),
- Test(
- "gateway_ipv4_small_packet_long",
- lambda addrs: addrs.gateway_ipv4,
- packet_count=50,
- ),
- Test(
- "gateway_ipv6_small_packet_long",
- lambda addrs: addrs.gateway_ipv6,
- packet_count=50,
- ),
- Test(
- "gateway_ipv4_medium_packet",
- lambda addrs: addrs.gateway_ipv4,
- size=64,
- ),
- Test(
- "gateway_ipv6_medium_packet",
- lambda addrs: addrs.gateway_ipv6,
- size=64,
- ),
- Test(
- "gateway_ipv4_medium_packet_long",
- lambda addrs: addrs.gateway_ipv4,
- packet_count=50,
- timeout=1500,
- size=64,
- ),
- Test(
- "gateway_ipv6_medium_packet_long",
- lambda addrs: addrs.gateway_ipv6,
- packet_count=50,
- timeout=1500,
- size=64,
- ),
- Test(
- "gateway_ipv4_large_packet",
- lambda addrs: addrs.gateway_ipv4,
- size=500,
- ),
- Test(
- "gateway_ipv6_large_packet",
- lambda addrs: addrs.gateway_ipv6,
- size=500,
- ),
- Test(
- "gateway_ipv4_large_packet_long",
- lambda addrs: addrs.gateway_ipv4,
- packet_count=50,
- timeout=5000,
- size=500,
- ),
- Test(
- "gateway_ipv6_large_packet_long",
- lambda addrs: addrs.gateway_ipv6,
- packet_count=50,
- timeout=5000,
- size=500,
- ),
- ],
- )
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- self.ssid = rand_ascii_str(10)
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- if len(self.access_points) < 1:
- raise signals.TestAbortClass(
- "At least one access point is required"
- )
- self.access_point: AccessPoint = self.access_points[0]
-
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.ssid,
- setup_bridge=True,
- is_ipv6_enabled=True,
- is_nat_enabled=False,
- )
-
- ap_bridges = self.access_point.interfaces.get_bridge_interface()
- if ap_bridges and len(ap_bridges) > 0:
- ap_bridge = ap_bridges[0]
- else:
- asserts.abort_class(
- f"Expected one bridge interface on the AP, got {ap_bridges}"
- )
- self.ap_ipv4 = utils.get_addr(self.access_point.ssh, ap_bridge)
- self.ap_ipv6 = utils.get_addr(
- self.access_point.ssh, ap_bridge, addr_type="ipv6_link_local"
- )
- self.log.info(
- f"Gateway finished setup ({self.ap_ipv4} | {self.ap_ipv6})"
- )
-
- self.dut.associate(self.ssid, SecurityMode.OPEN)
-
- # Wait till the DUT has valid IP addresses after connecting.
- self.fuchsia_device.wait_for_ipv4_addr(
- self.dut.get_default_wlan_test_interface()
- )
- self.fuchsia_device.wait_for_ipv6_addr(
- self.dut.get_default_wlan_test_interface()
- )
- self.log.info("DUT has valid IP addresses on test network")
-
- def teardown_class(self) -> None:
- if hasattr(self, "dut"):
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_class()
-
- def send_ping(
- self,
- _: str,
- get_addr_fn: str | Callable[[Addrs], str],
- count: int = 3,
- interval: int = 1000,
- timeout: int = 1000,
- size: int = 25,
- ) -> None:
- dest_ip = (
- get_addr_fn(
- Addrs(
- gateway_ipv4=self.ap_ipv4,
- # IPv6 link-local addresses require specification of the
- # outgoing interface as the scope ID when sending packets.
- gateway_ipv6=f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}",
- )
- )
- if callable(get_addr_fn)
- else get_addr_fn
- )
-
- self.log.info(f"Attempting to ping {dest_ip}...")
- ping_result = self.dut.ping(dest_ip, count, interval, timeout, size)
- if ping_result.success:
- self.log.info("Ping was successful.")
- else:
- raise signals.TestFailure(f"Ping was unsuccessful: {ping_result}")
-
- def test_simultaneous_pings(self) -> None:
- ping_urls = [
- self.ap_ipv4,
- f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}",
- ]
- ping_processes: list[multiprocessing.Process] = []
- ping_results: list[PingResult] = []
-
- def ping_from_dut(
- self: PingStressTest, dest_ip: str, ping_results: list[PingResult]
- ) -> None:
- self.log.info(f"Attempting to ping {dest_ip}...")
- ping_result = self.dut.ping(dest_ip, count=10, size=50)
- if ping_result.success:
- self.log.info(f"Success pinging: {dest_ip}")
- else:
- self.log.info(f"Failure pinging: {dest_ip}")
- ping_results.append(ping_result)
-
- try:
- # Start multiple ping at the same time
- for index, url in enumerate(ping_urls):
- p = multiprocessing.Process(
- target=ping_from_dut, args=(self, url, ping_results)
- )
- ping_processes.append(p)
- p.start()
-
- # Wait for all processes to complete or timeout
- for p in ping_processes:
- p.join(PING_RESULT_TIMEOUT_SEC)
-
-        finally:
-            # Report every ping that timed out, not just the last one checked.
-            timed_out_urls: list[str] = []
-
-            for index, p in enumerate(ping_processes):
-                if p.is_alive():
-                    p.terminate()
-                    timed_out_urls.append(ping_urls[index])
-
-            if timed_out_urls:
-                raise signals.TestFailure(
-                    f"Timed out while pinging {timed_out_urls}"
-                )
-
- for i, ping_result in enumerate(ping_results):
- if not ping_result.success:
- raise signals.TestFailure(
- f"Failed to ping {ping_urls[i]}: {ping_result}"
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/SoftApTest.py b/tests/wlan/functional/SoftApTest.py
deleted file mode 100644
index 997d27b..0000000
--- a/tests/wlan/functional/SoftApTest.py
+++ /dev/null
@@ -1,1641 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import multiprocessing as mp
-import random
-import time
-from dataclasses import dataclass
-from enum import Enum, StrEnum, auto, unique
-from typing import Any, Mapping, Type, TypeAlias, TypeVar
-
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectivityMode,
- OperatingBand,
- SecurityType,
-)
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-
-from antlion import utils
-from antlion.controllers import iperf_client, iperf_server
-from antlion.controllers.access_point import AccessPoint, setup_ap
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.controllers.utils_lib.ssh.connection import SshConnection
-from antlion.test_utils.abstract_devices.wlan_device import (
- AndroidWlanDevice,
- AssociationMode,
- FuchsiaWlanDevice,
- SupportsWLAN,
- create_wlan_device,
-)
-from antlion.test_utils.wifi import base_test
-
-DEFAULT_AP_PROFILE = "whirlwind"
-DEFAULT_IPERF_PORT = 5201
-DEFAULT_TIMEOUT = 30
-DEFAULT_IPERF_TIMEOUT = 60
-DEFAULT_NO_ADDR_EXPECTED_TIMEOUT = 5
-STATE_UP = True
-STATE_DOWN = False
-
-ConfigValue: TypeAlias = str | int | bool | list["ConfigValue"] | "Config"
-Config: TypeAlias = dict[str, ConfigValue]
-
-T = TypeVar("T")
-
-
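-# Small helper for reading values out of loosely-typed config dicts; e.g.
-# get_typed({"channel": 36}, "channel", int, 1) returns 36, while a value of
-# the wrong type raises TypeError.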
-def get_typed(
- map: Mapping[str, Any], key: str, value_type: Type[T], default: T
-) -> T:
- value = map.get(key, default)
- if not isinstance(value, value_type):
- raise TypeError(
- f'"{key}" must be a {value_type.__name__}, got {type(value)}'
- )
- return value
-
-
-@unique
-class DeviceRole(Enum):
- AP = auto()
- CLIENT = auto()
-
-
-@unique
-class TestType(StrEnum):
- ASSOCIATE_ONLY = auto()
- ASSOCIATE_AND_PING = auto()
- ASSOCIATE_AND_PASS_TRAFFIC = auto()
-
-
-@dataclass
-class TestParams:
- test_type: TestType
- security_type: SecurityMode
- connectivity_mode: ConnectivityMode
- operating_band: OperatingBand
- ssid: str
- password: str
- iterations: int
-
-
-@dataclass
-class APParams:
- profile: str
- ssid: str
- channel: int
- security: Security
- password: str
-
- @staticmethod
- def from_dict(d: dict[str, Any]) -> "APParams":
- security_mode_str = get_typed(
- d, "security_mode", str, SecurityMode.OPEN.value
- )
- security_mode = SecurityMode[security_mode_str]
- password = get_typed(
- d,
- "password",
- str,
- generate_random_password(security_mode=security_mode),
- )
-
- return APParams(
- profile=get_typed(d, "profile", str, DEFAULT_AP_PROFILE),
- ssid=get_typed(
- d,
- "ssid",
- str,
- utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
- ),
- channel=get_typed(
- d, "channel", int, hostapd_constants.AP_DEFAULT_CHANNEL_2G
- ),
- security=Security(security_mode, password),
- password=password,
- )
-
- def setup_ap(
- self, access_point: AccessPoint, timeout_sec: int = DEFAULT_TIMEOUT
- ) -> str:
- """Setup access_point and return the IPv4 address of its test interface."""
- setup_ap(
- access_point=access_point,
- profile_name=self.profile,
- channel=self.channel,
- ssid=self.ssid,
- security=self.security,
- )
-
- interface = (
- access_point.wlan_2g if self.channel < 36 else access_point.wlan_5g
- )
-
- end_time = time.time() + timeout_sec
- while time.time() < end_time:
- ips = utils.get_interface_ip_addresses(access_point.ssh, interface)
- if len(ips["ipv4_private"]) > 0:
- return ips["ipv4_private"][0]
- time.sleep(1)
- raise ConnectionError(
- f"After {timeout_sec}s, device {access_point.identifier} still does not have "
- f"an ipv4 address on interface {interface}."
- )
-
-
-@dataclass
-class SoftAPParams:
- ssid: str
- security_type: SecurityMode
- password: str | None
- connectivity_mode: ConnectivityMode
- operating_band: OperatingBand
-
- def __str__(self) -> str:
- if self.operating_band is OperatingBand.ANY:
- band = "any"
- elif self.operating_band is OperatingBand.ONLY_2_4GHZ:
- band = "2g"
- elif self.operating_band is OperatingBand.ONLY_5GHZ:
- band = "5g"
- else:
- raise TypeError(f'Unknown OperatingBand "{self.operating_band}"')
- return f'{band}_{self.security_type.replace("/", "_")}_{self.connectivity_mode}'
-
- @staticmethod
- def from_dict(d: dict[str, Any]) -> "SoftAPParams":
- security_type = get_typed(
- d, "security_type", str, SecurityMode.OPEN.value
- )
- security_mode = SecurityMode[security_type]
-
- password = d.get("password")
- if password is None and security_mode is not SecurityMode.OPEN:
- password = generate_random_password(security_mode=security_mode)
- if password is not None and not isinstance(password, str):
- raise TypeError(
- f'"password" must be a str or None, got {type(password)}'
- )
- if password is not None and security_mode is SecurityMode.OPEN:
- raise TypeError(
- f'"password" must be None if "security_type" is "{SecurityMode.OPEN}"'
- )
-
- connectivity_mode = get_typed(
- d, "connectivity_mode", str, str(ConnectivityMode.LOCAL_ONLY)
- )
- operating_band = get_typed(
- d, "operating_band", str, str(OperatingBand.ONLY_2_4GHZ)
- )
-
- return SoftAPParams(
- ssid=get_typed(
- d,
- "ssid",
- str,
- utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
- ),
- security_type=security_mode,
- password=password,
- connectivity_mode=ConnectivityMode[connectivity_mode],
- operating_band=OperatingBand[operating_band],
- )
-
-
-@dataclass
-class AssociationStressTestParams:
- test_type: TestType
- soft_ap_params: SoftAPParams
- iterations: int
-
- def __str__(self) -> str:
- return f"{self.soft_ap_params}_{self.test_type}_{self.iterations}_iterations"
-
- @staticmethod
- def from_dict(d: dict[str, Any]) -> "AssociationStressTestParams":
- test_type = get_typed(
- d, "test_type", str, TestType.ASSOCIATE_AND_PASS_TRAFFIC.value
- )
- return AssociationStressTestParams(
- test_type=TestType[test_type],
- soft_ap_params=SoftAPParams.from_dict(d.get("soft_ap_params", {})),
- iterations=get_typed(d, "iterations", int, 10),
- )
-
-
-@dataclass
-class ClientModeAlternatingTestParams:
- ap_params: APParams
- soft_ap_params: SoftAPParams
- iterations: int
-
- def __str__(self) -> str:
- return (
- f"ap_{self.ap_params.security.security_mode}_"
- f"soft_ap_{self.soft_ap_params.security_type}_"
- f"{self.iterations}_iterations"
- )
-
- @staticmethod
- def from_dict(d: dict[str, Any]) -> "ClientModeAlternatingTestParams":
- return ClientModeAlternatingTestParams(
- ap_params=APParams.from_dict(d.get("ap_params", {})),
- soft_ap_params=SoftAPParams.from_dict(d.get("soft_ap_params", {})),
- iterations=get_typed(d, "iterations", int, 10),
- )
-
-
-@dataclass
-class ToggleTestParams:
- soft_ap_params: SoftAPParams
- iterations: int
-
- def __str__(self) -> str:
- return f"{self.soft_ap_params}_{self.iterations}_iterations"
-
- @staticmethod
- def from_dict(d: dict[str, Any]) -> "ToggleTestParams":
- return ToggleTestParams(
- soft_ap_params=SoftAPParams.from_dict(d.get("soft_ap_params", {})),
- iterations=get_typed(d, "iterations", int, 10),
- )
-
-
-@dataclass
-class ClientModeToggleTestParams:
- ap_params: APParams
- iterations: int
-
- def __str__(self) -> str:
- return f"{self.ap_params}_{self.iterations}_iterations"
-
- @staticmethod
- def from_dict(d: dict[str, Any]) -> "ClientModeToggleTestParams":
- return ClientModeToggleTestParams(
- ap_params=APParams.from_dict(d.get("ap_params", {})),
- iterations=get_typed(d, "iterations", int, 10),
- )
-
-
-class StressTestIterationFailure(Exception):
- """Used to differentiate a subtest failure from an actual exception"""
-
-
-class SoftApTest(base_test.WifiBaseTest):
- """Tests for Fuchsia SoftAP
-
- Testbed requirement:
- * One Fuchsia device
- * At least one client (Android) device
- * For multi-client tests, at least two client (Android) devices are
-      required. Tests will be skipped if fewer than two client devices are
- present.
- * For any tests that exercise client-mode (e.g. toggle tests, simultaneous
- tests), a physical AP (whirlwind) is also required. Those tests will be
- skipped if physical AP is not present.
- """
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.soft_ap_test_params = configs.user_params.get(
- "soft_ap_test_params", {}
- )
-
- def pre_run(self) -> None:
- self.generate_soft_ap_tests()
- self.generate_association_stress_tests()
- self.generate_soft_ap_and_client_mode_alternating_stress_tests()
- self.generate_soft_ap_toggle_stress_tests()
- self.generate_client_mode_toggle_stress_tests()
- self.generate_soft_ap_toggle_stress_with_client_mode_tests()
- self.generate_client_mode_toggle_stress_with_soft_ap_tests()
- self.generate_soft_ap_and_client_mode_random_toggle_stress_tests()
-
- def generate_soft_ap_tests(self) -> None:
- tests: list[SoftAPParams] = []
-
- for operating_band in OperatingBand:
- for security_mode in [
- SecurityMode.OPEN,
- SecurityMode.WEP,
- SecurityMode.WPA,
- SecurityMode.WPA2,
- SecurityMode.WPA3,
- ]:
- for connectivity_mode in ConnectivityMode:
- if security_mode is SecurityMode.OPEN:
- ssid_length = hostapd_constants.AP_SSID_LENGTH_2G
- password = None
- else:
- ssid_length = hostapd_constants.AP_SSID_LENGTH_5G
- password = generate_random_password()
-
- tests.append(
- SoftAPParams(
- ssid=utils.rand_ascii_str(ssid_length),
- security_type=security_mode,
- password=password,
- connectivity_mode=connectivity_mode,
- operating_band=operating_band,
- )
- )
-
- def generate_name(test: SoftAPParams) -> str:
- return f"test_soft_ap_{test}"
-
- self.generate_tests(
- self.associate_with_soft_ap_test,
- generate_name,
- tests,
- )
-
- def associate_with_soft_ap_test(self, soft_ap_params: SoftAPParams) -> None:
- self.start_soft_ap(soft_ap_params)
- self.associate_with_soft_ap(self.primary_client, soft_ap_params)
- self.assert_connected_to_ap(
- self.primary_client, self.dut, check_traffic=True
- )
-
- def setup_class(self) -> None:
- super().setup_class()
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- # TODO(fxb/51313): Add in device agnosticity for clients
- # Create a wlan device and iperf client for each Android client
- self.clients: list[SupportsWLAN] = []
- self.iperf_clients_map: dict[Any, Any] = {}
- for device in self.android_devices:
- client_wlan_device = create_wlan_device(
- device, AssociationMode.POLICY
- )
- self.clients.append(client_wlan_device)
- self.iperf_clients_map[
- client_wlan_device
- ] = client_wlan_device.create_iperf_client()
- self.primary_client = self.clients[0]
-
- # Create an iperf server on the DUT, which will be used for any streaming.
- self.iperf_server_settings = settings.from_config(
- {
- "user": self.fuchsia_device.ssh_username,
- "host": self.fuchsia_device.ip,
- "ssh_config": self.fuchsia_device.ssh_config,
- }
- )
- self.iperf_server = iperf_server.IPerfServerOverSsh(
- self.iperf_server_settings,
- DEFAULT_IPERF_PORT,
- test_interface=self.dut.get_default_wlan_test_interface(),
- use_killall=True,
- )
- self.iperf_server.start()
-
- # Attempt to create an ap iperf server. AP is only required for tests
- # that use client mode.
- self.access_point: AccessPoint | None = None
- self.ap_iperf_client: iperf_client.IPerfClientOverSsh | None = None
-
- try:
- self.access_point = self.access_points[0]
- self.ap_iperf_client = iperf_client.IPerfClientOverSsh(
- self.access_point.ssh_provider,
- # Date is already synced by the AccessPoint controller.
- sync_date=False,
- )
- self.iperf_clients_map[self.access_point] = self.ap_iperf_client
-        except (AttributeError, IndexError):
- pass
-
- def teardown_class(self) -> None:
- # Because this is using killall, it will stop all iperf processes
- self.iperf_server.stop()
- super().teardown_class()
-
- def setup_test(self) -> None:
- super().setup_test()
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- for client in self.clients:
- client.disconnect()
- client.reset_wifi()
- client.wifi_toggle_state(True)
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.stop_all()
- if self.access_point:
- self.access_point.stop_all_aps()
- self.dut.disconnect()
-
- def teardown_test(self) -> None:
- for client in self.clients:
- client.disconnect()
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.stop_all()
- self.download_logs()
- if self.access_point:
- self.access_point.stop_all_aps()
- self.dut.disconnect()
- super().teardown_test()
-
- def start_soft_ap(self, params: SoftAPParams) -> None:
- """Starts a softAP on Fuchsia device.
-
- Args:
- params: SoftAP configuration, containing:
- ssid: string, SSID of softAP network
- security_type: string, security type of softAP network
- - 'none', 'wep', 'wpa', 'wpa2', 'wpa3'
- password: string, password if applicable
- connectivity_mode: string, connectivity mode for softAP
- - 'local_only', 'unrestricted'
- operating_band: string, band for softAP network
- - 'any', 'only_5_ghz', 'only_2_4_ghz'
- """
- self.log.info(f"Starting SoftAP on DUT with settings: {params}")
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.start(
- params.ssid,
- SecurityType(params.security_type.fuchsia_security_type()),
- params.password,
- params.connectivity_mode,
- params.operating_band,
- )
- self.log.info(f"SoftAp network ({params.ssid}) is up.")
-
- def associate_with_soft_ap(
- self, device: SupportsWLAN, params: SoftAPParams
- ) -> None:
- """Associates client device with softAP on Fuchsia device.
-
- Args:
- device: wlan_device to associate with the softAP
- params: soft AP configuration
-
- Raises:
- TestFailure if association fails
- """
- self.log.info(
- f'Associating {device.identifier} to SoftAP on {self.dut.identifier} called "{params.ssid}"'
- )
-
- associated = device.associate(
- params.ssid,
- target_pwd=params.password,
- target_security=params.security_type,
- check_connectivity=params.connectivity_mode
- is ConnectivityMode.UNRESTRICTED,
- )
-
- asserts.assert_true(
- associated,
- f'Failed to associate "{device.identifier}" to SoftAP "{params.ssid}"',
- )
-
- def disconnect_from_soft_ap(self, device: SupportsWLAN) -> None:
- """Disconnects client device from SoftAP.
-
- Args:
- device: wlan_device to disconnect from SoftAP
- """
- self.log.info(f"Disconnecting device {device.identifier} from SoftAP.")
- device.disconnect()
-
- def get_ap_test_interface(self, ap: AccessPoint, channel: int) -> str:
- if channel < 36:
- return ap.wlan_2g
- else:
- return ap.wlan_5g
-
- def get_device_test_interface(
- self, device: SupportsWLAN | FuchsiaDevice, role: DeviceRole
- ) -> str:
- """Retrieves test interface from a provided device, which can be the
- FuchsiaDevice DUT, the AccessPoint, or an AndroidClient.
-
- Args:
- device: the device do get the test interface from. Either
- FuchsiaDevice (DUT), Android client, or AccessPoint.
- role: str, either "client" or "ap". Required for FuchsiaDevice (DUT)
-
- Returns:
- String, name of test interface on given device.
- """
-
- if isinstance(device, FuchsiaDevice):
- device.update_wlan_interfaces()
- if role is DeviceRole.CLIENT:
- if device.wlan_client_test_interface_name is None:
- raise TypeError(
- "Expected wlan_client_test_interface_name to be str"
- )
- return device.wlan_client_test_interface_name
- if role is DeviceRole.AP:
- if device.wlan_ap_test_interface_name is None:
- raise TypeError(
- "Expected wlan_ap_test_interface_name to be str"
- )
- return device.wlan_ap_test_interface_name
- raise ValueError(f"Unsupported interface role: {role}")
- else:
- return device.get_default_wlan_test_interface()
-
- def wait_for_ipv4_address(
- self,
- device: SupportsWLAN | AccessPoint,
- interface_name: str,
- timeout: int = DEFAULT_TIMEOUT,
- ) -> str:
- """Waits for interface on a wlan_device to get an ipv4 address.
-
- Args:
- device: wlan_device or AccessPoint to check interface
- interface_name: name of the interface to check
- timeout: seconds to wait before raising an error
-
- Returns:
- The IP address of interface_name.
-
- Raises:
- ConnectionError, if interface does not have an ipv4 address after timeout
- """
- comm_channel: SshConnection | FuchsiaDevice | AndroidDevice
- if isinstance(device, AccessPoint):
- comm_channel = device.ssh
- elif isinstance(device, FuchsiaWlanDevice):
- comm_channel = device.device
- elif isinstance(device, AndroidWlanDevice):
- comm_channel = device.device
- else:
- raise TypeError(f"Invalid device type {type(device)}")
-
- end_time = time.time() + timeout
- while time.time() < end_time:
- ips = utils.get_interface_ip_addresses(comm_channel, interface_name)
- if len(ips["ipv4_private"]) > 0:
- self.log.info(
- f"Device {device.identifier} interface {interface_name} has "
- f"ipv4 address {ips['ipv4_private'][0]}"
- )
- return ips["ipv4_private"][0]
- else:
- time.sleep(1)
- raise ConnectionError(
- f"After {timeout} seconds, device {device.identifier} still does not have "
- f"an ipv4 address on interface {interface_name}."
- )
-
- def run_iperf_traffic(
- self,
- ip_client: iperf_client.IPerfClientOverAdb
- | iperf_client.IPerfClientOverSsh,
- server_address: str,
- server_port: int = 5201,
- ) -> None:
- """Runs traffic between client and ap an verifies throughput.
-
- Args:
- ip_client: iperf client to use
- server_address: ipv4 address of the iperf server to use
- server_port: port of the iperf server
-
- Raises:
- ConnectionError if no traffic passes in either direction
- """
- ip_client_identifier = self.get_iperf_client_identifier(ip_client)
-
- self.log.info(
- f"Running traffic from iperf client {ip_client_identifier} to "
- f"iperf server {server_address}."
- )
- client_to_ap_path = ip_client.start(
- server_address,
- f"-i 1 -t 10 -J -p {server_port}",
- "client_to_soft_ap",
- )
-
- client_to_ap_result = iperf_server.IPerfResult(client_to_ap_path)
- if not client_to_ap_result.avg_receive_rate:
- raise ConnectionError(
- f"Failed to pass traffic from iperf client {ip_client_identifier} to "
- f"iperf server {server_address}."
- )
-
- self.log.info(
- f"Passed traffic from iperf client {ip_client_identifier} to "
- f"iperf server {server_address} with avg rate of "
- f"{client_to_ap_result.avg_receive_rate} MB/s."
- )
-
- self.log.info(
- f"Running traffic from iperf server {server_address} to "
- f"iperf client {ip_client_identifier}."
- )
- ap_to_client_path = ip_client.start(
- server_address,
- f"-i 1 -t 10 -R -J -p {server_port}",
- "soft_ap_to_client",
- )
-
- ap_to_client_result = iperf_server.IPerfResult(ap_to_client_path)
- if not ap_to_client_result.avg_receive_rate:
- raise ConnectionError(
- f"Failed to pass traffic from iperf server {server_address} to "
- f"iperf client {ip_client_identifier}."
- )
-
- self.log.info(
- f"Passed traffic from iperf server {server_address} to "
- f"iperf client {ip_client_identifier} with avg rate of "
- f"{ap_to_client_result.avg_receive_rate} MB/s."
- )
-
- def run_iperf_traffic_parallel_process(
- self,
- ip_client: iperf_client.IPerfClientOverAdb
- | iperf_client.IPerfClientOverSsh,
- server_address: str,
- error_queue: "mp.Queue[str]",
- server_port: int = 5201,
- ) -> None:
- """Executes run_iperf_traffic using a queue to capture errors. Used
- when running iperf in a parallel process.
-
- Args:
- ip_client: iperf client to use
- server_address: ipv4 address of the iperf server to use
- error_queue: multiprocessing queue to capture errors
- server_port: port of the iperf server
- """
- try:
- self.run_iperf_traffic(
- ip_client, server_address, server_port=server_port
- )
- except ConnectionError as err:
- error_queue.put(
- f"In iperf process from {self.get_iperf_client_identifier(ip_client)} to {server_address}: {err}"
- )
-
- def get_iperf_client_identifier(
- self,
- ip_client: iperf_client.IPerfClientOverAdb
- | iperf_client.IPerfClientOverSsh,
- ) -> str:
- """Retrieves an identifier string from iperf client, for logging.
-
- Args:
- ip_client: iperf client to grab identifier from
- """
- if type(ip_client) == iperf_client.IPerfClientOverAdb:
- assert hasattr(ip_client._android_device, "serial")
- assert isinstance(ip_client._android_device.serial, str)
- return ip_client._android_device.serial
- if type(ip_client) == iperf_client.IPerfClientOverSsh:
- return ip_client._ssh_provider.config.host_name
- raise TypeError(f'Unknown "ip_client" type {type(ip_client)}')
-
- def assert_connected_to_ap(
- self,
- client: SupportsWLAN,
- ap: SupportsWLAN | AccessPoint,
- channel: int | None = None,
- check_traffic: bool = False,
- timeout_sec: int = DEFAULT_TIMEOUT,
- ) -> None:
- """Assert the client device has L3 connectivity to the AP."""
- device_interface = self.get_device_test_interface(
- client, DeviceRole.CLIENT
- )
-
- if isinstance(ap, AccessPoint):
- if channel is None:
- raise TypeError(
- "channel must not be None when ap is an AccessPoint"
- )
- ap_interface = self.get_ap_test_interface(ap, channel)
- else:
- ap_interface = self.get_device_test_interface(ap, DeviceRole.AP)
-
- client_ipv4 = self.wait_for_ipv4_address(
- client, device_interface, timeout=timeout_sec
- )
- ap_ipv4 = self.wait_for_ipv4_address(
- ap, ap_interface, timeout=timeout_sec
- )
-
- client_ping = client.ping(ap_ipv4, timeout=DEFAULT_TIMEOUT * 1000)
- asserts.assert_true(
- client_ping.success,
- f"Failed to ping from client to ap: {client_ping}",
- )
-
- ap_ping = ap.ping(client_ipv4, timeout=DEFAULT_TIMEOUT * 1000)
- asserts.assert_true(
- ap_ping.success,
- f"Failed to ping from ap to client: {ap_ping}",
- )
-
- if not check_traffic:
- return
-
- if client is self.dut:
- self.run_iperf_traffic(self.iperf_clients_map[ap], client_ipv4)
- else:
- self.run_iperf_traffic(self.iperf_clients_map[client], ap_ipv4)
-
- def assert_disconnected_to_ap(
- self,
- client: SupportsWLAN,
- ap: SupportsWLAN | AccessPoint,
- channel: int | None = None,
- timeout_sec: int = DEFAULT_NO_ADDR_EXPECTED_TIMEOUT,
- ) -> None:
- """Assert the client device does not have ping connectivity to the AP."""
- device_interface = self.get_device_test_interface(
- client, DeviceRole.CLIENT
- )
-
- if isinstance(ap, AccessPoint):
- if channel is None:
- raise TypeError(
- "channel must not be None when ap is an AccessPoint"
- )
- ap_interface = self.get_ap_test_interface(ap, channel)
- else:
- ap_interface = self.get_device_test_interface(ap, DeviceRole.AP)
-
- try:
- client_ipv4 = self.wait_for_ipv4_address(
- client, device_interface, timeout=timeout_sec
- )
- ap_ipv4 = self.wait_for_ipv4_address(
- ap, ap_interface, timeout=timeout_sec
- )
- except ConnectionError:
- # When disconnected, IP addresses aren't always available.
- return
-
- asserts.assert_false(
- client.ping(ap_ipv4, timeout=DEFAULT_TIMEOUT * 1000).success,
- "Unexpectedly succeeded to ping from client to ap",
- )
- asserts.assert_false(
- ap.ping(client_ipv4, timeout=DEFAULT_TIMEOUT * 1000).success,
- "Unexpectedly succeeded to ping from ap to client",
- )
-
- # Runners for Generated Test Cases
-
- def run_soft_ap_association_stress_test(
- self, test: AssociationStressTestParams
- ) -> None:
- """Sets up a SoftAP, and repeatedly associates and disassociates a client."""
- self.log.info(
- f"Running association stress test type {test.test_type} in "
- f"iteration {test.iterations} times"
- )
-
- self.start_soft_ap(test.soft_ap_params)
-
- passed_count = 0
- for run in range(test.iterations):
- try:
- self.log.info(f"Starting SoftAp association run {str(run + 1)}")
-
- if test.test_type == TestType.ASSOCIATE_ONLY:
- self.associate_with_soft_ap(
- self.primary_client, test.soft_ap_params
- )
-
- elif test.test_type == TestType.ASSOCIATE_AND_PING:
- self.associate_with_soft_ap(
- self.primary_client, test.soft_ap_params
- )
- self.assert_connected_to_ap(self.primary_client, self.dut)
-
- elif test.test_type == TestType.ASSOCIATE_AND_PASS_TRAFFIC:
- self.associate_with_soft_ap(
- self.primary_client, test.soft_ap_params
- )
- self.assert_connected_to_ap(
- self.primary_client, self.dut, check_traffic=True
- )
-
- else:
- raise AttributeError(f"Invalid test type: {test.test_type}")
-
- except signals.TestFailure as err:
- self.log.error(
- f"SoftAp association stress run {str(run + 1)} failed. "
- f"Err: {err.details}"
- )
- else:
- self.log.info(
- f"SoftAp association stress run {str(run + 1)} successful."
- )
- passed_count += 1
-
- if passed_count < test.iterations:
- asserts.fail(
- "SoftAp association stress test failed after "
- f"{passed_count}/{test.iterations} runs."
- )
-
- asserts.explicit_pass(
- f"SoftAp association stress test passed after {passed_count}/{test.iterations} "
- "runs."
- )
-
- # Alternate SoftAP and Client mode test
-
- def run_soft_ap_and_client_mode_alternating_test(
- self, test: ClientModeAlternatingTestParams
- ) -> None:
- """Runs a single soft_ap and client alternating stress test.
-
- See test_soft_ap_and_client_mode_alternating_stress for details.
- """
- if self.access_point is None:
- raise signals.TestSkip("No access point provided")
-
- test.ap_params.setup_ap(self.access_point)
-
- for _ in range(test.iterations):
- # Toggle SoftAP on then off.
- self.toggle_soft_ap(test.soft_ap_params, STATE_DOWN)
- self.toggle_soft_ap(test.soft_ap_params, STATE_UP)
-
- # Toggle client mode on then off.
- self.toggle_client_mode(
- self.access_point, test.ap_params, STATE_DOWN
- )
- self.toggle_client_mode(self.access_point, test.ap_params, STATE_UP)
-
- # Toggle Stress Test Helper Functions
-
- # Stress Test Toggle Functions
-
- def start_soft_ap_and_verify_connected(
- self, client: SupportsWLAN, soft_ap_params: SoftAPParams
- ) -> None:
- """Sets up SoftAP, associates a client, then verifies connection.
-
- Args:
- client: client wlan_device used to verify the SoftAP
- soft_ap_params: SoftAP configuration to start
-
- Raises:
- StressTestIterationFailure, if toggle occurs, but connection
- is not functioning as expected
- """
- # Change SSID every time, to avoid client connection issues.
- soft_ap_params.ssid = utils.rand_ascii_str(
- hostapd_constants.AP_SSID_LENGTH_2G
- )
- self.start_soft_ap(soft_ap_params)
- self.associate_with_soft_ap(client, soft_ap_params)
- self.assert_connected_to_ap(client, self.dut)
-
- def stop_soft_ap_and_verify_disconnected(
- self, client: SupportsWLAN, soft_ap_params: SoftAPParams
- ) -> None:
- """Tears down SoftAP, and verifies connection is down.
-
- Args:
- client: client wlan_device used to verify the SoftAP is down
- soft_ap_params: configuration of the SoftAP to tear down
-
- Raises:
- EnvironmentError, if client and AP can still communicate
- """
- self.log.info("Stopping SoftAP on DUT.")
- self.fuchsia_device.honeydew_fd.wlan_policy_ap.stop(
- soft_ap_params.ssid,
- SecurityType(soft_ap_params.security_type.fuchsia_security_type()),
- soft_ap_params.password,
- )
- self.assert_disconnected_to_ap(client, self.dut)
-
- def start_client_mode_and_verify_connected(
- self, access_point: AccessPoint, ap_params: APParams
- ) -> None:
- """Connects DUT to AP in client mode and verifies connection
-
- Args:
- ap_params: dict, containing parameters of the AP network
-
- Raises:
- EnvironmentError, if DUT fails to associate altogether
- StressTestIterationFailure, if DUT associates but connection is not
- functioning as expected.
- """
- self.log.info(f"Associating DUT with AP network: {ap_params.ssid}")
- associated = self.dut.associate(
- ap_params.ssid,
- ap_params.security.security_mode,
- target_pwd=ap_params.password,
- )
- if not associated:
- raise EnvironmentError("Failed to associate DUT in client mode.")
- else:
- self.log.info("Association successful.")
-
- self.assert_connected_to_ap(
- self.dut, access_point, channel=ap_params.channel
- )
-
- def stop_client_mode_and_verify_disconnected(
- self, access_point: AccessPoint, ap_params: APParams
- ) -> None:
- """Disconnects DUT from AP and verifies connection is down.
-
- Args:
- access_point: access point the DUT was associated with
- ap_params: parameters of the AP network
-
- Raises:
- EnvironmentError, if DUT and AP can still communicate
- """
- self.log.info("Disconnecting DUT from AP.")
- self.dut.disconnect()
- self.assert_disconnected_to_ap(
- self.dut, access_point, channel=ap_params.channel
- )
-
- # Toggle Stress Test Iteration and Pre-Test Functions
-
- # SoftAP Toggle Stress Test Helper Functions
-
- def soft_ap_toggle_test(self, test: ToggleTestParams) -> None:
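- # STATE_DOWN / STATE_UP appear to be boolean state constants (down == False);
- # each iteration flips the requested state before toggling.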
- current_state = STATE_DOWN
- for i in range(test.iterations):
- self.toggle_soft_ap(test.soft_ap_params, current_state)
- current_state = not current_state
-
- def toggle_soft_ap(
- self, soft_ap_params: SoftAPParams, current_state: bool
- ) -> None:
- """Runs a single iteration of SoftAP toggle stress test
-
- Args:
- soft_ap_params: SoftAP configuration to toggle
- current_state: bool, current state of SoftAP (True if up,
- else False)
-
- Raises:
- StressTestIterationFailure, if toggle occurs but mode isn't
- functioning correctly.
- EnvironmentError, if toggle fails to occur at all
- """
- self.log.info(f"Toggling SoftAP {'down' if current_state else 'up'}.")
- if current_state == STATE_DOWN:
- self.start_soft_ap_and_verify_connected(
- self.primary_client, soft_ap_params
- )
- else:
- self.stop_soft_ap_and_verify_disconnected(
- self.primary_client, soft_ap_params
- )
-
- # Client Mode Toggle Stress Test Helper Functions
-
- def client_mode_toggle_test(self, test: ClientModeToggleTestParams) -> None:
- if self.access_point is None:
- raise signals.TestSkip("No access point provided")
-
- test.ap_params.setup_ap(self.access_point)
-
- current_state = STATE_DOWN
- for i in range(test.iterations):
- self.log.info(
- f"Iteration {i}: toggling client mode {'off' if current_state else 'on'}."
- )
- self.toggle_client_mode(
- self.access_point, test.ap_params, current_state
- )
- current_state = not current_state
-
- def toggle_client_mode(
- self,
- access_point: AccessPoint,
- ap_params: APParams,
- current_state: bool,
- ) -> None:
- if current_state == STATE_DOWN:
- self.start_client_mode_and_verify_connected(access_point, ap_params)
- else:
- self.stop_client_mode_and_verify_disconnected(
- access_point, ap_params
- )
-
- # TODO: Remove
- def client_mode_toggle_test_iteration(
- self,
- test: ClientModeToggleTestParams,
- access_point: AccessPoint,
- current_state: bool,
- ) -> None:
- """Runs a single iteration of client mode toggle stress test
-
- Args:
- test: test parameters containing the AP configuration
- current_state: bool, current state of client mode (True if up,
- else False)
-
- Raises:
- StressTestIterationFailure, if toggle occurs but mode isn't
- functioning correctly.
- EnvironmentError, if toggle fails to occur at all
- """
- self.log.info(
- f"Toggling client mode {'off' if current_state else 'on'}"
- )
- if current_state == STATE_DOWN:
- self.start_client_mode_and_verify_connected(
- access_point, test.ap_params
- )
- else:
- self.stop_client_mode_and_verify_disconnected(
- access_point, test.ap_params
- )
-
- # Toggle SoftAP with Client Mode Up Test Helper Functions
-
- def soft_ap_toggle_with_client_mode_test(
- self, test: ClientModeAlternatingTestParams
- ) -> None:
- if self.access_point is None:
- raise signals.TestSkip("No access point provided")
-
- test.ap_params.setup_ap(self.access_point)
- self.start_client_mode_and_verify_connected(
- self.access_point, test.ap_params
- )
-
- current_state = STATE_DOWN
- for i in range(test.iterations):
- self.toggle_soft_ap(test.soft_ap_params, current_state)
- self.assert_connected_to_ap(
- self.dut, self.access_point, channel=test.ap_params.channel
- )
- current_state = not current_state
-
- # Toggle Client Mode with SoftAP Up Test Helper Functions
-
- def client_mode_toggle_with_soft_ap_test(
- self, test: ClientModeAlternatingTestParams
- ) -> None:
- if self.access_point is None:
- raise signals.TestSkip("No access point provided")
-
- test.ap_params.setup_ap(self.access_point)
- self.start_soft_ap_and_verify_connected(
- self.primary_client, test.soft_ap_params
- )
-
- current_state = STATE_DOWN
- for i in range(test.iterations):
- self.toggle_client_mode(
- self.access_point, test.ap_params, current_state
- )
- self.assert_connected_to_ap(self.primary_client, self.dut)
- current_state = not current_state
-
- # Toggle SoftAP and Client Mode Randomly
-
- def soft_ap_and_client_mode_random_toggle_test(
- self, test: ClientModeAlternatingTestParams
- ) -> None:
- if self.access_point is None:
- raise signals.TestSkip("No access point provided")
-
- test.ap_params.setup_ap(self.access_point)
-
- current_soft_ap_state = STATE_DOWN
- current_client_mode_state = STATE_DOWN
- for i in range(test.iterations):
- # Randomly determine if softap, client mode, or both should
- # be toggled.
- rand_toggle_choice = random.randrange(0, 3)
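- # 0: toggle SoftAP only, 1: toggle both, 2: toggle client mode only.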
- if rand_toggle_choice <= 1:
- self.toggle_soft_ap(test.soft_ap_params, current_soft_ap_state)
- current_soft_ap_state = not current_soft_ap_state
- if rand_toggle_choice >= 1:
- self.toggle_client_mode(
- self.access_point, test.ap_params, current_client_mode_state
- )
- current_client_mode_state = not current_client_mode_state
-
- if current_soft_ap_state == STATE_UP:
- self.assert_connected_to_ap(self.primary_client, self.dut)
- else:
- self.assert_disconnected_to_ap(self.primary_client, self.dut)
-
- if current_client_mode_state == STATE_UP:
- self.assert_connected_to_ap(
- self.dut, self.access_point, channel=test.ap_params.channel
- )
- else:
- self.assert_disconnected_to_ap(
- self.dut, self.access_point, channel=test.ap_params.channel
- )
-
- # Test Cases
-
- def test_multi_client(self) -> None:
- """Tests multi-client association with a single soft AP network.
-
- This test associates a variable-length list of clients, verifying each
- can ping the SoftAP and pass traffic, and then verifies all previously
- associated clients can still ping and pass traffic.
-
- The same occurs in reverse for disassociations.
-
- SoftAP parameters can be changed from their defaults via the Mobly config:
- Example Config
- "soft_ap_test_params" : {
- "multi_client_test_params": {
- "soft_ap_params": {
- "ssid": "testssid",
- "security_type": "wpa2",
- "password": "password",
- "connectivity_mode": "local_only",
- "operating_band": "only_2_4_ghz"
- }
- }
- }
- """
- asserts.skip_if(
- len(self.clients) < 2, "Test requires at least 2 SoftAPClients"
- )
-
- test_params = self.soft_ap_test_params.get(
- "multi_client_test_params", {}
- )
- soft_ap_params = SoftAPParams.from_dict(
- test_params.get("soft_ap_params", {})
- )
-
- self.start_soft_ap(soft_ap_params)
-
- associated: list[dict[str, Any]] = []
-
- for client in self.clients:
- # Associate new client
- self.associate_with_soft_ap(client, soft_ap_params)
- self.assert_connected_to_ap(client, self.dut)
-
- # Verify previously associated clients still behave as expected
- for associated_client in associated:
- id = associated_client["device"].identifier
- self.log.info(
- f"Verifying previously associated client {id} still "
- "functions correctly."
- )
- self.assert_connected_to_ap(
- associated_client["device"], self.dut, check_traffic=True
- )
-
- client_interface = self.get_device_test_interface(
- client, DeviceRole.CLIENT
- )
- client_ipv4 = self.wait_for_ipv4_address(client, client_interface)
- associated.append({"device": client, "address": client_ipv4})
-
- self.log.info("All devices successfully associated.")
-
- self.log.info("Verifying all associated clients can ping eachother.")
- for transmitter in associated:
- for receiver in associated:
- if transmitter != receiver:
- if not transmitter["device"].can_ping(receiver["address"]):
- asserts.fail(
- "Could not ping from one associated client "
- f"({transmitter['address']}) to another "
- f"({receiver['address']})."
- )
- else:
- self.log.info(
- "Successfully pinged from associated client "
- f"({transmitter['address']}) to another "
- f"({receiver['address']})"
- )
-
- self.log.info(
- "All associated clients can ping each other. Beginning disassociations."
- )
-
- while len(associated) > 0:
- # Disassociate client
- client = associated.pop()["device"]
- self.disconnect_from_soft_ap(client)
-
- # Verify the clients that are still connected behave as expected
- for associated_client in associated:
- id = associated_client["device"].identifier
- self.log.info(
- f"Verifying still associated client {id} still functions correctly."
- )
- self.assert_connected_to_ap(
- associated_client["device"], self.dut, check_traffic=True
- )
-
- self.log.info("All disassociations occurred smoothly.")
-
- def test_simultaneous_soft_ap_and_client(self) -> None:
- """Tests FuchsiaDevice DUT can act as a client and a SoftAP
- simultaneously.
-
- Raises:
- ConnectionError: if DUT fails to connect as client
- RuntimeError: if parallel processes fail to join
- TestFailure: if DUT fails to pass traffic as either a client or an
- AP
- """
- if self.access_point is None:
- raise signals.TestSkip("No access point provided")
-
- self.log.info("Setting up AP using hostapd.")
- test_params = self.soft_ap_test_params.get(
- "soft_ap_and_client_test_params", {}
- )
-
- # Configure AP
- ap_params = APParams.from_dict(test_params.get("ap_params", {}))
-
- # Setup AP and associate DUT
- ap_params.setup_ap(self.access_point)
- try:
- self.start_client_mode_and_verify_connected(
- self.access_point, ap_params
- )
- except Exception as err:
- asserts.fail(f"Failed to set up client mode. Err: {err}")
-
- # Setup SoftAP
- soft_ap_params = SoftAPParams.from_dict(
- test_params.get("soft_ap_params", {})
- )
- self.start_soft_ap_and_verify_connected(
- self.primary_client, soft_ap_params
- )
-
- # Get FuchsiaDevice test interfaces
- dut_ap_interface = self.get_device_test_interface(
- self.dut, role=DeviceRole.AP
- )
- dut_client_interface = self.get_device_test_interface(
- self.dut, role=DeviceRole.CLIENT
- )
-
- # Get FuchsiaDevice addresses
- dut_ap_ipv4 = self.wait_for_ipv4_address(self.dut, dut_ap_interface)
- dut_client_ipv4 = self.wait_for_ipv4_address(
- self.dut, dut_client_interface
- )
-
- # Set up a secondary iperf server on the FuchsiaDevice DUT.
- self.log.info("Setting up second iperf server on FuchsiaDevice DUT.")
- secondary_iperf_server = iperf_server.IPerfServerOverSsh(
- self.iperf_server_settings,
- DEFAULT_IPERF_PORT + 1,
- test_interface=self.dut.get_default_wlan_test_interface(),
- use_killall=True,
- )
- secondary_iperf_server.start()
-
- # Set up iperf client on AP
- self.log.info("Setting up iperf client on AP.")
- ap_iperf_client = iperf_client.IPerfClientOverSsh(
- self.access_point.ssh_provider,
- # Date is already synced by the AccessPoint controller.
- sync_date=False,
- )
-
- # Setup iperf processes:
- # Primary client <-> SoftAP interface on FuchsiaDevice
- # AP <-> Client interface on FuchsiaDevice
- process_errors: "mp.Queue[str]" = mp.Queue()
- iperf_soft_ap = mp.Process(
- target=self.run_iperf_traffic_parallel_process,
- args=[
- self.iperf_clients_map[self.primary_client],
- dut_ap_ipv4,
- process_errors,
- ],
- )
-
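- # Note: server_port=5202 below must match the secondary iperf server above,
- # which listens on DEFAULT_IPERF_PORT + 1 (assuming DEFAULT_IPERF_PORT is
- # 5201, the default server port used by run_iperf_traffic).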
- iperf_fuchsia_client = mp.Process(
- target=self.run_iperf_traffic_parallel_process,
- args=[ap_iperf_client, dut_client_ipv4, process_errors],
- kwargs={"server_port": 5202},
- )
-
- # Run iperf processes simultaneously
- self.log.info(
- "Running simultaneous iperf traffic: between AP and DUT "
- "client interface, and DUT AP interface and client."
- )
-
- iperf_soft_ap.start()
- iperf_fuchsia_client.start()
-
- # Block until processes can join or timeout
- for proc in [iperf_soft_ap, iperf_fuchsia_client]:
- proc.join(timeout=DEFAULT_IPERF_TIMEOUT)
- if proc.is_alive():
- proc.terminate()
- proc.join()
- raise RuntimeError(f"Failed to join process {proc}")
-
- # Stop iperf server (also stopped in teardown class as failsafe)
- secondary_iperf_server.stop()
-
- # Check errors from parallel processes
- if process_errors.empty():
- asserts.explicit_pass(
- "FuchsiaDevice was successfully able to pass traffic as a "
- "client and an AP simultaneously."
- )
- else:
- while not process_errors.empty():
- self.log.error(
- f"Error in iperf process: {process_errors.get()}"
- )
- asserts.fail(
- "FuchsiaDevice failed to pass traffic as a client and an AP "
- "simultaneously."
- )
-
- def generate_association_stress_tests(self) -> None:
- """Repeatedly associate and disassociate a client.
-
- Creates one SoftAP and uses one client.
-
- Example config:
-
- soft_ap_test_params:
- test_soft_ap_association_stress:
- - soft_ap_params:
- ssid: "test_network"
- security_type: "wpa2"
- password: "password"
- connectivity_mode: "local_only"
- operating_band: "only_2_4_ghz"
- iterations: 10
- """
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "test_soft_ap_association_stress",
- [],
- )
-
- tests = [
- AssociationStressTestParams.from_dict(spec) for spec in test_specs
- ]
-
- if len(tests) == 0:
- # Add default test
- tests.append(AssociationStressTestParams.from_dict({}))
-
- def generate_name(test: AssociationStressTestParams) -> str:
- return f"test_association_stress_{test}"
-
- self.generate_tests(
- self.run_soft_ap_association_stress_test,
- generate_name,
- tests,
- )
-
- def generate_soft_ap_and_client_mode_alternating_stress_tests(self) -> None:
- """Alternate between SoftAP and Client modes.
-
- Each test sets up an AP. Then, for each iteration:
- - DUT starts up SoftAP, client associates with SoftAP,
- connection is verified, then disassociates
- - DUT associates to the AP, connection is verified, then
- disassociates
-
- Example Config:
-
- soft_ap_test_params:
- toggle_soft_ap_and_client_tests:
- - ap_params:
- ssid: "test-ap-network"
- security_mode: "wpa2"
- password: "password"
- channel: 6
- soft_ap_params:
- ssid: "test-soft-ap-network"
- security_type: "wpa2"
- password: "other-password"
- connectivity_mode: "local_only"
- operating_band: "only_2_4_ghz"
- iterations: 5
- """
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "toggle_soft_ap_and_client_tests",
- [],
- )
-
- tests = [
- ClientModeAlternatingTestParams.from_dict(spec)
- for spec in test_specs
- ]
-
- if len(tests) == 0:
- # Add default test
- tests.append(ClientModeAlternatingTestParams.from_dict({}))
-
- def generate_name(test: ClientModeAlternatingTestParams) -> str:
- return f"test_soft_ap_and_client_mode_alternating_stress_{test}"
-
- self.generate_tests(
- self.run_soft_ap_and_client_mode_alternating_test,
- generate_name,
- tests,
- )
-
- def generate_soft_ap_toggle_stress_tests(self) -> None:
- """Toggle SoftAP up and down.
-
- If toggled up, a client is associated and the connection is verified.
- If toggled down, the test verifies the client is not connected.
-
- Will run with default params, but custom tests can be provided in the
- Mobly config.
-
- Example Config
-
- soft_ap_test_params:
- test_soft_ap_toggle_stress:
- - soft_ap_params:
- security_type: "wpa2"
- password: "password"
- connectivity_mode: "local_only"
- operating_band: "only_2_4_ghz"
- iterations: 5
- """
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "test_soft_ap_toggle_stress",
- [],
- )
-
- tests = [ToggleTestParams.from_dict(spec) for spec in test_specs]
-
- if len(tests) == 0:
- # Add default test
- tests.append(ToggleTestParams.from_dict({}))
-
- def generate_name(test: ToggleTestParams) -> str:
- return f"test_soft_ap_toggle_stress_{test}"
-
- self.generate_tests(
- self.soft_ap_toggle_test,
- generate_name,
- tests,
- )
-
- def generate_client_mode_toggle_stress_tests(self) -> None:
- """Toggles client mode up and down.
-
- If toggled up, the DUT associates to the AP, and the connection is verified.
- If toggled down, the test verifies the DUT is not connected to the AP.
-
- Will run with default params, but custom tests can be provided in the
- Mobly config.
-
- Example Config
-
- soft_ap_test_params:
- test_client_mode_toggle_stress:
- - ap_params:
- ssid: "test-ap-network"
- security_mode: "wpa2"
- password: "password"
- channel: 6
- iterations: 10
- """
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "test_client_mode_toggle_stress",
- [],
- )
-
- tests = [
- ClientModeToggleTestParams.from_dict(spec) for spec in test_specs
- ]
-
- if len(tests) == 0:
- # Add default test
- tests.append(ClientModeToggleTestParams.from_dict({}))
-
- def generate_name(test: ClientModeToggleTestParams) -> str:
- return f"test_client_mode_toggle_stress_{test}"
-
- self.generate_tests(
- self.client_mode_toggle_test,
- generate_name,
- tests,
- )
-
- def generate_soft_ap_toggle_stress_with_client_mode_tests(self) -> None:
- """Same as test_soft_ap_toggle_stress, but client mode is set up
- at test start and verified after every toggle."""
-
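- # Illustrative example config (not from the original docs; the key name
- # matches the lookup below, and the field layout is assumed to mirror the
- # alternating-stress example above, since both use
- # ClientModeAlternatingTestParams.from_dict):
- #
- # soft_ap_test_params:
- #   test_soft_ap_toggle_stress_with_client_mode:
- #   - ap_params: {ssid: "test-ap-network", security_mode: "wpa2", password: "password", channel: 6}
- #     soft_ap_params: {security_type: "wpa2", password: "other-password", connectivity_mode: "local_only", operating_band: "only_2_4_ghz"}
- #     iterations: 5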
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "test_soft_ap_toggle_stress_with_client_mode",
- [],
- )
-
- tests = [
- ClientModeAlternatingTestParams.from_dict(spec)
- for spec in test_specs
- ]
-
- if len(tests) == 0:
- # Add default test
- tests.append(ClientModeAlternatingTestParams.from_dict({}))
-
- def generate_name(test: ClientModeAlternatingTestParams) -> str:
- return f"test_soft_ap_toggle_stress_with_client_mode_{test}"
-
- self.generate_tests(
- self.soft_ap_toggle_with_client_mode_test,
- generate_name,
- tests,
- )
-
- def generate_client_mode_toggle_stress_with_soft_ap_tests(self) -> None:
- """Same as test_client_mode_toggle_stress, but softap is set up at
- test start and verified after every toggle."""
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "test_client_mode_toggle_stress_with_soft_ap",
- [],
- )
-
- tests = [
- ClientModeAlternatingTestParams.from_dict(spec)
- for spec in test_specs
- ]
-
- if len(tests) == 0:
- # Add default test
- tests.append(ClientModeAlternatingTestParams.from_dict({}))
-
- def generate_name(test: ClientModeAlternatingTestParams) -> str:
- return f"test_client_mode_toggle_stress_with_soft_ap_{test}"
-
- self.generate_tests(
- self.client_mode_toggle_with_soft_ap_test,
- generate_name,
- tests,
- )
-
- def generate_soft_ap_and_client_mode_random_toggle_stress_tests(
- self,
- ) -> None:
- """Same as above toggle stres tests, but each iteration, either softap,
- client mode, or both are toggled, then states are verified."""
- test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
- "test_soft_ap_and_client_mode_random_toggle_stress",
- [],
- )
-
- tests = [
- ClientModeAlternatingTestParams.from_dict(spec)
- for spec in test_specs
- ]
-
- if len(tests) == 0:
- # Add default test
- tests.append(ClientModeAlternatingTestParams.from_dict({}))
-
- def generate_name(test: ClientModeAlternatingTestParams) -> str:
- return f"test_soft_ap_and_client_mode_random_toggle_stress_{test}"
-
- self.generate_tests(
- self.soft_ap_and_client_mode_random_toggle_test,
- generate_name,
- tests,
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/WlanDriverRestartTest.py b/tests/wlan/functional/WlanDriverRestartTest.py
deleted file mode 100644
index d921144..0000000
--- a/tests/wlan/functional/WlanDriverRestartTest.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-
-from antlion import base_test, controllers
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-
-# Time to wait until an interface is recreated after the softmac WLAN driver
-# restarts.
-DELAY_FOR_DRIVER_RESTART_SEC = 2.0
-
-
-class WlanDriverRestartTest(base_test.AntlionBaseTest):
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
-
- def setup_class(self) -> None:
- super().setup_class()
-
- fuchsia_devices: list[FuchsiaDevice] = self.register_controller(
- controllers.fuchsia_device
- )
- self.fuchsia_device = fuchsia_devices[0]
-
- # Skip this test suite if the device isn't running the iwlwifi softmac WLAN driver.
- driver_list = self.fuchsia_device.ffx.run(["driver", "list"])
- if "iwlwifi" not in driver_list:
- raise signals.TestSkip(
- "No intel WiFi driver found on this device, skipping test"
- )
-
- def test_driver_restart_recreates_interface(self) -> None:
- """Verify the WLAN interface gets recreated after its driver restarts."""
- # Store existing phy and interface identifiers.
- phys = self.fuchsia_device.honeydew_fd.wlan_core.get_phy_id_list()
- asserts.assert_equal(len(phys), 1, "Expected one phy_id")
- old_interfaces = (
- self.fuchsia_device.honeydew_fd.wlan_core.get_iface_id_list()
- )
- asserts.assert_not_equal(old_interfaces, [], "Iface not found.")
-
- # Restarting should replace the old interface with a new one.
- self.fuchsia_device.ffx.run(
- [
- "driver",
- "restart",
- "fuchsia-pkg://fuchsia.com/iwlwifi#meta/iwlwifi.cm",
- ]
- )
-
- # Check for new phy and interface identifiers.
- timeout = time.time() + DELAY_FOR_DRIVER_RESTART_SEC
- while time.time() < timeout:
- new_interfaces = (
- self.fuchsia_device.honeydew_fd.wlan_core.get_iface_id_list()
- )
-
- if new_interfaces == old_interfaces:
- # Interface has not been deleted yet. Keep waiting.
- time.sleep(0.1)
- continue
- if len(new_interfaces) == 0:
- # Interface has not come back up yet. Keep waiting.
- time.sleep(0.1)
- continue
- if len(new_interfaces) == 1:
- # New interface has been added! All done here
- break
-
- asserts.fail(
- "More interfaces exist than before! \n"
- f"Old: {old_interfaces}\n"
- f"New: {new_interfaces}"
- )
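- # The "else" below belongs to the while loop: it runs only if the loop
- # exhausts the timeout without a break (no new interface appeared in time).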
- else:
- asserts.fail(
- f"New interface not created within {DELAY_FOR_DRIVER_RESTART_SEC}s"
- )
-
- phys = self.fuchsia_device.honeydew_fd.wlan_core.get_phy_id_list()
- asserts.assert_equal(len(phys), 1, "Expected one phy_id")
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/WlanPolicyInitiatedRoamTest.py b/tests/wlan/functional/WlanPolicyInitiatedRoamTest.py
deleted file mode 100644
index 14344e7..0000000
--- a/tests/wlan/functional/WlanPolicyInitiatedRoamTest.py
+++ /dev/null
@@ -1,350 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2024 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from dataclasses import dataclass
-from datetime import datetime, timedelta
-
-import fidl_fuchsia_wlan_common as f_wlan_common
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-@dataclass
-class TestParams:
- dut_security_mode: SecurityMode
- original_security_mode: SecurityMode
- original_band: hostapd_constants.BandType
- target_security_mode: SecurityMode
- target_band: hostapd_constants.BandType
- expect_roam: bool
-
-
-_DUT_SECURITY_MODES: frozenset[SecurityMode] = frozenset(
- [
- SecurityMode.OPEN,
- SecurityMode.WEP,
- SecurityMode.WPA,
- SecurityMode.WPA2,
- SecurityMode.WPA3,
- ]
-)
-
-_AP_SECURITY_MODES: frozenset[SecurityMode] = _DUT_SECURITY_MODES | frozenset(
- [
- SecurityMode.WPA_WPA2,
- SecurityMode.WPA2_WPA3,
- ]
-)
-
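-# Mixed-mode APs (WPA/WPA2 and WPA2/WPA3) accept clients configured for either
-# constituent mode, so they appear under multiple DUT modes below.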
-_DUT_SECURITY_MODE_TO_COMPATIBLE_AP_MODES: dict[
- SecurityMode, frozenset[SecurityMode]
-] = {
- SecurityMode.OPEN: frozenset([SecurityMode.OPEN]),
- SecurityMode.WEP: frozenset([SecurityMode.WEP]),
- SecurityMode.WPA: frozenset([SecurityMode.WPA, SecurityMode.WPA_WPA2]),
- SecurityMode.WPA2: frozenset(
- [
- SecurityMode.WPA2,
- SecurityMode.WPA_WPA2,
- SecurityMode.WPA2_WPA3,
- ]
- ),
- SecurityMode.WPA3: frozenset([SecurityMode.WPA3, SecurityMode.WPA2_WPA3]),
-}
-
-
-class WlanPolicyInitiatedRoamTest(base_test.WifiBaseTest):
- """Tests Fuchsia's WLAN Policy-initiated roam support.
-
- Testbed Requirements:
- * One Fuchsia device
- * One Whirlwind access point
- """
-
- def pre_run(self) -> None:
- test_args: list[tuple[TestParams]] = []
-
- for (
- dut_mode,
- compatible_ap_modes,
- ) in _DUT_SECURITY_MODE_TO_COMPATIBLE_AP_MODES.items():
- for ap_mode in compatible_ap_modes:
- # Same compatible security mode on both APs, 2.4 GHz to 5 GHz.
- test_args.append(
- (
- TestParams(
- dut_security_mode=dut_mode,
- original_security_mode=ap_mode,
- original_band=hostapd_constants.BandType.BAND_2G,
- target_security_mode=ap_mode,
- target_band=hostapd_constants.BandType.BAND_5G,
- expect_roam=True,
- ),
- )
- )
-
- # Same compatible security mode on both APs, 5 GHz to 2.4 GHz.
- test_args.append(
- (
- TestParams(
- dut_security_mode=dut_mode,
- original_security_mode=ap_mode,
- original_band=hostapd_constants.BandType.BAND_5G,
- target_security_mode=ap_mode,
- target_band=hostapd_constants.BandType.BAND_2G,
- expect_roam=True,
- ),
- )
- )
-
- # Test incompatible roams, which should all fail.
- incompatible_modes = _AP_SECURITY_MODES - compatible_ap_modes
- for incompatible_mode in incompatible_modes:
- test_args.append(
- (
- TestParams(
- dut_security_mode=dut_mode,
- original_security_mode=ap_mode,
- original_band=hostapd_constants.BandType.BAND_2G,
- target_security_mode=incompatible_mode,
- target_band=hostapd_constants.BandType.BAND_5G,
- expect_roam=False,
- ),
- ),
- )
-
- def generate_roam_test_name(test: TestParams) -> str:
- if test.expect_roam:
- expected = "roams"
- else:
- expected = "does_not_roam"
- return f"test_{test.dut_security_mode}_dut_{expected}_from_{test.original_security_mode}_{test.original_band}_to_{test.target_security_mode}_{test.target_band}"
-
- self.generate_tests(
- test_logic=self.setup_connect_attenuate_roam,
- name_func=generate_roam_test_name,
- arg_sets=test_args,
- )
-
- def setup_class(self) -> None:
- super().setup_class()
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- def teardown_class(self) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
- super().teardown_class()
-
- def teardown_test(self) -> None:
- self.dut.disconnect()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def on_fail(self, record: TestResultRecord) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
- super().on_fail(record)
-
- def setup_ap(
- self,
- ssid: str,
- security: Security | None = None,
- additional_ap_parameters: dict[str, int] | None = None,
- channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ) -> None:
- """Sets up an AP using the provided parameters.
-
- Args:
- ssid: SSID for the AP.
- security: security config for AP, defaults to None (open network
- with no password).
- additional_ap_parameters: A dictionary of parameters that can be set
- directly in the hostapd config file.
- channel: which channel number to set the AP to (default is
- AP_DEFAULT_CHANNEL_2G).
- """
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=channel,
- ssid=ssid,
- security=security,
- additional_ap_parameters=additional_ap_parameters,
- )
-
- def _get_client_mac(self) -> str:
- """Get the MAC address of the DUT client interface.
-
- Returns:
- str, MAC address of the DUT client interface.
- Raises:
- ValueError if there is no DUT client interface.
- WlanError if the DUT interface query fails.
- """
- for wlan_iface in self.dut.get_wlan_interface_id_list():
- result = self.fuchsia_device.honeydew_fd.wlan_core.query_iface2(
- wlan_iface
- )
- if result.role is f_wlan_common.WlanMacRole.CLIENT:
- return utils.mac_address_list_to_str(bytes(result.sta_addr))
- raise ValueError(
- "Failed to get client interface mac address. No client interface found."
- )
-
- # This is called in generate_tests.
- def setup_connect_attenuate_roam(self, test: TestParams) -> None:
- """Setup the APs, associate a DUT, and slowly reduce AP signal strength until roam.
-
- Args:
- test: Test parameters
- """
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- original_password = None
- if test.original_security_mode is not SecurityMode.OPEN:
- # Length 13, so it can be used for WEP or WPA
- original_password = utils.rand_ascii_str(13)
-
- # Setup original AP.
- original_security = Security(
- test.original_security_mode, original_password
- )
- self.setup_ap(
- ssid,
- security=original_security,
- channel=test.original_band.default_channel(),
- )
-
- asserts.assert_true(
- self.dut.associate(
- ssid,
- target_pwd=original_password,
- target_security=test.dut_security_mode,
- ),
- "Failed to associate.",
- )
- # Verify that DUT is actually associated (as seen from AP).
- client_mac = self._get_client_mac()
-
- if test.original_band == hostapd_constants.BandType.BAND_2G:
- original_identifier = self.access_point.wlan_2g
- elif test.original_band == hostapd_constants.BandType.BAND_5G:
- original_identifier = self.access_point.wlan_5g
-
- asserts.assert_true(
- self.access_point.sta_associated(original_identifier, client_mac),
- f"DUT is not associated on the {test.original_band} band",
- )
-
- # Setup target AP.
- target_security = Security(test.target_security_mode, original_password)
- self.setup_ap(
- ssid,
- security=target_security,
- channel=test.target_band.default_channel(),
- )
-
- if test.target_band == hostapd_constants.BandType.BAND_2G:
- target_identifier = self.access_point.wlan_2g
- elif test.target_band == hostapd_constants.BandType.BAND_5G:
- target_identifier = self.access_point.wlan_5g
-
- FULL_POWER_DBM = 23
- current_dbm = FULL_POWER_DBM
- NUM_ITERATIONS = 10
- PERIOD_S = 10
-
- for id in (original_identifier, target_identifier):
- # Reset back to full power.
- self.access_point.iwconfig.ap_iwconfig(
- id, f"txpower {FULL_POWER_DBM}"
- )
- self.access_point.iwconfig.ap_iwconfig(id, "txpower auto")
-
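- # Halve the original AP's tx power every PERIOD_S seconds for NUM_ITERATIONS
- # iterations (23 -> 11 -> 5 -> 2 -> 1 dBm, then held at the 1 dBm floor),
- # giving the DUT an opportunity to roam toward the stronger target AP.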
- for i in range(NUM_ITERATIONS):
- # Reduce power, but with a floor of 1 dBm.
- current_dbm = max(current_dbm // 2, 1)
- self.access_point.iwconfig.ap_iwconfig(
- original_identifier, f"txpower {current_dbm}"
- )
-
- period_deadline = datetime.now() + timedelta(seconds=PERIOD_S)
- while datetime.now() < period_deadline:
- # Check for STA on destination, and if it has roamed, end the test.
- if test.expect_roam:
- if self.access_point.sta_authorized(
- target_identifier, client_mac
- ):
- break
- # We want to detect if DUT disconnected from the original BSS without roaming to the
- # target BSS. Specifically, we want to avoid a false positive if DUT does a full
- # disconnect from the original BSS followed by a regular connect to the target BSS,
- # rather than roaming between them. This is not a perfect mechanism to detect this
- # case, but it suffices for manually run tests. Automated tests will need a better
- # way to detect this scenario.
- # TODO(https://fxbug.dev/359966771): Surface intermediate states to Antlion.
- if not self.access_point.sta_associated(
- original_identifier, client_mac
- ):
- raise signals.TestFailure(
- "DUT left original BSS without roaming to target BSS"
- )
- time.sleep(0.25)
-
- if test.expect_roam:
- # Verify that DUT roamed (as seen from AP).
- asserts.assert_true(
- self.access_point.sta_authenticated(
- target_identifier, client_mac
- ),
- f"DUT is not authenticated on the {test.target_band} band",
- )
- asserts.assert_true(
- self.access_point.sta_associated(target_identifier, client_mac),
- f"DUT is not associated on the {test.target_band} band",
- )
- asserts.assert_true(
- self.access_point.sta_authorized(target_identifier, client_mac),
- "DUT is not 802.1X authorized on the 5GHz band",
- )
- else:
- # DUT should have stayed on the original BSS.
- asserts.assert_true(
- self.access_point.sta_authenticated(
- original_identifier, client_mac
- ),
- f"DUT is not authenticated on the {test.original_band} band",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/WlanRebootTest.py b/tests/wlan/functional/WlanRebootTest.py
deleted file mode 100644
index 0002c79..0000000
--- a/tests/wlan/functional/WlanRebootTest.py
+++ /dev/null
@@ -1,508 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-import logging
-import os
-import time
-from dataclasses import dataclass
-from enum import Enum, StrEnum, auto, unique
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
-from antlion.controllers.ap_lib.hostapd_constants import (
- AP_SSID_LENGTH_2G,
- BandType,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-DUT_NETWORK_CONNECTION_TIMEOUT = 60
-
-
-@unique
-class DeviceType(StrEnum):
- AP = auto()
- DUT = auto()
-
-
-@unique
-class RebootType(StrEnum):
- SOFT = auto()
- HARD = auto()
-
-
-@unique
-class IpVersionType(Enum):
- IPV4 = auto()
- IPV6 = auto()
- DUAL_IPV4_IPV6 = auto()
-
- def ipv4(self) -> bool:
- match self:
- case IpVersionType.IPV4:
- return True
- case IpVersionType.IPV6:
- return False
- case IpVersionType.DUAL_IPV4_IPV6:
- return True
-
- def ipv6(self) -> bool:
- match self:
- case IpVersionType.IPV4:
- return False
- case IpVersionType.IPV6:
- return True
- case IpVersionType.DUAL_IPV4_IPV6:
- return True
-
- @staticmethod
- def all() -> list["IpVersionType"]:
- return [
- IpVersionType.IPV4,
- IpVersionType.IPV6,
- IpVersionType.DUAL_IPV4_IPV6,
- ]
-
-
-@dataclass
-class TestParams:
- reboot_device: DeviceType
- reboot_type: RebootType
- band: BandType
- security_mode: SecurityMode
- ip_version: IpVersionType
-
-
-class WlanRebootTest(base_test.WifiBaseTest):
- """Tests wlan reconnects in different reboot scenarios.
-
- Testbed Requirement:
- * One ACTS compatible device (dut)
- * One Whirlwind Access Point
- * One PduDevice
- """
-
- def pre_run(self) -> None:
- test_params: list[tuple[TestParams]] = []
- for (
- device_type,
- reboot_type,
- band,
- security_mode,
- ip_version,
- ) in itertools.product(
- # DeviceType,
- # RebootType,
- # BandType,
- # SecurityMode,
- # IpVersionType,
- #
- # TODO(https://github.com/python/mypy/issues/14688): Replace the code below
- # with the commented code above once the bug affecting StrEnum resolves.
- [e for e in DeviceType],
- [e for e in RebootType],
- [e for e in BandType],
- [SecurityMode.OPEN, SecurityMode.WPA2, SecurityMode.WPA3],
- [e for e in IpVersionType],
- ):
- test_params.append(
- (
- TestParams(
- device_type,
- reboot_type,
- band,
- security_mode,
- ip_version,
- ),
- )
- )
-
- def generate_test_name(t: TestParams) -> str:
- test_name = (
- "test"
- f"_{t.reboot_type}_reboot"
- f"_{t.reboot_device}"
- f"_{t.band}"
- f"_{t.security_mode}"
- )
- if t.ip_version.ipv4():
- test_name += "_ipv4"
- if t.ip_version.ipv6():
- test_name += "_ipv6"
- return test_name
-
- self.generate_tests(
- test_logic=self.run_reboot_test,
- name_func=generate_test_name,
- arg_sets=test_params,
- )
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- def setup_test(self) -> None:
- super().setup_test()
- self.access_point.stop_all_aps()
- self.dut.wifi_toggle_state(True)
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.disconnect()
- if self.fuchsia_device:
- self.fuchsia_device.configure_wlan()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.access_point.download_ap_logs(self.current_test_info.output_path)
-
- def teardown_test(self) -> None:
- # TODO(b/273923552): We take a snapshot here and before rebooting the
- # DUT for every test because the persistence component does not make the
- # inspect logs available for 120 seconds. This helps for debugging
- # issues where we need previous state.
- self.dut.take_bug_report(self.current_test_info.record)
- self.download_logs()
- self.access_point.stop_all_aps()
- self.dut.disconnect()
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.reset_wifi()
- if self.fuchsia_device:
- self.fuchsia_device.deconfigure_wlan()
- super().teardown_test()
-
- def setup_ap(
- self,
- ssid: str,
- band: BandType,
- ip_version: IpVersionType,
- security_mode: SecurityMode,
- password: str | None = None,
- ) -> None:
- """Setup ap with basic config.
-
- Args:
- ssid: The SSID to set up on the AP.
- band: The band to set up the AP with ('2g' or '5g').
- ip_version: The IP version(s) to use (IPv4, IPv6, or both).
- security_mode: The security mode.
- password: The PSK or passphrase.
- """
- # TODO(fxb/63719): Add varying AP parameters
- security_profile = Security(
- security_mode=security_mode, password=password
- )
-
- self.access_point.start_ap(
- hostapd_config=create_ap_preset(
- iface_wlan_2g=self.access_point.wlan_2g,
- iface_wlan_5g=self.access_point.wlan_5g,
- profile_name="whirlwind",
- channel=band.default_channel(),
- ssid=ssid,
- security=security_profile,
- # TODO(http://b/271628778): Remove ap_max_inactivity once
- # Fuchsia respects 802.11w (PMF) comeback-time.
- ap_max_inactivity=100 if band is BandType.BAND_5G else None,
- ),
- radvd_config=RadvdConfig() if ip_version.ipv6() else None,
- )
-
- if not ip_version.ipv4():
- self.access_point.stop_dhcp()
-
- self.log.info(f"Network (SSID: {ssid}) is up.")
-
- def ping_dut_to_ap(
- self,
- band: BandType,
- ip_version: IpVersionType,
- ) -> None:
- """Validate the DUT is pingable."""
- if band is BandType.BAND_2G:
- test_interface = self.access_point.wlan_2g
- elif band is BandType.BAND_5G:
- test_interface = self.access_point.wlan_5g
-
- if ip_version == IpVersionType.IPV4:
- ap_address = utils.get_addr(self.access_point.ssh, test_interface)
- elif ip_version == IpVersionType.IPV6:
- ap_address = utils.get_addr(
- self.access_point.ssh,
- test_interface,
- addr_type="ipv6_link_local",
- )
- else:
- raise TypeError(f"Invalid IP type: {ip_version}")
-
- if ap_address:
- if ip_version == IpVersionType.IPV4:
- ping_result = self.dut.ping(ap_address)
- else:
- ap_address = (
- f"{ap_address}%{self.dut.get_default_wlan_test_interface()}"
- )
- ping_result = self.dut.ping(ap_address)
- if ping_result.success:
- self.log.info("Ping was successful.")
- else:
- raise signals.TestFailure(
- f"Ping was unsuccessful: {ping_result}"
- )
- else:
-            raise ConnectionError("Failed to retrieve the AP's ping address.")
-
- def prepare_dut_for_reconnection(self) -> None:
- """Perform any actions to ready DUT for reconnection.
-
-        These actions will vary depending on the DUT, e.g. Android devices may
-        need to be woken up while ambient devices should not require any
-        interaction.
- """
- self.dut.wifi_toggle_state(True)
- for ad in self.android_devices:
- ad.droid.wakeUpNow()
-
- def wait_for_dut_network_connection(self, ssid: str) -> None:
-        """Checks if the device is connected to the given network, sleeping
-        1 second between retries.
-
- Args:
- ssid: ssid to check connection to.
- Raises:
-            ConnectionError: if the DUT is not connected before the timeout expires.
- """
- self.log.info(
- f"Checking if DUT is connected to {ssid} network. Will retry for "
- f"{DUT_NETWORK_CONNECTION_TIMEOUT} seconds."
- )
- timeout = time.time() + DUT_NETWORK_CONNECTION_TIMEOUT
- while time.time() < timeout:
- try:
- is_connected = self.dut.is_connected(ssid=ssid)
- except Exception as err:
- self.log.debug(
- f"SL4* call failed. Retrying in 1 second. Error: {err}"
- )
- is_connected = False
- finally:
- if is_connected:
- self.log.info("Success: DUT has connected.")
- break
- else:
- self.log.debug(
- f"DUT not connected to network {ssid}...retrying in 1 second."
- )
- time.sleep(1)
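-        # This `else` belongs to the `while` loop above: it runs only when the
-        # timeout expires without a successful connection (no `break`).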
- else:
- raise ConnectionError("DUT failed to connect to the network.")
-
- def write_csv_time_to_reconnect(
- self,
- test_name: str,
- reconnect_success: bool,
- time_to_reconnect: float = 0.0,
- ) -> None:
- """Writes the time to reconnect to a csv file.
- Args:
- test_name: the name of the test case
- reconnect_success: whether the test successfully reconnected or not
-            time_to_reconnect: the time from when the rebooted device came back
-                up to when it reassociated; ignored when reconnection failed,
-                in which case 'FAIL' is written instead.
- """
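-        # Each row is "<test name>,<seconds to reconnect>" on success, or
-        # "<test name>,'FAIL'" when reconnection failed.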
- csv_file_name = os.path.join(self.log_path, "time_to_reconnect.csv")
- self.log.info(f"Writing to {csv_file_name}")
- with open(csv_file_name, "a") as csv_file:
- if reconnect_success:
- csv_file.write(f"{test_name},{time_to_reconnect}\n")
- else:
- csv_file.write(f"{test_name},'FAIL'\n")
-
- def log_and_continue(
- self,
- ssid: str,
- time_to_reconnect: float = 0.0,
- error: Exception | None = None,
- ) -> None:
-        """Writes the time to reconnect to the csv file before continuing, used
-        in stress test runs.
-
-        Args:
-            ssid: the network the device attempted to reconnect to
-            time_to_reconnect: the time from when the rebooted device came back
-                up to when reassociation occurred.
-            error: error message to log before continuing with the test
- """
- if error:
- self.log.info(
- f"Device failed to reconnect to network {ssid}. Error: {error}"
- )
- self.write_csv_time_to_reconnect(
- f"{self.current_test_info.name}", False
- )
-
- else:
- self.log.info(
- f"Device successfully reconnected to network {ssid} after "
- f"{time_to_reconnect} seconds."
- )
- self.write_csv_time_to_reconnect(
- f"{self.current_test_info.name}", True, time_to_reconnect
- )
-
- def run_reboot_test(self, settings: TestParams) -> None:
- """Runs a reboot test based on a given config.
-        1. Sets up a network, associates the DUT, and saves the network.
- 2. Verifies the dut receives ip address(es).
- 3. Verifies traffic between DUT and AP (ping)
- 4. Reboots (hard or soft) the device (dut or ap).
- - If the ap was rebooted, setup the same network again.
- 5. Wait for reassociation or timeout.
-        6. If reassociation occurs:
- - Verifies the dut receives ip address(es).
- - Verifies traffic between DUT and AP (ping).
- 7. Logs time to reconnect (or failure to reconnect)
-
- Args:
- settings: TestParams dataclass containing the following values:
-            reboot_device: the device to reboot, either DUT or AP.
-            reboot_type: how to reboot the reboot_device, either hard or soft.
-            band: band to set up, either 2g or 5g.
-            security_mode: security mode to set up, either OPEN, WPA2, or WPA3.
-            ip_version: the IP version (ipv4 or ipv6).
- """
- # TODO(b/286443517): Properly support WLAN on android devices.
- assert (
- self.fuchsia_device is not None
- ), "Fuchsia device not found, test currently does not support android devices."
-
- ssid = utils.rand_ascii_str(AP_SSID_LENGTH_2G)
- reboot_device: DeviceType = settings.reboot_device
- reboot_type: RebootType = settings.reboot_type
- band: BandType = settings.band
- ip_version: IpVersionType = settings.ip_version
- security_mode: SecurityMode = settings.security_mode
- password: str | None = None
- if security_mode is not SecurityMode.OPEN:
- password = generate_random_password(security_mode=security_mode)
-
- # Skip hard reboots if no PDU present
- asserts.skip_if(
- reboot_type is RebootType.HARD and len(self.pdu_devices) == 0,
- "Hard reboots require a PDU device.",
- )
-
- self.setup_ap(
- ssid,
- band,
- ip_version,
- security_mode,
- password,
- )
-
- if not self.dut.associate(
- ssid,
- target_security=security_mode,
- target_pwd=password,
- ):
- raise EnvironmentError("Initial network connection failed.")
-
- test_interface = self.dut.get_default_wlan_test_interface()
-
- if ip_version.ipv4():
- self.fuchsia_device.wait_for_ipv4_addr(test_interface)
- self.ping_dut_to_ap(band, IpVersionType.IPV4)
- if ip_version.ipv6():
- self.fuchsia_device.wait_for_ipv6_addr(test_interface)
- self.ping_dut_to_ap(band, IpVersionType.IPV6)
-
- # TODO(b/273923552): We take a snapshot here and during test
- # teardown for every test because the persistence component does not
- # make the inspect logs available for 120 seconds. This helps for
- # debugging issues where we need previous state.
- self.dut.take_bug_report(self.current_test_info.record)
-
- # DUT reboots
- if reboot_device is DeviceType.DUT:
- if reboot_type is RebootType.SOFT:
- self.fuchsia_device.reboot()
- elif reboot_type is RebootType.HARD:
- self.dut.hard_power_cycle(self.pdu_devices)
-
- # AP reboots
- elif reboot_device is DeviceType.AP:
- if reboot_type is RebootType.SOFT:
- self.log.info("Cleanly stopping ap.")
- self.access_point.stop_all_aps()
- elif reboot_type is RebootType.HARD:
- self.access_point.hard_power_cycle(self.pdu_devices)
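-            # Bring the same network back up so the DUT can reassociate.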
- self.setup_ap(ssid, band, ip_version, security_mode, password)
-
- self.prepare_dut_for_reconnection()
- uptime = time.time()
- try:
- try:
- self.wait_for_dut_network_connection(ssid)
- except ConnectionError as e:
- if (
- reboot_device is DeviceType.DUT
- and security_mode is SecurityMode.WPA3
- ):
- # TODO(http://b/271628778): Remove this try/except statement
- # once Fuchsia respects 802.11w (PMF) comeback-time.
- raise signals.TestSkip(
- f"Received expected ConnectionError due to http://b/271628778: {e}"
- )
- raise e
- time_to_reconnect = time.time() - uptime
-
- if ip_version.ipv4():
- self.fuchsia_device.wait_for_ipv4_addr(test_interface)
- self.ping_dut_to_ap(band, IpVersionType.IPV4)
- if ip_version.ipv6():
- self.fuchsia_device.wait_for_ipv6_addr(test_interface)
- self.ping_dut_to_ap(band, IpVersionType.IPV6)
-
- except ConnectionError as err:
- self.log_and_continue(ssid, error=err)
- raise signals.TestFailure(
- f"Failed to reconnect to {ssid} after reboot."
- )
- else:
- self.log_and_continue(ssid, time_to_reconnect=time_to_reconnect)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/WlanScanTest.py b/tests/wlan/functional/WlanScanTest.py
deleted file mode 100644
index 10bf93f..0000000
--- a/tests/wlan/functional/WlanScanTest.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""
-This test exercises basic scanning functionality to confirm expected behavior
-related to wlan scanning
-"""
-
-import itertools
-import logging
-from dataclasses import dataclass
-from datetime import datetime
-
-import fidl_fuchsia_wlan_common_security as fidl_security
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib.hostapd_constants import BandType
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.wifi import base_test
-
-
-@dataclass
-class TestParams:
- band: BandType
- security: SecurityMode
-
-
-class WlanScanTest(base_test.WifiBaseTest):
- """WLAN scan test class.
-
- Test Bed Requirement:
- * One or more Fuchsia devices
- * Several Wi-Fi networks visible to the device, including an open Wi-Fi
-        network or an OnHub/Google Wifi.
- """
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
-
- if len(self.access_points) < 1:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- def pre_run(self) -> None:
- test_params: list[tuple[TestParams]] = []
- for (
- band,
- security,
- ) in itertools.product(
- # BandType,
- # [SecurityMode.OPEN, SecurityMode.WPA2],
- #
- # TODO(https://github.com/python/mypy/issues/14688): Replace the code below
- # with the commented code above once the bug affecting StrEnum resolves.
- [e for e in BandType],
- [SecurityMode.OPEN, SecurityMode.WPA2],
- ):
- test_params.append(
- (
- TestParams(
- band,
- security,
- ),
- )
- )
-
- def generate_test_name(t: TestParams) -> str:
- return (
- "test_scan_while_connected"
- f"_{t.security}_open_network"
- f"_{t.band}"
- )
-
- self.generate_tests(
- test_logic=self.scan_while_connected,
- name_func=generate_test_name,
- arg_sets=test_params,
- )
-
- def setup_class(self) -> None:
- super().setup_class()
-
- for fd in self.fuchsia_devices:
- fd.configure_wlan(association_mechanism="drivers")
-
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- for fd in self.fuchsia_devices:
- self.on_device_fail(fd, record)
- fd.configure_wlan(association_mechanism="drivers")
-
- def teardown_test(self) -> None:
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_core.disconnect()
- self.access_point.stop_all_aps()
-
- def teardown_class(self) -> None:
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def scan_while_connected(self, t: TestParams) -> None:
-        """Connects to a specified network and initiates a scan."""
- ssid = utils.rand_ascii_str(20)
- password = (
- utils.rand_ascii_str(10)
- if t.security is SecurityMode.WPA2
- else None
- )
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=t.band.default_channel(),
- ssid=ssid,
- security=Security(
- security_mode=t.security,
- password=password,
- ),
- )
-
- if t.security == SecurityMode.OPEN:
- protocol = fidl_security.Protocol.OPEN
- credentials = None
- elif t.security == SecurityMode.WPA2:
- if password is None:
- raise signals.TestError("Password is required for WPA2")
- protocol = fidl_security.Protocol.WPA2_PERSONAL
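-            # The FIDL WPA credentials carry the passphrase as a list of bytes.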
- credentials = fidl_security.Credentials(
- wpa=fidl_security.WpaCredentials(
- passphrase=(list(password.encode("ascii")))
- )
- )
- else:
- raise signals.TestFailure(f"Unhandled security mode {t.security}")
- authentication = fidl_security.Authentication(
- protocol=protocol, credentials=credentials
- )
-
- for fd in self.fuchsia_devices:
- name = fd.honeydew_fd.device_name
-
- self.log.info('[%s] Scanning for ssid "%s"', name, ssid)
- scan_results = fd.honeydew_fd.wlan_core.scan_for_bss_info()
- asserts.assert_in(
- ssid, scan_results, f'Scan results did not include "{ssid}"'
- )
- target_bss = scan_results[ssid]
- asserts.assert_equal(
- len(target_bss),
- 1,
- f'Expected 1 BSS for "{ssid}", got {len(target_bss)}',
- )
-
- self.log.info('[%s] Connecting to ssid "%s"', name, ssid)
- asserts.assert_true(
- fd.honeydew_fd.wlan_core.connect(
- ssid,
- password,
- target_bss[0],
- authentication,
- ),
- f"Expected connect to {ssid} to succeed",
- )
-
- self.log.info('[%s] Scanning while connected to "%s"', name, ssid)
- self.basic_scan_request(fd, ssid)
-
- def basic_scan_request(self, fd: FuchsiaDevice, ssid: str) -> None:
- """Verify ssid is discoverable.
-
- Args:
- fd: A fuchsia device
- ssid: ssid of network to validate is in scan results
- """
- start_time = datetime.now()
- scan_results = fd.honeydew_fd.wlan_core.scan_for_bss_info()
- self.log.info("Scan contained %d results", len(scan_results))
- self.log.debug("Scan results: %s", scan_results)
- total_time_ms = (datetime.now() - start_time).total_seconds() * 1000
- self.log.info(f"Scan time: {total_time_ms:.2f} ms")
-
- asserts.assert_in(
- ssid, scan_results, f'Scan results did not include "{ssid}"'
- )
-
- def test_basic_scan_request(self) -> None:
- """Verify a general scan trigger returns at least one result"""
- ssid = utils.rand_ascii_str(20)
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=BandType.BAND_2G.default_channel(),
- ssid=ssid,
- security=Security(
- security_mode=SecurityMode.OPEN,
- password=None,
- ),
- )
- for fd in self.fuchsia_devices:
- self.basic_scan_request(fd, ssid)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/WlanTargetSecurityTest.py b/tests/wlan/functional/WlanTargetSecurityTest.py
deleted file mode 100644
index cd99872..0000000
--- a/tests/wlan/functional/WlanTargetSecurityTest.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib.hostapd_constants import (
- AP_DEFAULT_CHANNEL_5G,
- AP_SSID_LENGTH_5G,
-)
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-# TODO(fxb/68956): Add security protocol check to mixed mode tests when info is
-# available.
-class WlanTargetSecurityTest(base_test.WifiBaseTest):
- """Tests Fuchsia's target security concept and security upgrading
-
- Testbed Requirements:
- * One Fuchsia device
- * One Whirlwind Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
-
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- def teardown_class(self) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
- super().teardown_class()
-
- def teardown_test(self) -> None:
- self.dut.disconnect()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def on_fail(self, record: TestResultRecord) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
- super().on_fail(record)
-
- def setup_ap(
- self, security_mode: SecurityMode = SecurityMode.OPEN
- ) -> tuple[str, str]:
- """Sets up an AP using the provided security mode.
-
- Args:
-            security_mode: security mode for the AP.
- Returns:
-            Tuple, (ssid, password). Returns a password even for open
- security, since non-open target securities require a credential
- to attempt a connection.
- """
- ssid = utils.rand_ascii_str(AP_SSID_LENGTH_5G)
- # Length 13, so it can be used for WEP or WPA
- password = utils.rand_ascii_str(13)
- security_profile = Security(
- security_mode=security_mode, password=password
- )
-
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=AP_DEFAULT_CHANNEL_5G,
- ssid=ssid,
- security=security_profile,
- )
-
- return (ssid, password)
-
- # Open Security on AP
- def test_associate_open_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap()
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Failed to associate.",
- )
-
- def test_reject_open_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap()
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_reject_open_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap()
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_reject_open_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap()
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_reject_open_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap()
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Should not have associated.",
- )
-
- # WEP Security on AP
- def test_reject_wep_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap(SecurityMode.WEP)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Should not have associated.",
- )
-
- def test_associate_wep_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WEP)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_reject_wep_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WEP)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_reject_wep_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WEP)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_reject_wep_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WEP)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Should not have associated.",
- )
-
- # WPA Security on AP
- def test_reject_wpa_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap(SecurityMode.WPA)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Should not have associated.",
- )
-
- def test_reject_wpa_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_associate_wpa_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_reject_wpa_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_reject_wpa_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Should not have associated.",
- )
-
- # WPA2 Security on AP
- def test_reject_wpa2_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap(SecurityMode.WPA2)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Should not have associated.",
- )
-
- def test_reject_wpa2_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_associate_wpa2_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_associate_wpa2_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_reject_wpa2_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Should not have associated.",
- )
-
- # WPA/WPA2 Security on AP
- def test_reject_wpa_wpa2_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap(SecurityMode.WPA_WPA2)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Should not have associated.",
- )
-
- def test_reject_wpa_wpa2_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_associate_wpa_wpa2_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_associate_wpa_wpa2_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_reject_wpa_wpa2_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Should not have associated.",
- )
-
- # WPA3 Security on AP
- def test_reject_wpa3_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap(SecurityMode.WPA3)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Should not have associated.",
- )
-
- def test_reject_wpa3_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA3)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_associate_wpa3_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA3)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
-            "Expected failure to associate. WPA credentials for WPA3 were "
-            "temporarily disabled, see https://fxbug.dev/42166758 for context. "
-            "If this feature has been re-enabled, please update this test's "
-            "expectation.",
- )
-
- def test_associate_wpa3_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA3)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_associate_wpa3_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA3)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Failed to associate.",
- )
-
- # WPA2/WPA3 Security on AP
- def test_reject_wpa2_wpa3_ap_with_open_target_security(self) -> None:
- ssid, _ = self.setup_ap(SecurityMode.WPA2_WPA3)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Should not have associated.",
- )
-
- def test_reject_wpa2_wpa3_ap_with_wep_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WEP, target_pwd=password),
- "Should not have associated.",
- )
-
- def test_associate_wpa2_wpa3_ap_with_wpa_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
- asserts.assert_false(
- self.dut.associate(ssid, SecurityMode.WPA, target_pwd=password),
-            "Expected failure to associate. WPA credentials for WPA3 were "
-            "temporarily disabled, see https://fxbug.dev/42166758 for context. "
-            "If this feature has been re-enabled, please update this test's "
-            "expectation.",
- )
-
- def test_associate_wpa2_wpa3_ap_with_wpa2_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA2, target_pwd=password),
- "Failed to associate.",
- )
-
- def test_associate_wpa2_wpa3_ap_with_wpa3_target_security(self) -> None:
- ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.WPA3, target_pwd=password),
- "Failed to associate.",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/functional/WlanWirelessNetworkManagementTest.py b/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
deleted file mode 100644
index 30b2617..0000000
--- a/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
+++ /dev/null
@@ -1,555 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from dataclasses import dataclass
-from datetime import datetime, timedelta, timezone
-from typing import FrozenSet
-
-import fidl_fuchsia_wlan_common as f_wlan_common
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.radio_measurement import (
- BssidInformation,
- BssidInformationCapabilities,
- NeighborReportElement,
- PhyType,
-)
-from antlion.controllers.ap_lib.wireless_network_management import (
- BssTransitionCandidateList,
- BssTransitionManagementRequest,
-)
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-@dataclass
-class TestParams:
- security_mode: SecurityMode
-
-
-# Antlion can see (via the wlan_features config directive) whether WNM features
-# are enabled, and runs or skips tests depending on presence of WNM features.
-class WlanWirelessNetworkManagementTest(base_test.WifiBaseTest):
- """Tests Fuchsia's Wireless Network Management (AKA 802.11v) support.
-
- Testbed Requirements:
- * One Fuchsia device
- * One Whirlwind access point
-
- Existing Fuchsia drivers do not yet support WNM features out-of-the-box, so this
- suite skips certain tests depending on whether specific WNM features are enabled.
- """
-
- def pre_run(self) -> None:
- test_args: list[tuple[TestParams]] = []
-
- SECURITY_MODES = (
- SecurityMode.OPEN,
- SecurityMode.WEP,
- SecurityMode.WPA,
- SecurityMode.WPA2,
- SecurityMode.WPA3,
- )
- for security_mode in SECURITY_MODES:
- test_args.append(
- (
- TestParams(
- security_mode=security_mode,
- ),
- )
- )
-
- def generate_roam_on_btm_req_test_name(test: TestParams) -> str:
- return f"test_roam_on_btm_req_from_{test.security_mode}_2g_to_{test.security_mode}_5g"
-
- self.generate_tests(
- test_logic=self.setup_connect_roam_on_btm_req,
- name_func=generate_roam_on_btm_req_test_name,
- arg_sets=test_args,
- )
-
- def setup_class(self) -> None:
- super().setup_class()
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- def teardown_class(self) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
- super().teardown_class()
-
- def teardown_test(self) -> None:
- self.dut.disconnect()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def on_fail(self, record: TestResultRecord) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
- super().on_fail(record)
-
- def setup_ap(
- self,
- ssid: str,
- security: Security | None = None,
- additional_ap_parameters: dict[str, int] | None = None,
- channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
- ) -> None:
- """Sets up an AP using the provided parameters.
-
- Args:
- ssid: SSID for the AP.
- security: security config for AP, defaults to None (open network
- with no password).
- additional_ap_parameters: A dictionary of parameters that can be set
- directly in the hostapd config file.
- channel: which channel number to set the AP to (default is
- AP_DEFAULT_CHANNEL_2G).
- wnm_features: Wireless Network Management features to enable
- (default is no WNM features).
- """
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=channel,
- ssid=ssid,
- security=security,
- additional_ap_parameters=additional_ap_parameters,
- wnm_features=wnm_features,
- )
-
- def _get_client_mac(self) -> str:
- """Get the MAC address of the DUT client interface.
-
- Returns:
- str, MAC address of the DUT client interface.
- Raises:
- ValueError if there is no DUT client interface.
- WlanError if the DUT interface query fails.
- """
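-        # Query each WLAN interface and return the MAC of the first one running
-        # in the client role.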
- for wlan_iface in self.dut.get_wlan_interface_id_list():
- result = self.fuchsia_device.honeydew_fd.wlan_core.query_iface(
- wlan_iface
- )
- if result.role == f_wlan_common.WlanMacRole.CLIENT:
- return utils.mac_address_list_to_str(bytes(result.sta_addr))
- raise ValueError(
- "Failed to get client interface mac address. No client interface found."
- )
-
- def test_bss_transition_is_not_advertised_when_ap_supported_dut_unsupported(
- self,
- ) -> None:
- if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
- raise signals.TestSkip(
- "skipping test because BTM feature is present"
- )
-
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- wnm_features = frozenset(
- [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
- )
- self.setup_ap(ssid, wnm_features=wnm_features)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Failed to associate.",
- )
- asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
- client_mac = self._get_client_mac()
- # Verify that DUT is actually associated (as seen from AP).
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT is not associated on the 2.4GHz band",
- )
-
- ext_capabilities = self.access_point.get_sta_extended_capabilities(
- self.access_point.wlan_2g, client_mac
- )
- asserts.assert_false(
- ext_capabilities.bss_transition,
- "DUT is incorrectly advertising BSS Transition Management support",
- )
-
- def test_bss_transition_is_advertised_when_ap_supported_dut_supported(
- self,
- ) -> None:
- if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
- raise signals.TestSkip(
- "skipping test because BTM feature is not present"
- )
-
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- wnm_features = frozenset(
- [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
- )
- self.setup_ap(ssid, wnm_features=wnm_features)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Failed to associate.",
- )
- asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
- client_mac = self._get_client_mac()
- # Verify that DUT is actually associated (as seen from AP).
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT is not associated on the 2.4GHz band",
- )
-
- ext_capabilities = self.access_point.get_sta_extended_capabilities(
- self.access_point.wlan_2g, client_mac
- )
- asserts.assert_true(
- ext_capabilities.bss_transition,
- "DUT is not advertising BSS Transition Management support",
- )
-
- def test_wnm_sleep_mode_is_not_advertised_when_ap_supported_dut_unsupported(
- self,
- ) -> None:
- if self.dut.feature_is_present("WNM_SLEEP_MODE"):
- raise signals.TestSkip(
- "skipping test because WNM feature is present"
- )
-
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- wnm_features = frozenset([hostapd_constants.WnmFeature.WNM_SLEEP_MODE])
- self.setup_ap(ssid, wnm_features=wnm_features)
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Failed to associate.",
- )
- asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
- client_mac = self._get_client_mac()
- # Verify that DUT is actually associated (as seen from AP).
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT is not associated on the 2.4GHz band",
- )
-
- ext_capabilities = self.access_point.get_sta_extended_capabilities(
- self.access_point.wlan_2g, client_mac
- )
- asserts.assert_false(
- ext_capabilities.wnm_sleep_mode,
- "DUT is incorrectly advertising WNM Sleep Mode support",
- )
-
- # This is called in generate_tests.
- def setup_connect_roam_on_btm_req(self, test: TestParams) -> None:
-        """Set up the APs, associate the DUT, and roam when a BTM request is received.
-
- Args:
- test: Test parameters
- """
- if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
- raise signals.TestSkip(
- "skipping test because BTM feature is not present"
- )
-
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- password = None
- if test.security_mode is not SecurityMode.OPEN:
- # Length 13, so it can be used for WEP or WPA
- password = utils.rand_ascii_str(13)
-
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
-        )
-        security = Security(test.security_mode, password)
-
-        # Setup 2.4 GHz AP.
- self.setup_ap(
- ssid,
- security=security,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- wnm_features=wnm_features,
- )
-
- asserts.assert_true(
- self.dut.associate(
- ssid, target_pwd=password, target_security=test.security_mode
- ),
- "Failed to associate.",
- )
- # Verify that DUT is actually associated (as seen from AP).
- client_mac = self._get_client_mac()
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT is not associated on the 2.4GHz band",
- )
-
- # Setup 5 GHz AP with same SSID.
- self.setup_ap(
- ssid,
- security=security,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- wnm_features=wnm_features,
- )
-
- # Construct a BTM request.
- dest_bssid = self.access_point.get_bssid_from_ssid(
- ssid,
- hostapd_constants.BandType.BAND_5G,
- )
- dest_bssid_info = BssidInformation(
- security=True, capabilities=BssidInformationCapabilities()
- )
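-        # Advertise the 5 GHz BSS as a roam candidate via a neighbor report
-        # element in the BTM request.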
- neighbor_5g_ap = NeighborReportElement(
- dest_bssid,
- dest_bssid_info,
- operating_class=116,
- channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- phy_type=PhyType.VHT,
- )
- btm_req = BssTransitionManagementRequest(
- preferred_candidate_list_included=True,
- disassociation_imminent=True,
- candidate_list=BssTransitionCandidateList([neighbor_5g_ap]),
- )
-
- # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
- # TODO(fxbug.dev/42068735) Remove when fixed, or when non-firmware BTM support is merged.
- time.sleep(5)
-
- # Send BTM request from 2.4 GHz AP to DUT
- self.access_point.send_bss_transition_management_req(
- self.access_point.wlan_2g, client_mac, btm_req
- )
-
- # Give DUT time to roam.
- ROAM_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
- while datetime.now(timezone.utc) < ROAM_DEADLINE:
- if self.access_point.sta_authorized(
- self.access_point.wlan_5g, client_mac
- ):
- break
- else:
- time.sleep(0.25)
-
- # Verify that DUT roamed (as seen from AP).
- asserts.assert_true(
- self.access_point.sta_authenticated(
- self.access_point.wlan_5g, client_mac
- ),
- "DUT is not authenticated on the 5GHz band",
- )
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_5g, client_mac
- ),
- "DUT is not associated on the 5GHz band",
- )
- asserts.assert_true(
- self.access_point.sta_authorized(
- self.access_point.wlan_5g, client_mac
- ),
- "DUT is not 802.1X authorized on the 5GHz band",
- )
-
- def test_btm_req_ignored_dut_unsupported(self) -> None:
- if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
- raise signals.TestSkip(
- "skipping test because BTM feature is present"
- )
-
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- wnm_features = frozenset(
- [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
- )
- # Setup 2.4 GHz AP.
- self.setup_ap(
- ssid,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- wnm_features=wnm_features,
- )
-
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Failed to associate.",
- )
- # Verify that DUT is actually associated (as seen from AP).
- client_mac = self._get_client_mac()
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT is not associated on the 2.4GHz band",
- )
-
- # Setup 5 GHz AP with same SSID.
- self.setup_ap(
- ssid,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- wnm_features=wnm_features,
- )
-
- # Construct a BTM request.
- dest_bssid = self.access_point.get_bssid_from_ssid(
- ssid,
- hostapd_constants.BandType.BAND_5G,
- )
- dest_bssid_info = BssidInformation(
- security=True, capabilities=BssidInformationCapabilities()
- )
- neighbor_5g_ap = NeighborReportElement(
- dest_bssid,
- dest_bssid_info,
- operating_class=126,
- channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- phy_type=PhyType.VHT,
- )
- btm_req = BssTransitionManagementRequest(
- disassociation_imminent=True,
- candidate_list=BssTransitionCandidateList([neighbor_5g_ap]),
- )
-
- # Send BTM request from 2.4 GHz AP to DUT
- self.access_point.send_bss_transition_management_req(
- self.access_point.wlan_2g, client_mac, btm_req
- )
-
- # Check that DUT has not roamed.
- ROAM_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
- while datetime.now(timezone.utc) < ROAM_DEADLINE:
- # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
- if self.access_point.sta_associated(
- self.access_point.wlan_5g, client_mac
- ):
- raise signals.TestFailure(
- "DUT unexpectedly roamed to target BSS after BTM request"
- )
- else:
- time.sleep(0.25)
-
- # DUT should have stayed associated to original AP.
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT unexpectedly lost association on the 2.4GHz band after BTM request",
- )
-
- def test_btm_req_target_ap_rejects_reassoc(self) -> None:
- if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
- raise signals.TestSkip(
- "skipping test because BTM feature is not present"
- )
-
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- wnm_features = frozenset(
- [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
- )
- # Setup 2.4 GHz AP.
- self.setup_ap(
- ssid,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- wnm_features=wnm_features,
- )
-
- asserts.assert_true(
- self.dut.associate(ssid, SecurityMode.OPEN),
- "Failed to associate.",
- )
- # Verify that DUT is actually associated (as seen from AP).
- client_mac = self._get_client_mac()
- asserts.assert_true(
- self.access_point.sta_associated(
- self.access_point.wlan_2g, client_mac
- ),
- "DUT is not associated on the 2.4GHz band",
- )
-
- # Setup 5 GHz AP with same SSID, but reject all STAs.
- reject_all_sta_param = {"max_num_sta": 0}
- self.setup_ap(
- ssid,
- additional_ap_parameters=reject_all_sta_param,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- wnm_features=wnm_features,
- )
-
- # Construct a BTM request.
- dest_bssid = self.access_point.get_bssid_from_ssid(
- ssid,
- hostapd_constants.BandType.BAND_5G,
- )
- dest_bssid_info = BssidInformation(
- security=True, capabilities=BssidInformationCapabilities()
- )
- neighbor_5g_ap = NeighborReportElement(
- dest_bssid,
- dest_bssid_info,
- operating_class=116,
- channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- phy_type=PhyType.VHT,
- )
- btm_req = BssTransitionManagementRequest(
- disassociation_imminent=True,
- candidate_list=BssTransitionCandidateList([neighbor_5g_ap]),
- )
-
- # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
- # TODO(fxbug.dev/42068735) Remove when fixed, or when non-firmware BTM support is merged.
- time.sleep(5)
-
- # Send BTM request from 2.4 GHz AP to DUT
- self.access_point.send_bss_transition_management_req(
- self.access_point.wlan_2g, client_mac, btm_req
- )
-
- # Check that DUT has not reassociated.
- ROAM_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
- while datetime.now(timezone.utc) < ROAM_DEADLINE:
- # Check that DUT has not reassociated to 5 GHz AP (as seen from AP).
- if self.access_point.sta_associated(
- self.access_point.wlan_5g, client_mac
- ):
- raise signals.TestFailure(
- "DUT unexpectedly roamed to 5GHz band"
- )
- else:
- time.sleep(0.25)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/misc/BUILD.gn b/tests/wlan/misc/BUILD.gn
deleted file mode 100644
index 167822b..0000000
--- a/tests/wlan/misc/BUILD.gn
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("wlan_interface_test") {
- main_source = "WlanInterfaceTest.py"
- environments = display_envs
-}
-
-antlion_host_test("wlan_misc_scenario") {
- main_source = "WlanMiscScenarioTest.py"
- environments = display_ap_envs
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":wlan_interface_test",
- ":wlan_misc_scenario",
- ]
-}
diff --git a/tests/wlan/misc/WlanInterfaceTest.py b/tests/wlan/misc/WlanInterfaceTest.py
deleted file mode 100644
index a402459..0000000
--- a/tests/wlan/misc/WlanInterfaceTest.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import test_runner
-
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-class WlanInterfaceTest(base_test.WifiBaseTest):
- def setup_class(self) -> None:
- super().setup_class()
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- def test_destroy_iface(self) -> None:
- """Test that we don't error out when destroying the WLAN interface.
-
- Steps:
- 1. Find a wlan interface
- 2. Destroy it
-
- Expected Result:
- Verify there are no errors in destroying the wlan interface.
-
- Returns:
- signals.TestPass if no errors
- signals.TestFailure if there are any errors during the test.
-
- TAGS: WLAN
- Priority: 1
- """
- wlan_interfaces = self.dut.get_wlan_interface_id_list()
- self.dut.destroy_wlan_interface(wlan_interfaces[0])
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/misc/WlanMiscScenarioTest.py b/tests/wlan/misc/WlanMiscScenarioTest.py
deleted file mode 100644
index 7a62abc..0000000
--- a/tests/wlan/misc/WlanMiscScenarioTest.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from mobly import asserts, signals, test_runner
-from mobly.records import TestResultRecord
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-
-class WlanMiscScenarioTest(base_test.WifiBaseTest):
-    """Miscellaneous scenario tests, usually written to reproduce specific bugs,
-    that do not fit into another test category but should still be run in CI to
-    catch regressions.
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- self.dut = self.get_dut(AssociationMode.POLICY)
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- def teardown_class(self) -> None:
- self.dut.disconnect()
- self.access_point.stop_all_aps()
-
- def teardown_test(self) -> None:
- self.dut.disconnect()
- self.download_logs()
- self.access_point.stop_all_aps()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.dut.disconnect()
- self.access_point.stop_all_aps()
-
- def test_connect_to_wpa2_after_wpa3_rejection(self) -> None:
- """Test association to non-WPA3 network after receiving a WPA3
- rejection, which was triggering a firmware hang.
-
- Bug: https://bugs.fuchsia.dev/p/fuchsia/issues/detail?id=71233
- """
- # Setup a WPA3 network
- wpa3_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=wpa3_ssid,
- security=Security(
- security_mode=SecurityMode.WPA3,
- password=generate_random_password(SecurityMode.WPA3),
- ),
- )
- # Attempt to associate with wrong password, expecting failure
- self.log.info("Attempting to associate WPA3 with wrong password.")
- asserts.assert_false(
- self.dut.associate(
- wpa3_ssid, SecurityMode.WPA3, target_pwd="wrongpass"
- ),
- "Associated with WPA3 network using the wrong password",
- )
-
- self.access_point.stop_all_aps()
-
- # Setup a WPA2 Network
- wpa2_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
- wpa2_password = generate_random_password(SecurityMode.WPA2)
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=wpa2_ssid,
- security=Security(
- security_mode=SecurityMode.WPA2, password=wpa2_password
- ),
- )
-
- # Attempt to associate, expecting success
- self.log.info("Attempting to associate with WPA2 network.")
- asserts.assert_true(
- self.dut.associate(
- wpa2_ssid,
- SecurityMode.WPA2,
- target_pwd=wpa2_password,
- ),
- "Failed to associate with WPA2 network after a WPA3 rejection.",
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/performance/BUILD.gn b/tests/wlan/performance/BUILD.gn
deleted file mode 100644
index 0494873..0000000
--- a/tests/wlan/performance/BUILD.gn
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("channel_sweep_test") {
- main_source = "ChannelSweepTest.py"
- environments = display_ap_iperf_envs
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("channel_sweep_test_quick") {
- main_source = "ChannelSweepTest.py"
- environments = display_ap_iperf_envs
- test_cases = [
- "test_US_wpa2_channel_8_20mhz",
- "test_US_wpa2_channel_40_80mhz", # non-DFS 5GHz channel
- "test_US_wpa2_channel_100_80mhz", # DFS 5GHz channel
- "test_US_wpa2_channel_165_20mhz",
- ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_rvr_test_2g_open") {
- main_source = "WlanRvrTest.py"
- test_params = "rvr_settings.yaml"
- environments = display_ap_iperf_attenuator_envs
- test_cases = [ "re:test_rvr_11n_2g_20mhz_open_.*" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_rvr_test_2g_wpa2") {
- main_source = "WlanRvrTest.py"
- test_params = "rvr_settings.yaml"
- environments = display_ap_iperf_attenuator_envs
- test_cases = [ "re:test_rvr_11n_2g_20mhz_wpa2_.*" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_rvr_test_5g_open") {
- main_source = "WlanRvrTest.py"
- test_params = "rvr_settings.yaml"
- environments = display_ap_iperf_attenuator_envs
- test_cases = [ "re:test_rvr_11ac_5g_80mhz_open_.*" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_rvr_test_5g_wpa2") {
- main_source = "WlanRvrTest.py"
- test_params = "rvr_settings.yaml"
- environments = display_ap_iperf_attenuator_envs
- test_cases = [ "re:test_rvr_11ac_5g_80mhz_wpa2_.*" ]
- deps = [ "//third_party/iperf" ]
-}
-
-antlion_host_test("wlan_wmm_test") {
- main_source = "WlanWmmTest.py"
-
- # Requires a second station and custom configuration. There are no available
- # testbeds to support this toplogy. This will remain an at-desk test until an
- # infra-hosted testbed matching this topology is supported.
- environments = []
- deps = [ "//third_party/iperf" ]
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":wlan_rvr_test_2g_open",
- ":wlan_rvr_test_2g_wpa2",
- ":wlan_rvr_test_5g_open",
- ":wlan_rvr_test_5g_wpa2",
- ":channel_sweep_test_quick",
- ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [ ":channel_sweep_test_quick" ]
-}
-
-group("e2e_tests_manual") {
- testonly = true
- public_deps = [
- # Running ChannelSweepTest is usually only necessary when verifying new WLAN
- # firmware patches. Take it out of automation; it takes too long otherwise.
- ":channel_sweep_test",
- ":wlan_wmm_test",
- ]
-}
diff --git a/tests/wlan/performance/ChannelSweepTest.py b/tests/wlan/performance/ChannelSweepTest.py
deleted file mode 100644
index e87fcbb..0000000
--- a/tests/wlan/performance/ChannelSweepTest.py
+++ /dev/null
@@ -1,708 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import time
-from dataclasses import dataclass
-from pathlib import Path
-from statistics import pstdev
-
-from honeydew.affordances.connectivity.wlan.utils.types import CountryCode
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.regulatory_channels import COUNTRY_CHANNELS
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.iperf_client import (
- IPerfClientOverAdb,
- IPerfClientOverSsh,
-)
-from antlion.controllers.iperf_server import IPerfResult, IPerfServerOverSsh
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-
-DEFAULT_MIN_THROUGHPUT = 0.0
-DEFAULT_MAX_STD_DEV = 1.0
-DEFAULT_IPERF_TIMEOUT = 30
-
-DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR = 30
-GRAPH_CIRCLE_SIZE = 10
-MAX_2_4_CHANNEL = 14
-TIME_TO_SLEEP_BETWEEN_RETRIES = 1
-WEP_HEX_STRING_LENGTH = 10
-
-MEGABITS_PER_SECOND = "Mbps"
-
-
-@dataclass
-class TestParams:
- country_code: str
- """Country code for the DUT to set before running the test."""
-
- security_mode: SecurityMode
-    """Security type of the network to create; SecurityMode.OPEN creates an open network."""
-
- channel: int
- """Channel for the AP to broadcast on"""
-
- channel_bandwidth: int
- """Channel bandwidth in MHz for the AP to broadcast with"""
-
- expect_min_rx_throughput_mbps: float = DEFAULT_MIN_THROUGHPUT
- """Expected minimum receive throughput in Mb/s"""
-
- expect_min_tx_throughput_mbps: float = DEFAULT_MIN_THROUGHPUT
- """Expected minimum transmit throughput in Mb/s"""
-
- # TODO: Use this value
- expect_max_std_dev: float = DEFAULT_MAX_STD_DEV
- """Expected maximum standard deviation of throughput in Mb/s"""
-
-
-@dataclass(frozen=True)
-class ThroughputKey:
- country_code: str
- security_mode: SecurityMode
- channel_bandwidth: int
-
- @staticmethod
- def from_test(test: TestParams) -> "ThroughputKey":
- return ThroughputKey(
- country_code=test.country_code,
- security_mode=test.security_mode,
- channel_bandwidth=test.channel_bandwidth,
- )
-
-
-@dataclass
-class ThroughputValue:
- channel: int
- tx_throughput_mbps: float | None
- rx_throughput_mbps: float | None
-
-
-ChannelThroughputMap = dict[ThroughputKey, list[ThroughputValue]]
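-# Results are grouped by (country, security, bandwidth); each group collects
-# per-channel throughput measurements.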
-
-
-class ChannelSweepTest(base_test.WifiBaseTest):
- """Tests channel performance.
-
- Testbed Requirement:
- * 1 x Fuchsia device (dut)
- * 1 x access point
- * 1 x Linux Machine used as IPerfServer
-
- Note: Performance tests should be done in isolated testbed.
- """
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.channel_throughput: ChannelThroughputMap = {}
-
- self.time_to_wait_for_ip_addr = configs.user_params.get(
- "channel_sweep_test_params", {}
- ).get("time_to_wait_for_ip_addr", DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR)
-
- def pre_run(self) -> None:
- tests: list[tuple[TestParams]] = []
-
- def generate_test_name(test: TestParams) -> str:
- return f"test_{test.country_code}_{test.security_mode}_channel_{test.channel}_{test.channel_bandwidth}mhz"
-
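-        # Per-test throughput expectations may be overridden via user_params
-        # under "channel_sweep_test_params", keyed by the generated test name.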
- def test_params(test_name: str) -> dict[str, float]:
- return self.user_params.get("channel_sweep_test_params", {}).get(
- test_name, {}
- )
-
- for country_channels in [COUNTRY_CHANNELS["United States of America"]]:
- for security_mode in [
- SecurityMode.OPEN,
- SecurityMode.WEP,
- SecurityMode.WPA,
- SecurityMode.WPA2,
- SecurityMode.WPA_WPA2,
- SecurityMode.WPA3,
- ]:
- for (
- channel,
- bandwidths,
- ) in country_channels.allowed_channels.items():
- for bandwidth in bandwidths:
- test = TestParams(
- country_code=country_channels.country_code,
- security_mode=security_mode,
- channel=channel,
- channel_bandwidth=bandwidth,
- )
- name = generate_test_name(test)
- test.expect_min_rx_throughput_mbps = test_params(
- name
- ).get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT)
- test.expect_min_tx_throughput_mbps = test_params(
- name
- ).get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT)
- test.expect_max_std_dev = test_params(name).get(
- "max_std_dev", DEFAULT_MAX_STD_DEV
- )
- tests.append((test,))
-
- self.generate_tests(
- self.run_channel_performance, generate_test_name, tests
- )
-
- def get_existing_test_names(self) -> list[str]:
- test_names: list[str] = super().get_existing_test_names()
- # Verify standard deviation last since it depends on the throughput results from
- # all other tests.
- test_names.sort(key=lambda n: n == "test_standard_deviation")
- return test_names
-
- def setup_class(self) -> None:
- super().setup_class()
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
- self.access_point.stop_all_aps()
-
- if len(self.iperf_servers) == 0:
- raise signals.TestAbortClass("Requires at least one iperf server")
- self.iperf_server = self.iperf_servers[0]
- self.iperf_server.start()
-
- if len(self.iperf_clients) > 0:
- self.iperf_client = self.iperf_clients[0]
- else:
- self.iperf_client = self.dut.create_iperf_client()
-
- def teardown_class(self) -> None:
- self.write_graph()
- super().teardown_class()
-
- def setup_test(self) -> None:
- super().setup_test()
- # TODO(fxb/46417): Uncomment when wlanClearCountry is implemented to clean
- # up any country code changes.
- # for fd in self.fuchsia_devices:
- # phy_ids_response = fd.wlan_lib.wlanPhyIdList()
- # if phy_ids_response.get('error'):
- # raise ConnectionError(
- # 'Failed to retrieve phy ids from FuchsiaDevice (%s). '
- # 'Error: %s' % (fd.ip, phy_ids_response['error']))
- # for id in phy_ids_response['result']:
- # clear_country_response = fd.wlan_lib.wlanClearCountry(id)
- # if clear_country_response.get('error'):
- # raise EnvironmentError(
- # 'Failed to reset country code on FuchsiaDevice (%s). '
- # 'Error: %s' % (fd.ip, clear_country_response['error'])
- # )
- self.access_point.stop_all_aps()
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
- self.dut.disconnect()
-
- def teardown_test(self) -> None:
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.download_logs()
- self.access_point.stop_all_aps()
- super().teardown_test()
-
- def setup_ap(
- self,
- channel: int,
- channel_bandwidth: int,
- security_profile: Security,
- ) -> str:
- """Start network on AP with basic configuration.
-
- Args:
- channel: channel to use for network
- channel_bandwidth: channel bandwidth in MHz to use for the network
- security_profile: security type to use or None if open
-
- Returns:
- SSID of the newly created and running network
-
- Raises:
- ConnectionError if network is not started successfully.
- """
- ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- try:
- setup_ap(
- access_point=self.access_point,
- profile_name="whirlwind",
- channel=channel,
- security=security_profile,
- force_wmm=True,
- ssid=ssid,
- vht_bandwidth=channel_bandwidth,
- setup_bridge=True,
- )
- self.log.info(
- "Network (ssid: %s) up on channel %s w/ channel bandwidth %s MHz",
- ssid,
- channel,
- channel_bandwidth,
- )
- return ssid
- except Exception as err:
- raise ConnectionError(
- f"Failed to setup ap on channel: {channel}, "
- f"channel bandwidth: {channel_bandwidth} MHz. "
- ) from err
-
- def get_and_verify_iperf_address(
- self,
- channel: int,
- device: FuchsiaDevice | IPerfServerOverSsh,
- interface: str,
- ) -> str:
- """Get ip address from a devices interface and verify it belongs to
- expected subnet based on APs DHCP config.
-
- Args:
- channel: channel network is running on, to determine subnet
- device: device to get ip address for
- interface: interface on device to get ip address. If None, uses
- device.test_interface.
-
- Returns:
- IP address of device on given interface (or test_interface)
-
- Raises:
- ConnectionError, if device does not have a valid ip address after
- all retries.
- """
- if channel <= MAX_2_4_CHANNEL:
- subnet = self.access_point._AP_2G_SUBNET_STR
- else:
- subnet = self.access_point._AP_5G_SUBNET_STR
- end_time = time.time() + self.time_to_wait_for_ip_addr
- while time.time() < end_time:
- device_addresses = device.get_interface_ip_addresses(interface)
- if device_addresses["ipv4_private"]:
- for ip_addr in device_addresses["ipv4_private"]:
- if utils.ip_in_subnet(ip_addr, subnet):
- return ip_addr
- else:
- self.log.debug(
- "Device has an ip address (%s), but it is not in subnet %s",
- ip_addr,
- subnet,
- )
- else:
- self.log.debug(
- "Device does not have a valid ip address. Retrying."
- )
- time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
- raise ConnectionError("Device failed to get an ip address.")
-
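- # A minimal sketch of the subnet check used above (addresses are
- # illustrative only; the real subnet strings come from the AccessPoint's
- # DHCP configuration):
- #
- #     utils.ip_in_subnet("192.168.1.5", "192.168.1.0/24")   # truthy
- #     utils.ip_in_subnet("10.0.0.5", "192.168.1.0/24")      # falsy
-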
- def get_iperf_throughput(
- self,
- iperf_server_address: str,
- iperf_client_address: str,
- reverse: bool = False,
- ) -> float:
- """Run iperf between client and server and get the throughput.
-
- Args:
- iperf_server_address: IP address of running iperf server
- iperf_client_address: IP address of iperf client (dut)
- reverse: If True, run traffic in reverse direction, from server to client.
-
- Returns:
- iperf throughput or 0 if iperf fails
- """
- if reverse:
- self.log.info(
- "Running IPerf traffic from server (%s) to dut (%s).",
- iperf_server_address,
- iperf_client_address,
- )
- iperf_results_file = self.iperf_client.start(
- iperf_server_address,
- "-i 1 -t 10 -R -J",
- "channel_sweep_rx",
- timeout=DEFAULT_IPERF_TIMEOUT,
- )
- else:
- self.log.info(
- "Running IPerf traffic from dut (%s) to server (%s).",
- iperf_client_address,
- iperf_server_address,
- )
- iperf_results_file = self.iperf_client.start(
- iperf_server_address,
- "-i 1 -t 10 -J",
- "channel_sweep_tx",
- timeout=DEFAULT_IPERF_TIMEOUT,
- )
- if iperf_results_file:
- iperf_results = IPerfResult(
- iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND
- )
- return iperf_results.avg_send_rate or 0.0
- return 0.0
-
- def log_to_file_and_throughput_data(
- self,
- test: TestParams,
- tx_throughput: float | None,
- rx_throughput: float | None,
- ) -> None:
- """Write performance info to csv file and to throughput data.
-
- Args:
- test: parameters of the test that was run
- tx_throughput: throughput in Mb/s from the DUT to the iperf server, or None
- rx_throughput: throughput in Mb/s from the iperf server to the DUT, or None
- """
- test_name = self.current_test_info.name
- log_file = Path(os.path.join(self.log_path, "throughput.csv"))
- self.log.info("Writing IPerf results for %s to %s", test_name, log_file)
-
- if not log_file.is_file():
- with open(log_file, "x", encoding="utf-8") as csv_file:
- csv_file.write(
- "country code,security,channel,channel bandwidth,tx throughput,rx throughput\n"
- )
-
- with open(log_file, "a", encoding="utf-8") as csv_file:
- csv_file.write(
- f"{test.country_code},{test.security_mode},{test.channel},{test.channel_bandwidth},{tx_throughput},{rx_throughput}\n"
- )
-
- key = ThroughputKey.from_test(test)
- if key not in self.channel_throughput:
- self.channel_throughput[key] = []
-
- self.channel_throughput[key].append(
- ThroughputValue(
- channel=test.channel,
- tx_throughput_mbps=tx_throughput,
- rx_throughput_mbps=rx_throughput,
- )
- )
-
- def write_graph(self) -> None:
- """Create graph html files from throughput data, plotting channel vs
- tx_throughput and channel vs rx_throughput.
- """
- # If performance measurement is skipped
- if not hasattr(self, "iperf_server") or not self.iperf_server:
- return
-
- try:
- from bokeh.plotting import (
- ColumnDataSource,
- figure,
- output_file,
- save,
- )
- except ImportError:
- self.log.warning(
- "bokeh is not installed: skipping creation of graphs. "
- "Note CSV files are still available. If graphs are "
- 'desired, install antlion with the "bokeh" feature.'
- )
- return
-
- for key, throughputs in self.channel_throughput.items():
- output_file_name = os.path.join(
- self.log_path,
- f"channel_throughput_{key.country_code}_{key.security_mode}_{key.channel_bandwidth}mhz.html",
- )
- output_file(output_file_name)
- channels = []
- tx_throughputs = []
- rx_throughputs = []
-
- for throughput in sorted(throughputs, key=lambda t: t.channel):
- channels.append(str(throughput.channel))
- tx_throughputs.append(throughput.tx_throughput_mbps)
- rx_throughputs.append(throughput.rx_throughput_mbps)
-
- channel_vs_throughput_data = ColumnDataSource(
- data=dict(
- channels=channels,
- tx_throughput=tx_throughputs,
- rx_throughput=rx_throughputs,
- )
- )
- TOOLTIPS = [
- ("Channel", "@channels"),
- ("TX_Throughput", "@tx_throughput"),
- ("RX_Throughput", "@rx_throughput"),
- ]
- channel_vs_throughput_graph = figure(
- title="Channels vs. Throughput",
- x_axis_label="Channels",
- x_range=channels,
- y_axis_label="Throughput",
- tooltips=TOOLTIPS,
- )
- channel_vs_throughput_graph.sizing_mode = "stretch_both"
- channel_vs_throughput_graph.title.align = "center"
- channel_vs_throughput_graph.line(
- "channels",
- "tx_throughput",
- source=channel_vs_throughput_data,
- line_width=2,
- line_color="blue",
- legend_label="TX_Throughput",
- )
- channel_vs_throughput_graph.circle(
- "channels",
- "tx_throughput",
- source=channel_vs_throughput_data,
- size=GRAPH_CIRCLE_SIZE,
- color="blue",
- )
- channel_vs_throughput_graph.line(
- "channels",
- "rx_throughput",
- source=channel_vs_throughput_data,
- line_width=2,
- line_color="red",
- legend_label="RX_Throughput",
- )
- channel_vs_throughput_graph.circle(
- "channels",
- "rx_throughput",
- source=channel_vs_throughput_data,
- size=GRAPH_CIRCLE_SIZE,
- color="red",
- )
-
- channel_vs_throughput_graph.legend.location = "top_left"
- graph_file = save([channel_vs_throughput_graph])
- self.log.info("Saved graph to %s", graph_file)
-
- def test_standard_deviation(self) -> None:
- """Verify throughputs don't deviate too much across channels.
-
- Assert the throughput standard deviation across all channels of the same
- country, security, and bandwidth does not exceed the maximum specified in the
- user param config. If no maximum is set, a default of 1.0 Mb/s is used.
-
- Raises:
- TestFailure, if standard deviation of throughput exceeds max_std_dev
- """
- # If performance measurement is skipped
- if not self.iperf_server:
- return
-
- max_std_dev = self.user_params.get("channel_sweep_test_params", {}).get(
- "max_std_dev", DEFAULT_MAX_STD_DEV
- )
-
- self.log.info(
- "Verifying standard deviation across channels does not exceed max standard "
- "deviation of %s Mb/s",
- max_std_dev,
- )
-
- errors: list[str] = []
-
- for test, throughputs in self.channel_throughput.items():
- tx_values = []
- rx_values = []
- for throughput in throughputs:
- if throughput.tx_throughput_mbps is not None:
- tx_values.append(throughput.tx_throughput_mbps)
- if throughput.rx_throughput_mbps is not None:
- rx_values.append(throughput.rx_throughput_mbps)
-
- tx_std_dev = pstdev(tx_values)
- rx_std_dev = pstdev(rx_values)
-
- if tx_std_dev > max_std_dev:
- errors.append(
- f"[{test.country_code} {test.security_mode} "
- f"{test.channel_bandwidth}mhz] TX throughput standard deviation "
- f"{tx_std_dev} Mb/s exceeds expected max of {max_std_dev} Mb/s"
- )
- if rx_std_dev > max_std_dev:
- errors.append(
- f"[{test.country_code} {test.security_mode} "
- f"{test.channel_bandwidth}mhz] RX throughput standard deviation "
- f"{rx_std_dev} Mb/s exceeds expected max of {max_std_dev} Mb/s"
- )
-
- if errors:
- error_message = "\n - ".join(errors)
- asserts.fail(
- f"Failed to meet standard deviation expectations:\n - {error_message}"
- )
-
- def run_channel_performance(self, test: TestParams) -> None:
- """Run a single channel performance test
-
- Log results to csv file and throughput data.
-
- 1. Sets up network with test settings
- 2. Associates DUT
- 3. Runs traffic between DUT and iperf server (both directions)
- 4. Logs channel, tx_throughput (Mb/s), and rx_throughput (Mb/s) to
- log file and throughput data.
- 5. Checks throughput values against minimum throughput thresholds.
-
- Raises:
- TestFailure, if throughput (either direction) is less than
- that direction's minimum throughput threshold.
- """
- self.fuchsia_device.wlan_controller.set_country_code(
- CountryCode(test.country_code)
- )
-
- target_security = test.security_mode
- if target_security is not SecurityMode.OPEN:
- if test.security_mode is SecurityMode.WEP:
- password = utils.rand_hex_str(WEP_HEX_STRING_LENGTH)
- else:
- password = utils.rand_ascii_str(
- hostapd_constants.MIN_WPA_PSK_LENGTH
- )
- security_profile = Security(
- security_mode=test.security_mode, password=password
- )
- else:
- password = None
- security_profile = Security()
-
- ssid = self.setup_ap(
- test.channel, test.channel_bandwidth, security_profile
- )
-
- interface = (
- self.access_point.wlan_2g
- if test.channel in hostapd_constants.ALL_CHANNELS_2G
- else self.access_point.wlan_5g
- )
-
- with self.access_point.tcpdump.start(interface, Path(self.log_path)):
- associated = self.dut.associate(
- ssid, target_pwd=password, target_security=target_security
- )
- if not associated:
- self.log_to_file_and_throughput_data(test, None, None)
- asserts.fail(f"Device failed to associate to network {ssid}")
- self.log.info(
- 'DUT (%s) connected to network "%s"', self.dut.identifier, ssid
- )
-
- self.iperf_server.renew_test_interface_ip_address()
- if not isinstance(self.iperf_server.test_interface, str):
- raise TypeError(
- "For this test, iperf_server is required to specify the "
- "test_interface configuration option"
- )
-
- self.log.info(
- "Getting ip address for iperf server. Will retry for %s seconds.",
- self.time_to_wait_for_ip_addr,
- )
- iperf_server_address = self.get_and_verify_iperf_address(
- test.channel,
- self.iperf_server,
- self.iperf_server.test_interface,
- )
- self.log.info(
- "Getting ip address for DUT. Will retry for %s seconds.",
- self.time_to_wait_for_ip_addr,
- )
-
- if not isinstance(
- self.iperf_client, (IPerfClientOverSsh, IPerfClientOverAdb)
- ):
- raise TypeError(
- f'Unknown iperf_client type "{type(self.iperf_client)}"'
- )
- if not isinstance(self.iperf_client.test_interface, str):
- raise TypeError(
- "For this test, iperf_client is required to specify the "
- "test_interface configuration option"
- )
-
- try:
- iperf_client_address = self.get_and_verify_iperf_address(
- test.channel,
- self.fuchsia_device,
- self.iperf_client.test_interface,
- )
- tx_throughput = self.get_iperf_throughput(
- iperf_server_address, iperf_client_address
- )
- rx_throughput = self.get_iperf_throughput(
- iperf_server_address, iperf_client_address, reverse=True
- )
- self.log_to_file_and_throughput_data(
- test, tx_throughput, rx_throughput
- )
- self.log.info(
- "Throughput (tx, rx): (%s Mb/s, %s Mb/s), "
- "Minimum threshold (tx, rx): (%s Mb/s, %s Mb/s)",
- tx_throughput,
- rx_throughput,
- test.expect_min_tx_throughput_mbps,
- test.expect_min_rx_throughput_mbps,
- )
- asserts.assert_greater(
- tx_throughput,
- test.expect_min_tx_throughput_mbps,
- "tx throughput below the minimal threshold",
- )
- asserts.assert_greater(
- rx_throughput,
- test.expect_min_rx_throughput_mbps,
- "rx throughput below the minimal threshold",
- )
- except Exception as e:
- if self.iperf_server._ssh_session:
- ssh = self.iperf_server._ssh_session
- self.log.warning(
- "iperf ps aux:\n%s",
- ssh.run(["sudo", "ps", "aux"]).stdout.decode("utf-8"),
- )
- self.log.warning(
- "iperf sockets:\n%s",
- ssh.run(["sudo", "ss", "-tulpn"]).stdout.decode(
- "utf-8"
- ),
- )
- raise e
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/performance/WlanRvrTest.py b/tests/wlan/performance/WlanRvrTest.py
deleted file mode 100644
index b21d130..0000000
--- a/tests/wlan/performance/WlanRvrTest.py
+++ /dev/null
@@ -1,623 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import itertools
-import logging
-import time
-from dataclasses import dataclass
-from enum import StrEnum, auto, unique
-
-from mobly import asserts, signals, test_runner
-from mobly.config_parser import TestRunConfig
-from mobly.records import TestResultRecord
-
-from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
-from antlion.controllers.ap_lib.hostapd_constants import BandType
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-from antlion.controllers.attenuator import (
- Attenuator,
- get_attenuators_for_device,
-)
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.iperf_server import IPerfResult, IPerfServerOverSsh
-from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-from antlion.validation import MapValidator
-
-REPORTING_SPEED_UNITS = "Mbps"
-DAD_TIMEOUT_SEC = 30
-
-
-@unique
-class TrafficDirection(StrEnum):
- RX = auto()
- TX = auto()
-
-
-@unique
-class IPVersion(StrEnum):
- V4 = "ipv4"
- V6 = "ipv6"
-
-
-@dataclass(frozen=True)
-class RateByRange:
- relative_attn: int
- throughput: float
-
-
-@dataclass(frozen=True)
-class TestParams:
- band: BandType
- security: Security
- ip_version: IPVersion
- direction: TrafficDirection
-
-
-def write_csv_rvr_data(
- test_name: str, csv_path: str, results: list[RateByRange]
-) -> None:
- """Writes the CSV data for the RvR test
- Args:
- test_name: The name of test that was run.
- csv_path: Where to put the csv file.
- csv_data: A dictionary of the data to be put in the csv file.
- """
- csv_file_name = f"{csv_path}rvr_throughput_vs_attn_{test_name}.csv"
- with open(csv_file_name, "w+") as csv_fileId:
- csv_fileId.write(
- f"Attenuation(db),Throughput({REPORTING_SPEED_UNITS})\n"
- )
- for res in results:
- csv_fileId.write(f"{res.relative_attn},{res.throughput}\n")
-
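- # A minimal sketch of the CSV produced above (attenuation steps and
- # throughput values are illustrative, not measured data):
- #
- #     Attenuation(db),Throughput(Mbps)
- #     0,412.3
- #     1,401.7
- #     2,389.0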
-
-class WlanRvrTest(base_test.WifiBaseTest):
- """Tests running WLAN RvR.
-
- Test Bed Requirement:
- * One Android device or Fuchsia device
- * One Access Point
- * One attenuator
- * One Linux iPerf Server
- """
-
- def __init__(self, configs: TestRunConfig) -> None:
- super().__init__(configs)
- self.log = logging.getLogger()
- self.rvr_graph_summary: list[object] = []
-
- params = MapValidator(self.user_params["rvr_settings"])
- self.starting_attn = params.get(int, "starting_attn", 0)
- self.ending_attn = params.get(int, "ending_attn", 95)
- self.step_size_in_db = params.get(int, "step_size_in_db", 1)
- self.dwell_time_in_secs = params.get(int, "dwell_time_in_secs", 10)
-
- self.reverse_rvr_after_forward = params.get(
- bool, "reverse_rvr_after_forward", False
- )
- self.iperf_flags = params.get(str, "iperf_flags", "-i 1")
- self.iperf_flags += f" -t {self.dwell_time_in_secs} -J"
-
- self.fuchsia_device, self.dut = self.get_dut_type(
- FuchsiaDevice, AssociationMode.POLICY
- )
-
- if len(self.access_points) == 0:
- raise signals.TestAbortClass("Requires at least one access point")
- self.access_point = self.access_points[0]
-
- self.attenuators_2g = get_attenuators_for_device(
- self.controller_configs["AccessPoint"][0]["Attenuator"],
- self.attenuators,
- "attenuator_ports_wifi_2g",
- )
- self.attenuators_5g = get_attenuators_for_device(
- self.controller_configs["AccessPoint"][0]["Attenuator"],
- self.attenuators,
- "attenuator_ports_wifi_5g",
- )
-
- self.iperf_server = self.iperf_servers[0]
-
- if hasattr(self, "iperf_clients") and self.iperf_clients:
- self.dut_iperf_client = self.iperf_clients[0]
- else:
- self.dut_iperf_client = self.dut.create_iperf_client()
-
- def pre_run(self) -> None:
- test_params: list[TestParams] = []
-
- for (
- band,
- security_mode,
- ip_version,
- direction,
- ) in itertools.product(
- [e for e in BandType],
- [SecurityMode.OPEN, SecurityMode.WPA2],
- [e for e in IPVersion],
- [e for e in TrafficDirection],
- ):
- password: str | None = None
- if security_mode is not SecurityMode.OPEN:
- password = rand_ascii_str(20)
- security = Security(security_mode, password)
- test_params.append(
- TestParams(
- band,
- security,
- ip_version,
- direction,
- )
- )
-
- def generate_test_name(t: TestParams) -> str:
- # TODO(http://b/303659781): Keep mode in sync with hostapd.
- mode = "11n" if t.band is BandType.BAND_2G else "11ac"
- frequency = "20mhz" if t.band is BandType.BAND_2G else "80mhz"
- return (
- f"test_rvr_{mode}_{t.band}_{frequency}_{t.security}_"
- f"{t.direction}_{t.ip_version}"
- )
-
- self.generate_tests(
- self._test_rvr, generate_test_name, [(p,) for p in test_params]
- )
-
- def setup_test(self) -> None:
- super().setup_test()
- self.iperf_server.start()
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockAcquireBright()
- ad.droid.wakeUpNow()
- self.dut.wifi_toggle_state(True)
- self.dut.disconnect()
- self.access_point.stop_all_aps()
-
- def teardown_test(self) -> None:
- self.cleanup_tests()
- super().teardown_test()
-
- def on_fail(self, record: TestResultRecord) -> None:
- super().on_fail(record)
- self.cleanup_tests()
-
- def cleanup_tests(self) -> None:
- """Cleans up all the dangling pieces of the tests, for example, the
- iperf server, radvd, all the currently running APs, and the various
- clients running during the tests.
- """
- self.download_logs()
- if hasattr(self, "android_devices"):
- for ad in self.android_devices:
- ad.droid.wakeLockRelease()
- ad.droid.goToSleepNow()
- self.iperf_server.stop()
- self.dut.turn_location_off_and_scan_toggle_off()
- self.dut.disconnect()
- self.dut.reset_wifi()
- self.access_point.stop_all_aps()
-
- def _wait_for_iperf_ipv4_addr(self) -> str:
- """Wait for an IPv4 addresses to become available on the iperf server.
-
- Returns:
- The private IPv4 address of the iperf server.
-
- Raises:
- TestFailure: If unable to acquire an IPv4 address.
- """
- ip_address_checker_counter = 0
- ip_address_checker_max_attempts = 3
- while ip_address_checker_counter < ip_address_checker_max_attempts:
- self.iperf_server.renew_test_interface_ip_address()
- iperf_server_ip_addresses = (
- self.iperf_server.get_interface_ip_addresses(
- self.iperf_server.test_interface
- )
- )
- self.log.info(f"IPerf server IP info: {iperf_server_ip_addresses}")
-
- if not iperf_server_ip_addresses["ipv4_private"]:
- self.log.warning(
- "Unable to get the iperf server IPv4 "
- "address. Retrying..."
- )
- ip_address_checker_counter += 1
- time.sleep(1)
- continue
-
- return iperf_server_ip_addresses["ipv4_private"][0]
-
- raise signals.TestFailure("IPv4 address not available on iperf server.")
-
- def _wait_for_iperf_dad(self) -> str:
- """Wait for Duplicate Address Detection to resolve so that an
- private-local IPv6 address is available for test.
-
- Returns:
- A string containing the private-local IPv6 address of the iperf server.
-
- Raises:
- TestFailure: If unable to acquire an IPv6 address.
- """
- now = time.time()
- start = now
- elapsed = now - start
-
- while elapsed < DAD_TIMEOUT_SEC:
- addrs = self.iperf_server.get_interface_ip_addresses(
- self.iperf_server.test_interface
- )
- now = time.time()
- elapsed = now - start
- if addrs["ipv6_private_local"]:
- # DAD has completed
- addr = addrs["ipv6_private_local"][0]
- self.log.info(
- f'DAD on iperf server resolved with "{addr}" after {elapsed}s'
- )
- return addr
- time.sleep(1)
-
- raise signals.TestFailure(
- "Iperf server unable to acquire a private-local IPv6 address for testing "
- f"after {elapsed}s"
- )
-
- def run_rvr(
- self,
- ssid: str,
- security: Security | None,
- band: BandType,
- traffic_dir: TrafficDirection,
- ip_version: IPVersion,
- ) -> list[RateByRange]:
- """Setups and runs the RvR test
-
- Args:
- ssid: The SSID for the client to associate to.
- security: Security of the AP
- band: 2g or 5g
- traffic_dir: rx or tx, bi is not supported by iperf3
- ip_version: 4 or 6
-
- Returns:
- The list of throughput results at each attenuation step.
- """
- match band:
- case BandType.BAND_2G:
- rvr_attenuators = self.attenuators_2g
- case BandType.BAND_5G:
- rvr_attenuators = self.attenuators_5g
-
- for rvr_attenuator in rvr_attenuators:
- rvr_attenuator.set_atten(self.starting_attn)
-
- # Attempt association to the AP multiple times. This makes the test more
- # resilient to AP flakes that may result in the DUT not being able to
- # find the network in its scan results.
- associate_counter = 0
- associate_max_attempts = 3
- while associate_counter < associate_max_attempts:
- self.dut.disconnect()
-
- self.access_point.stop_all_aps()
- self.access_point.start_ap(
- hostapd_config=create_ap_preset(
- iface_wlan_2g=self.access_point.wlan_2g,
- iface_wlan_5g=self.access_point.wlan_5g,
- profile_name="whirlwind",
- channel=band.default_channel(),
- ssid=ssid,
- security=security,
- ),
- radvd_config=(
- RadvdConfig() if ip_version is IPVersion.V6 else None
- ),
- setup_bridge=True,
- )
-
- if self.dut.associate(
- ssid,
- target_pwd=security.password if security else None,
- target_security=(
- security.security_mode if security else SecurityMode.OPEN
- ),
- check_connectivity=False,
- ):
- break
- else:
- associate_counter += 1
- else:
- asserts.fail(
- f"Unable to associate at starting attenuation: {self.starting_attn}"
- )
-
- match ip_version:
- case IPVersion.V4:
- iperf_server_ip_address = self._wait_for_iperf_ipv4_addr()
- case IPVersion.V6:
- self.iperf_server.renew_test_interface_ip_address()
- self.log.info(
- "Waiting for iperf server to complete Duplicate "
- "Address Detection..."
- )
- iperf_server_ip_address = self._wait_for_iperf_dad()
-
- results = self.rvr_loop(
- traffic_dir,
- rvr_attenuators,
- iperf_server_ip_address,
- ip_version,
- ssid,
- security=security,
- reverse=False,
- )
- if self.reverse_rvr_after_forward:
- results = results + self.rvr_loop(
- traffic_dir,
- rvr_attenuators,
- iperf_server_ip_address,
- ip_version,
- ssid=ssid,
- security=security,
- reverse=True,
- )
-
- return results
-
- def rvr_loop(
- self,
- traffic_dir: TrafficDirection,
- rvr_attenuators: list[Attenuator],
- iperf_server_ip_address: str,
- ip_version: IPVersion,
- ssid: str,
- security: Security | None,
- reverse: bool,
- ) -> list[RateByRange]:
- """The loop that goes through each attenuation level and runs the iperf
- throughput pair.
- Args:
- traffic_dir: The traffic direction from the perspective of the DUT.
- rvr_attenuators: A list of attenuators to set.
- iperf_server_ip_address: The IP address of the iperf server.
- ssid: The ssid of the wireless network that the should associated
- to.
- password: Password of the wireless network.
- reverse: Whether to run RvR test starting from the highest
- attenuation and going to the lowest. This is run after the
- normal low attenuation to high attenuation RvR test.
- throughput: The list of throughput data for the test.
- relative_attn: The list of attenuation data for the test.
-
- Returns:
- throughput: The list of throughput data for the test.
- relative_attn: The list of attenuation data for the test.
- """
- starting_attn = self.starting_attn
- ending_attn = self.ending_attn
- step_size_in_db = self.step_size_in_db
- if reverse:
- starting_attn = self.ending_attn
- ending_attn = self.starting_attn
- step_size_in_db = step_size_in_db * -1
- self.dut.disconnect()
-
- results: list[RateByRange] = []
-
- for step in range(starting_attn, ending_attn, step_size_in_db):
- try:
- for attenuator in rvr_attenuators:
- self.log.info(
- f"Setting relative attenuation of {attenuator.instrument.address} "
- f"to {step} dB"
- )
- attenuator.set_atten(step)
- except ValueError as e:
- self.log.error(
- f"{step} is beyond the max or min of the testbed "
- f"attenuator's capability. Stopping. {e}"
- )
- break
-
- self.log.info(f"Running iperf at relative attenuation of {step} dB")
-
- throughput = self._run_iperf(
- traffic_dir,
- iperf_server_ip_address,
- ip_version,
- ssid,
- security,
- reverse,
- )
- self.log.info(
- f"Iperf traffic complete. {traffic_dir} traffic received at "
- f"{throughput} {REPORTING_SPEED_UNITS} at relative attenuation "
- f"of {step} db"
- )
- results.append(RateByRange(step, throughput))
-
- return results
-
- def _run_iperf(
- self,
- traffic_dir: TrafficDirection,
- iperf_server_ip_address: str,
- ip_version: IPVersion,
- ssid: str,
- security: Security | None,
- reverse: bool,
- ) -> float:
- iperf_flags = self.iperf_flags
- if traffic_dir is TrafficDirection.RX:
- iperf_flags = f"{self.iperf_flags} -R"
-
- if not self.dut.is_connected():
- if reverse:
- # In reverse mode, we're going from a high attenuation (weak
- # signal) to a low attenuation (strong signal). It's expected
- # that the DUT is not connected to the AP at the high
- # attenuation level(s), so if we're disconnected here, we
- # should try to associate.
- self.log.info(f"Trying to associate")
- if self.dut.associate(
- ssid,
- target_pwd=security.password if security else None,
- target_security=(
- security.security_mode
- if security
- else SecurityMode.OPEN
- ),
- check_connectivity=False,
- ):
- self.log.info("Successfully associated.")
- try:
- self.log.debug("Getting DUT IP address")
- assert self.dut_iperf_client.test_interface is not None
- if ip_version is IPVersion.V4:
- self.fuchsia_device.wait_for_ipv4_addr(
- self.dut_iperf_client.test_interface
- )
- elif ip_version is IPVersion.V6:
- self.fuchsia_device.wait_for_ipv6_addr(
- self.dut_iperf_client.test_interface
- )
- except ConnectionError:
- self.log.info(
- f"Association succeeded, but unable to get DUT IP address. Marking a 0 {REPORTING_SPEED_UNITS} "
- "for throughput. Skipping running traffic and disconnecting."
- )
- # Disconnect the DUT, so that we have a fresh attempt
- # to get an IP at the next iteration of this reverse
- # test.
- self.dut.disconnect()
- return 0
- else:
- self.log.info(
- f"Association failed. Marking a 0 {REPORTING_SPEED_UNITS} "
- "for throughput. Skipping running traffic."
- )
- return 0
- else:
- self.log.info(
- f"Device no longer associated. Marking a 0 {REPORTING_SPEED_UNITS} "
- "for throughput. Skipping running traffic."
- )
- return 0
-
- self.log.debug("Pinging iperf server from DUT")
- ping_result = self.dut.ping(iperf_server_ip_address)
- if not ping_result.success:
- self.log.info(
- f'Iperf server "{iperf_server_ip_address}" is not pingable. '
- f"Marking a 0 {REPORTING_SPEED_UNITS} for throughput. "
- "Skipping running traffic."
- )
- self.log.debug(f"{iperf_server_ip_address} pingable: {ping_result}")
- return 0
-
- self.log.info(f'Iperf server "{iperf_server_ip_address}" is pingable.')
-
- match traffic_dir:
- case TrafficDirection.TX:
- self.log.info(
- f"Running traffic from DUT to iperf server ({iperf_server_ip_address})"
- )
- case TrafficDirection.RX:
- self.log.info(
- f"Running traffic from iperf server ({iperf_server_ip_address}) to DUT"
- )
-
- try:
- iperf_tag = "decreasing"
- if reverse:
- iperf_tag = "increasing"
- iperf_results_file = self.dut_iperf_client.start(
- iperf_server_ip_address,
- iperf_flags,
- f"{iperf_tag}_{traffic_dir}_{self.starting_attn}",
- timeout=(self.dwell_time_in_secs * 2),
- )
- except TimeoutError as e:
- iperf_results_file = None
- self.log.error(
- f"Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for "
- f"throughput. {e}"
- )
- return 0
-
- if not iperf_results_file:
- return 0
-
- try:
- iperf_results = IPerfResult(
- iperf_results_file,
- reporting_speed_units=REPORTING_SPEED_UNITS,
- )
- if iperf_results.error:
- self.iperf_server.stop()
- self.iperf_server.start()
- self.log.error(f"Errors in iperf logs:\n{iperf_results.error}")
- if iperf_results.avg_send_rate:
- return iperf_results.avg_send_rate
-
- self.log.error(
- '"avg_send_rate" not found in iPerf3 results file. Marking 0 '
- f"{REPORTING_SPEED_UNITS} for throughput."
- f"\n{iperf_results.get_json()}"
- )
- return 0
- except ValueError as e:
- self.iperf_server.stop()
- self.iperf_server.start()
- self.log.error(
- f"No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} "
- f"for throughput: {e}"
- )
- return 0
- except Exception as e:
- self.iperf_server.stop()
- self.iperf_server.start()
- self.log.error(
- f"Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for "
- f"throughput: {e}"
- )
- return 0
-
- def _test_rvr(self, t: TestParams) -> None:
- ssid = rand_ascii_str(20)
- self.access_point.stop_all_aps()
- results = self.run_rvr(
- ssid,
- security=t.security,
- band=t.band,
- traffic_dir=t.direction,
- ip_version=t.ip_version,
- )
- write_csv_rvr_data(
- self.current_test_info.name,
- self.current_test_info.output_path,
- results,
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/performance/WlanWmmTest.py b/tests/wlan/performance/WlanWmmTest.py
deleted file mode 100644
index dca9ee4..0000000
--- a/tests/wlan/performance/WlanWmmTest.py
+++ /dev/null
@@ -1,977 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import operator
-import time
-from typing import Any
-
-from mobly import asserts, test_runner
-
-from antlion import context, utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
-from antlion.test_utils.abstract_devices import wmm_transceiver
-from antlion.test_utils.abstract_devices.wlan_device import (
- AssociationMode,
- create_wlan_device,
-)
-from antlion.test_utils.fuchsia import wmm_test_cases
-from antlion.test_utils.wifi import base_test
-
-DEFAULT_N_CAPABILITIES_20_MHZ = [
- hostapd_constants.N_CAPABILITY_LDPC,
- hostapd_constants.N_CAPABILITY_SGI20,
- hostapd_constants.N_CAPABILITY_TX_STBC,
- hostapd_constants.N_CAPABILITY_RX_STBC1,
- hostapd_constants.N_CAPABILITY_HT20,
-]
-
-DEFAULT_AP_PARAMS = {
- "profile_name": "whirlwind",
- "channel": hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- "n_capabilities": DEFAULT_N_CAPABILITIES_20_MHZ,
- "ac_capabilities": None,
-}
-
-DEFAULT_BW_PERCENTAGE = 1
-DEFAULT_STREAM_TIMEOUT = 60
-DEFAULT_STREAM_TIME = 10
-
-OPERATORS = {
- ">": operator.gt,
- ">=": operator.ge,
- "<": operator.lt,
- "<=": operator.le,
- "==": operator.eq,
-}
-
-GRAPH_COLOR_LEN = 10
-GRAPH_DEFAULT_LINE_WIDTH = 2
-GRAPH_DEFAULT_CIRCLE_SIZE = 10
-
-
-def eval_operator(
- operator_string: str,
- actual_value: float,
- expected_value: float,
- max_bw: float,
- rel_tolerance: float = 0,
- abs_tolerance: float = 0,
- max_bw_rel_tolerance: float = 0,
-) -> bool:
- """
- Determines if an inequality evaluates to True, given relative and absolute
- tolerance.
-
- Args:
- operator_string: string, the operator to use for the comparison
- actual_value: the value to compare to some expected value
- expected_value: the value the actual value is compared to
- max_bw: the maximum link bandwidth, used with max_bw_rel_tolerance
- rel_tolerance: decimal representing the percent tolerance, relative to
- the expected value. E.g. (101 <= 100) w/ rel_tol=0.01 is True
- abs_tolerance: the lowest actual (not percent) tolerance for error.
- E.g. (101 == 100) w/ rel_tol=0.005 is False, but
- (101 == 100) w/ rel_tol=0.005 and abs_tol=1 is True
- max_bw_rel_tolerance: decimal representing the percent tolerance,
- relative to the maximum allowed bandwidth.
- E.g. (101 <= max bw of 100) w/ max_bw_rel_tol=0.01 is True
-
-
- Returns:
- True, if inequality evaluates to True within tolerances
- False, otherwise
- """
- op = OPERATORS[operator_string]
- if op(actual_value, expected_value):
- return True
-
- error = abs(actual_value - expected_value)
- accepted_error = max(
- expected_value * rel_tolerance,
- abs_tolerance,
- max_bw * max_bw_rel_tolerance,
- )
- return error <= accepted_error
-
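- # Usage sketch for the tolerance logic above, mirroring the docstring
- # example (max_bw only matters when max_bw_rel_tolerance is non-zero):
- #
- #     eval_operator("<=", 101, 100, max_bw=0, rel_tolerance=0.01)   # True
- #     eval_operator("<=", 101, 100, max_bw=0)                       # False
-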
-
-class WlanWmmTest(base_test.WifiBaseTest):
- """Tests WMM QoS Functionality (Station only)
-
- Testbed Requirements:
- * One ACTS compatible wlan_device (staut)
- * One Whirlwind Access Point
- * For some tests, One additional ACTS compatible device (secondary_sta)
-
- For accurate results, must be performed in an RF isolated environment.
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
-
- try:
- self.wmm_test_params = self.user_params["wmm_test_params"]
- self._wmm_transceiver_configs = self.wmm_test_params[
- "wmm_transceivers"
- ]
- except KeyError:
- raise AttributeError(
- "Must provide at least 2 WmmTransceivers in "
- '"wmm_test_params" field of ACTS config.'
- )
-
- if len(self._wmm_transceiver_configs) < 2:
- raise AttributeError("At least 2 WmmTransceivers must be provided.")
-
- self.android_devices = self.android_devices
- self.fuchsia_devices = self.fuchsia_devices
-
- self.wlan_devices = [
- create_wlan_device(device, AssociationMode.POLICY)
- for device in self.android_devices + self.fuchsia_devices
- ]
-
- # Create STAUT transceiver
- if "staut" not in self._wmm_transceiver_configs:
- raise AttributeError(
- 'Must provide a WmmTransceiver labeled "staut" with a '
- "wlan_device."
- )
- self.staut = wmm_transceiver.create(
- self._wmm_transceiver_configs["staut"],
- identifier="staut",
- wlan_devices=self.wlan_devices,
- )
-
- # Required for automated power cycling
- self.dut = self.staut.wlan_device
-
- # Create AP transceiver
- if "access_point" not in self._wmm_transceiver_configs:
- raise AttributeError(
- 'Must provide a WmmTransceiver labeled "access_point" with a '
- "access_point."
- )
- self.access_point_transceiver = wmm_transceiver.create(
- self._wmm_transceiver_configs["access_point"],
- identifier="access_point",
- access_points=self.access_points,
- )
-
- self.wmm_transceivers = [self.staut, self.access_point_transceiver]
-
- # Create secondary station transceiver, if present
- if "secondary_sta" in self._wmm_transceiver_configs:
- self.secondary_sta = wmm_transceiver.create(
- self._wmm_transceiver_configs["secondary_sta"],
- identifier="secondary_sta",
- wlan_devices=self.wlan_devices,
- )
- self.wmm_transceivers.append(self.secondary_sta)
- else:
- self.secondary_sta = None
-
- self.wmm_transceiver_map = {
- tc.identifier: tc for tc in self.wmm_transceivers
- }
-
- def setup_test(self) -> None:
- super().setup_test()
- for tc in self.wmm_transceivers:
- if tc.wlan_device:
- tc.wlan_device.wifi_toggle_state(True)
- tc.wlan_device.disconnect()
- if tc.access_point:
- tc.access_point.stop_all_aps()
-
- def teardown_test(self) -> None:
- for tc in self.wmm_transceivers:
- tc.cleanup_asynchronous_streams()
- if tc.wlan_device:
- tc.wlan_device.disconnect()
- tc.wlan_device.reset_wifi()
- self.download_logs()
- if tc.access_point:
- tc.access_point.stop_all_aps()
- super().teardown_test()
-
- def teardown_class(self) -> None:
- for tc in self.wmm_transceivers:
- tc.destroy_resources()
- super().teardown_class()
-
- def start_ap_with_wmm_params(
- self,
- ap_parameters: dict[str, Any],
- wmm_parameters: dict[str, Any],
- ) -> str:
- """Sets up WMM network on AP.
-
- Args:
- ap_parameters: a dictionary of kwargs to set up on ap
- wmm_parameters: a dictionary of wmm_params to set up on ap
-
- Returns:
- String, subnet of the network setup (e.g. '192.168.1.0/24')
- """
- # Defaults for required parameters
- ap_parameters["force_wmm"] = True
- if "ssid" not in ap_parameters:
- ap_parameters["ssid"] = utils.rand_ascii_str(
- hostapd_constants.AP_SSID_LENGTH_2G
- )
-
- if "profile_name" not in ap_parameters:
- ap_parameters["profile_name"] = "whirlwind"
-
- if "channel" not in ap_parameters:
- ap_parameters["channel"] = 6
-
- if "n_capabilities" not in ap_parameters:
- ap_parameters["n_capabilities"] = DEFAULT_N_CAPABILITIES_20_MHZ
-
- if "additional_ap_parameters" in ap_parameters:
- ap_parameters["additional_ap_parameters"].update(wmm_parameters)
- else:
- ap_parameters["additional_ap_parameters"] = wmm_parameters
-
- # Optional security
- security_config = ap_parameters.get("security_config", None)
- if security_config:
- ap_parameters["security"] = hostapd_security.Security(
- **security_config
- )
- ap_parameters.pop("security_config")
-
- # Start AP with kwargs
- self.log.info(f"Setting up WMM network: {ap_parameters['ssid']}")
- setup_ap(self.access_point_transceiver.access_point, **ap_parameters)
- self.log.info(f"Network ({ap_parameters['ssid']}) is up.")
-
- # Return subnet
- if ap_parameters["channel"] < hostapd_constants.LOWEST_5G_CHANNEL:
- return self.access_point_transceiver.access_point._AP_2G_SUBNET_STR
- else:
- return self.access_point_transceiver.access_point._AP_5G_SUBNET_STR
-
- def associate_transceiver(
- self,
- wmm_transceiver: Any,
- ap_params: dict[str, Any],
- ) -> None:
- """Associates a WmmTransceiver that has a wlan_device.
-
- Args:
- wmm_transceiver: transceiver to associate
- ap_params: dict, contains ssid and password, if any, for network
- """
- if not wmm_transceiver.wlan_device:
- raise AttributeError(
- "Cannot associate a WmmTransceiver that does not have a WLAN device."
- )
- ssid = ap_params["ssid"]
- password = None
- target_security = None
- security = ap_params.get("security")
- if security:
- password = security.password
- target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
- security.security_mode_string
- )
- associated = wmm_transceiver.wlan_device.associate(
- ssid,
- target_security,
- target_pwd=password,
- )
- if not associated:
- raise ConnectionError(
- f"Failed to associate WmmTransceiver {wmm_transceiver.identifier}."
- )
- self.log.info(
- f"WmmTransceiver {wmm_transceiver.identifier} associated."
- )
-
- def validate_streams_in_phase(
- self,
- phase_id: str,
- phases: dict[str, Any],
- max_bw: float,
- ) -> bool:
- """Validates any stream in a phase that has validation criteria.
-
- Args:
- phase_id: identifier of the phase to check
- phases: dictionary containing phases for retrieving stream
- transmitters, expected bandwidths, etc.
- max_bw: the max link bandwidth, measured in the test
-
- Returns:
- True, if ALL validation criteria for ALL streams in phase pass
- False, otherwise
- """
- pass_val = True
- for stream_id, stream in phases[phase_id].items():
- if "validation" in stream:
- transmitter = stream["transmitter"]
- uuid = stream["uuid"]
- actual_bw = transmitter.get_results(uuid).avg_rate
- if not actual_bw:
- raise ConnectionError(
- "(Phase: %s, Stream: %s) - Stream results show "
- "bandwidth: None" % (phase_id, stream_id)
- )
- for check in stream["validation"]:
- operator_str = check["operator"]
- rel_tolerance = check.get("rel_tolerance", 0)
- abs_tolerance = check.get("abs_tolerance", 0)
- max_bw_rel_tolerance = check.get("max_bw_rel_tolerance", 0)
- expected_bw_percentage = check.get(
- "bandwidth_percentage", DEFAULT_BW_PERCENTAGE
- )
- # Explicit Bandwidth Validation
- if "bandwidth" in check:
- comp_bw = check["bandwidth"]
- log_msg = (
- "Expected Bandwidth: %s (explicit validation "
- "bandwidth [%s] x expected bandwidth "
- "percentage [%s])"
- % (
- expected_bw_percentage * comp_bw,
- comp_bw,
- expected_bw_percentage,
- )
- )
-
- # Stream Comparison Validation
- elif "phase" in check and "stream" in check:
- comp_phase_id = check["phase"]
- comp_stream_id = check["stream"]
- comp_stream = phases[comp_phase_id][comp_stream_id]
- comp_transmitter = comp_stream["transmitter"]
- comp_uuid = comp_stream["uuid"]
- comp_bw = comp_transmitter.get_results(
- comp_uuid
- ).avg_rate
- log_msg = (
- "Expected Bandwidth: %s (bandwidth for phase: %s, "
- "stream: %s [%s] x expected bandwidth percentage "
- "[%s])"
- % (
- expected_bw_percentage * comp_bw,
- comp_phase_id,
- comp_stream_id,
- comp_bw,
- expected_bw_percentage,
- )
- )
-
- # Expected Bandwidth Validation
- else:
- if "bandwidth" in stream:
- comp_bw = stream["bandwidth"]
- log_msg = (
- "Expected Bandwidth: %s (expected stream "
- "bandwidth [%s] x expected bandwidth "
- "percentage [%s])"
- % (
- expected_bw_percentage * comp_bw,
- comp_bw,
- expected_bw_percentage,
- )
- )
- else:
- max_bw_percentage = stream.get(
- "max_bandwidth_percentage",
- DEFAULT_BW_PERCENTAGE,
- )
- comp_bw = max_bw * max_bw_percentage
- log_msg = (
- "Expected Bandwidth: %s (max bandwidth [%s] x "
- "stream bandwidth percentage [%s] x expected "
- "bandwidth percentage [%s])"
- % (
- expected_bw_percentage * comp_bw,
- max_bw,
- max_bw_percentage,
- expected_bw_percentage,
- )
- )
-
- self.log.info(
- "Validation criteria - Stream: %s, "
- "Actual Bandwidth: %s, Operator: %s, %s, "
- "Relative Tolerance: %s, Absolute Tolerance: %s, Max "
- "Bandwidth Relative Tolerance: %s"
- % (
- stream_id,
- actual_bw,
- operator_str,
- log_msg,
- rel_tolerance,
- abs_tolerance,
- max_bw_rel_tolerance,
- )
- )
-
- if eval_operator(
- operator_str,
- actual_bw,
- comp_bw * expected_bw_percentage,
- max_bw,
- rel_tolerance=rel_tolerance,
- abs_tolerance=abs_tolerance,
- max_bw_rel_tolerance=max_bw_rel_tolerance,
- ):
- self.log.info(
- "(Phase: %s, Stream: %s) - PASSES validation check!"
- % (phase_id, stream_id)
- )
- else:
- self.log.info(
- "(Phase: %s, Stream: %s) - Stream FAILS validation "
- "check." % (phase_id, stream_id)
- )
- pass_val = False
- if pass_val:
- self.log.info(
- f"(Phase {phase_id}) - All streams' validation criteria were met."
- )
- return True
- else:
- self.log.error(
- "(Phase %s) - At least one stream validation criterion was not "
- "met." % phase_id
- )
- return False
-
- def graph_test(self, phases: dict[str, Any], max_bw: float) -> None:
- """Outputs a bokeh html graph of the streams. Saves to ACTS log
- directory.
-
- Args:
- phases: dictionary containing phases for retrieving stream
- transmitters, expected bandwidths, etc.
- max_bw: the max link bandwidth, measured in the test
-
- """
-
- try:
- from bokeh.models import Label, Span
- from bokeh.palettes import Category10
- from bokeh.plotting import (
- ColumnDataSource,
- figure,
- output_file,
- save,
- )
- except ImportError:
- self.log.warning(
- "bokeh is not installed: skipping creation of graphs. "
- "Note CSV files are still available. If graphs are "
- 'desired, install antlion with the "bokeh" feature.'
- )
- return
-
- output_path = context.get_current_context().get_base_output_path()
- output_file_name = "%s/WlanWmmTest/%s.html" % (
- output_path,
- self.current_test_info.name,
- )
- output_file(output_file_name)
-
- start_time = 0
- graph_lines = []
-
- # Used for scaling
- highest_stream_bw = 0
- lowest_stream_bw = 100000
-
- for phase_id, phase in phases.items():
- longest_stream_time = 0
- for stream_id, stream in phase.items():
- transmitter = stream["transmitter"]
- uuid = stream["uuid"]
-
- if "bandwidth" in stream:
- stream_bw = f"{stream['bandwidth']:.3f}"
- stream_bw_formula_str = f"{stream_bw}Mb/s"
- elif "max_bandwidth_percentage" in stream:
- max_bw_percentage = stream["max_bandwidth_percentage"]
- stream_bw = f"{max_bw * max_bw_percentage:.3f}"
- stream_bw_formula_str = "%sMb/s (%s%% of max bandwidth)" % (
- stream_bw,
- str(max_bw_percentage * 100),
- )
- else:
- raise AttributeError(
- "Stream %s must have either a bandwidth or "
- "max_bandwidth_percentage parameter." % stream_id
- )
-
- stream_time = stream.get("time", DEFAULT_STREAM_TIME)
- longest_stream_time = max(longest_stream_time, stream_time)
-
- avg_rate = transmitter.get_results(uuid).avg_rate
-
- instantaneous_rates = transmitter.get_results(
- uuid
- ).instantaneous_rates
- highest_stream_bw = max(
- highest_stream_bw, max(instantaneous_rates)
- )
- lowest_stream_bw = min(
- lowest_stream_bw, min(instantaneous_rates)
- )
-
- stream_data = ColumnDataSource(
- dict(
- time=[
- x
- for x in range(start_time, start_time + stream_time)
- ],
- instantaneous_bws=instantaneous_rates,
- avg_bw=[avg_rate for _ in range(stream_time)],
- stream_id=[stream_id for _ in range(stream_time)],
- attempted_bw=[
- stream_bw_formula_str for _ in range(stream_time)
- ],
- )
- )
- line = {
- "x_axis": "time",
- "y_axis": "instantaneous_bws",
- "source": stream_data,
- "line_width": GRAPH_DEFAULT_LINE_WIDTH,
- "legend_label": f"{phase_id}:{stream_id}",
- }
- graph_lines.append(line)
-
- start_time = start_time + longest_stream_time
- TOOLTIPS = [
- ("Time", "@time"),
- ("Attempted Bandwidth", "@attempted_bw"),
- ("Instantaneous Bandwidth", "@instantaneous_bws"),
- ("Stream Average Bandwidth", "@avg_bw"),
- ("Stream", "@stream_id"),
- ]
-
- # Create and scale graph appropriately
- time_vs_bandwidth_graph = figure(
- title=f"Bandwidth for {self.current_test_info.name}",
- x_axis_label="Time",
- y_axis_label="Bandwidth",
- tooltips=TOOLTIPS,
- y_range=(
- lowest_stream_bw
- - (0.5 * (highest_stream_bw - lowest_stream_bw)),
- 1.05 * max_bw,
- ),
- )
- time_vs_bandwidth_graph.sizing_mode = "stretch_both"
- time_vs_bandwidth_graph.title.align = "center"
- colors = Category10[GRAPH_COLOR_LEN]
- color_ind = 0
-
- # Draw max bandwidth line
- max_bw_span = Span(
- location=max_bw,
- dimension="width",
- line_color="black",
- line_dash="dashed",
- line_width=GRAPH_DEFAULT_LINE_WIDTH,
- )
- max_bw_label = Label(
- x=(0.5 * start_time),
- y=max_bw,
- text=f"Max Bandwidth: {max_bw}Mb/s",
- text_align="center",
- )
- time_vs_bandwidth_graph.add_layout(max_bw_span)
- time_vs_bandwidth_graph.add_layout(max_bw_label)
-
- # Draw stream lines
- for line in graph_lines:
- time_vs_bandwidth_graph.line(
- line["x_axis"],
- line["y_axis"],
- source=line["source"],
- line_width=line["line_width"],
- legend_label=line["legend_label"],
- color=colors[color_ind],
- )
- time_vs_bandwidth_graph.circle(
- line["x_axis"],
- line["y_axis"],
- source=line["source"],
- size=GRAPH_DEFAULT_CIRCLE_SIZE,
- legend_label=line["legend_label"],
- color=colors[color_ind],
- )
- color_ind = (color_ind + 1) % GRAPH_COLOR_LEN
- time_vs_bandwidth_graph.legend.location = "top_left"
- time_vs_bandwidth_graph.legend.click_policy = "hide"
- graph_file = save([time_vs_bandwidth_graph])
- self.log.info(f"Saved graph to {graph_file}")
-
- def run_wmm_test(
- self,
- phases: dict[str, Any],
- ap_parameters: dict[str, Any] = DEFAULT_AP_PARAMS,
- wmm_parameters: dict[str, Any] = hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
- stream_timeout: int = DEFAULT_STREAM_TIMEOUT,
- ) -> None:
- """Runs a WMM test case.
-
- Args:
- phases: dictionary of phases of streams to run in parallel,
- including any validation criteria (see the example below).
- ap_parameters: dictionary of custom kwargs to set up on the AP (see
- start_ap_with_wmm_params)
- wmm_parameters: dictionary of WMM AC parameters
- stream_timeout: int, time in seconds to wait before force joining
- parallel streams
-
- Asserts:
- PASS, if all validation criteria for all phases are met
- FAIL, otherwise
- """
- # Setup AP
- subnet_str = self.start_ap_with_wmm_params(
- ap_parameters, wmm_parameters
- )
- # Determine transmitters and receivers used in test case
- transmitters = set()
- receivers = set()
- for phase in phases.values():
- for stream in phase.values():
- transmitter = self.wmm_transceiver_map[
- stream["transmitter_str"]
- ]
- transmitters.add(transmitter)
- stream["transmitter"] = transmitter
- receiver = self.wmm_transceiver_map[stream["receiver_str"]]
- receivers.add(receiver)
- stream["receiver"] = receiver
- transceivers = transmitters.union(receivers)
-
- # Associate all transceivers with wlan_devices
- for tc in transceivers:
- if tc.wlan_device:
- self.associate_transceiver(tc, ap_parameters)
-
- # Determine link max bandwidth
- self.log.info("Determining link maximum bandwidth.")
- uuid = self.staut.run_synchronous_traffic_stream(
- {"receiver": self.access_point_transceiver}, subnet_str
- )
- max_bw = self.staut.get_results(uuid).avg_send_rate
- self.log.info(f"Link maximum bandwidth: {max_bw} Mb/s")
-
- # Run parallel phases
- pass_test = True
- for phase_id, phase in phases.items():
- self.log.info(f"Setting up phase: {phase_id}")
-
- for stream_id, stream in phase.items():
- transmitter = stream["transmitter"]
- receiver = stream["receiver"]
- access_category = stream.get("access_category", None)
- stream_time = stream.get("time", DEFAULT_STREAM_TIME)
-
- # Determine stream type
- if "bandwidth" in stream:
- bw = stream["bandwidth"]
- elif "max_bandwidth_percentage" in stream:
- max_bw_percentage = stream["max_bandwidth_percentage"]
- bw = max_bw * max_bw_percentage
- else:
- raise AttributeError(
- "Stream %s must have either a bandwidth or "
- "max_bandwidth_percentage parameter." % stream_id
- )
-
- stream_params = {
- "receiver": receiver,
- "access_category": access_category,
- "bandwidth": bw,
- "time": stream_time,
- }
-
- uuid = transmitter.prepare_asynchronous_stream(
- stream_params, subnet_str
- )
- stream["uuid"] = uuid
-
- # Start all streams in phase
- start_time = time.time() + 5
- for transmitter in transmitters:
- transmitter.start_asynchronous_streams(start_time=start_time)
-
- # Wait for streams to join
- for transmitter in transmitters:
- end_time = time.time() + stream_timeout
- while transmitter.has_active_streams:
- if time.time() > end_time:
- raise ConnectionError(
- "Transmitter's (%s) active streams are not finishing."
- % transmitter.identifier
- )
- time.sleep(1)
-
- # Cleanup all streams
- for transmitter in transmitters:
- transmitter.cleanup_asynchronous_streams()
-
- # Validate streams
- pass_test = pass_test and self.validate_streams_in_phase(
- phase_id, phases, max_bw
- )
-
- self.graph_test(phases, max_bw)
- if pass_test:
- asserts.explicit_pass(
- "Validation criteria met for all streams in all phases."
- )
- else:
- asserts.fail(
- "At least one stream failed to meet validation criteria."
- )
-
- # Test Cases
-
- # Internal Traffic Differentiation
-
- def test_internal_traffic_diff_VO_VI(self) -> None:
- self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_VI)
-
- def test_internal_traffic_diff_VO_BE(self) -> None:
- self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BE)
-
- def test_internal_traffic_diff_VO_BK(self) -> None:
- self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BK)
-
- def test_internal_traffic_diff_VI_BE(self) -> None:
- self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BE)
-
- def test_internal_traffic_diff_VI_BK(self) -> None:
- self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BK)
-
- def test_internal_traffic_diff_BE_BK(self) -> None:
- self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_BE_BK)
-
- # External Traffic Differentiation
-
- """Single station, STAUT transmits high priority"""
-
- def test_external_traffic_diff_staut_VO_ap_VI(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_VO_ap_VI
- )
-
- def test_external_traffic_diff_staut_VO_ap_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BE
- )
-
- def test_external_traffic_diff_staut_VO_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BK
- )
-
- def test_external_traffic_diff_staut_VI_ap_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BE
- )
-
- def test_external_traffic_diff_staut_VI_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BK
- )
-
- def test_external_traffic_diff_staut_BE_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_BE_ap_BK
- )
-
- """Single station, STAUT transmits low priority"""
-
- def test_external_traffic_diff_staut_VI_ap_VO(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_VI_ap_VO
- )
-
- def test_external_traffic_diff_staut_BE_ap_VO(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VO
- )
-
- def test_external_traffic_diff_staut_BK_ap_VO(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VO
- )
-
- def test_external_traffic_diff_staut_BE_ap_VI(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VI
- )
-
- def test_external_traffic_diff_staut_BK_ap_VI(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VI
- )
-
- def test_external_traffic_diff_staut_BK_ap_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_external_traffic_diff_staut_BK_ap_BE
- )
-
- # Dual Internal/External Traffic Differentiation (Single station)
-
- def test_dual_traffic_diff_staut_VO_VI_ap_VI(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_dual_traffic_diff_staut_VO_VI_ap_VI
- )
-
- def test_dual_traffic_diff_staut_VO_BE_ap_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_dual_traffic_diff_staut_VO_BE_ap_BE
- )
-
- def test_dual_traffic_diff_staut_VO_BK_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_dual_traffic_diff_staut_VO_BK_ap_BK
- )
-
- def test_dual_traffic_diff_staut_VI_BE_ap_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_dual_traffic_diff_staut_VI_BE_ap_BE
- )
-
- def test_dual_traffic_diff_staut_VI_BK_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_dual_traffic_diff_staut_VI_BK_ap_BK
- )
-
- def test_dual_traffic_diff_staut_BE_BK_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_dual_traffic_diff_staut_BE_BK_ap_BK
- )
-
- # ACM Bit Conformance Tests (Single station, as WFA test below uses two)
-
- def test_acm_bit_on_VI(self) -> None:
- wmm_params_VI_ACM = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VI
- )
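- # The `|` union keeps every default WMM parameter and lets overlapping keys
- # from WMM_ACM_VI take precedence (standard dict-merge semantics), assuming
- # both constants are plain dicts.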
- self.run_wmm_test(
- wmm_test_cases.test_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM
- )
-
- # AC Parameter Modification Tests (Single station, as WFA test below uses two)
-
- def test_ac_param_degrade_VO(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_ac_param_degrade_VO,
- wmm_parameters=hostapd_constants.WMM_DEGRADED_VO_PARAMS,
- )
-
- def test_ac_param_degrade_VI(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_ac_param_degrade_VI,
- wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS,
- )
-
- def test_ac_param_improve_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_ac_param_improve_BE,
- wmm_parameters=hostapd_constants.WMM_IMPROVE_BE_PARAMS,
- )
-
- def test_ac_param_improve_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_ac_param_improve_BK,
- wmm_parameters=hostapd_constants.WMM_IMPROVE_BK_PARAMS,
- )
-
- # WFA Test Plan Tests
-
- """Traffic Differentiation in Single BSS (Single Station)"""
-
- def test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE
- )
-
- def test_wfa_traffic_diff_single_station_staut_VI_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE
- )
-
- def test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE
- )
-
- def test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK
- )
-
- def test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI(self) -> None:
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI
- )
-
- """Traffic Differentiation in Single BSS (Two Stations)"""
-
- def test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE(self) -> None:
- asserts.skip_if(not self.secondary_sta, "No secondary station.")
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE
- )
-
- def test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE(self) -> None:
- asserts.skip_if(not self.secondary_sta, "No secondary station.")
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE
- )
-
- def test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK(self) -> None:
- asserts.skip_if(not self.secondary_sta, "No secondary station.")
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK
- )
-
- def test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI(self) -> None:
- asserts.skip_if(not self.secondary_sta, "No secondary station.")
- self.run_wmm_test(
- wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI
- )
-
- """Test ACM Bit Conformance (Two Stations)"""
-
- def test_wfa_acm_bit_on_VI(self) -> None:
- asserts.skip_if(not self.secondary_sta, "No secondary station.")
- wmm_params_VI_ACM = (
- hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
- | hostapd_constants.WMM_ACM_VI
- )
- self.run_wmm_test(
- wmm_test_cases.test_wfa_acm_bit_on_VI,
- wmm_parameters=wmm_params_VI_ACM,
- )
-
- """Test the AC Parameter Modification"""
-
- def test_wfa_ac_param_degrade_VI(self) -> None:
- asserts.skip_if(not self.secondary_sta, "No secondary station.")
- self.run_wmm_test(
- wmm_test_cases.test_wfa_ac_param_degrade_VI,
- wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS,
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan/performance/rvr_settings.yaml b/tests/wlan/performance/rvr_settings.yaml
deleted file mode 100644
index 620a48d..0000000
--- a/tests/wlan/performance/rvr_settings.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-rvr_settings:
- starting_attn: 10
- ending_attn: 61
- step_size_in_db: 3
- dwell_time_in_secs: 10
- reverse_rvr_after_forward: true
- iperf_flags: "-i 1"
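-# A minimal sketch (plain Python, names hypothetical) of the attenuation sweep
-# these settings describe: step from starting_attn to ending_attn in
-# step_size_in_db increments, dwell dwell_time_in_secs at each step, then walk
-# back down because reverse_rvr_after_forward is true.
-#
-# forward = list(range(10, 61 + 1, 3)) # 10, 13, ..., 61 dB
-# sweep = forward + list(reversed(forward)) # reverse after forward pass
-# for attn_db in sweep:
-# ... # set attenuation, wait 10 s, run iperf with "-i 1"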
diff --git a/tests/wlan_policy/BUILD.gn b/tests/wlan_policy/BUILD.gn
deleted file mode 100644
index 264613f..0000000
--- a/tests/wlan_policy/BUILD.gn
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("hidden_networks_test") {
- main_source = "HiddenNetworksTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("policy_scan_test") {
- main_source = "PolicyScanTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("regulatory_recovery_test") {
- main_source = "RegulatoryRecoveryTest.py"
- environments = display_envs
-}
-
-antlion_host_test("saved_networks_test") {
- main_source = "SavedNetworksTest.py"
- environments = display_ap_envs
-}
-
-antlion_host_test("start_stop_client_connections_test") {
- main_source = "StartStopClientConnectionsTest.py"
- environments = display_ap_envs
-}
-
-group("e2e_tests") {
- testonly = true
- public_deps = [
- ":hidden_networks_test",
- ":policy_scan_test",
- ":regulatory_recovery_test",
- ":saved_networks_test",
- ":start_stop_client_connections_test",
- ]
-}
-
-group("e2e_tests_quick") {
- testonly = true
- public_deps = [ ":hidden_networks_test" ]
-}
diff --git a/tests/wlan_policy/HiddenNetworksTest.py b/tests/wlan_policy/HiddenNetworksTest.py
deleted file mode 100644
index 451dd86..0000000
--- a/tests/wlan_policy/HiddenNetworksTest.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectionState,
- SecurityType,
- WlanClientState,
-)
-from mobly import signals, test_runner
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
- WlanPolicyControllerError,
-)
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-# These tests should have a longer timeout for connecting than normal connect
-# tests because the device should probabilistically perform active scans for
-# hidden networks. Multiple scans are necessary to verify a very low chance of
-# random failure.
-TIME_WAIT_FOR_CONNECT = 90
-TIME_ATTEMPT_SCANS = 90
-
-
-class HiddenNetworksTest(base_test.WifiBaseTest):
- """Tests that WLAN Policy will detect hidden networks
-
- Test Bed Requirement:
- * One or more Fuchsia devices
- * One Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- # Start an AP with a hidden network
- self.access_point = self.access_points[0]
- self.hidden_ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- self.hidden_password = rand_ascii_str(
- hostapd_constants.AP_PASSPHRASE_LENGTH_2G
- )
- self.access_point.stop_all_aps()
- setup_ap(
- self.access_point,
- "whirlwind",
- hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- self.hidden_ssid,
- hidden=True,
- security=Security(
- security_mode=SecurityMode.WPA2,
- password=self.hidden_password,
- ),
- )
-
- if len(self.fuchsia_devices) < 1:
- raise EnvironmentError("No Fuchsia devices found.")
- for fd in self.fuchsia_devices:
- fd.configure_wlan(
- association_mechanism="policy", preserve_saved_networks=True
- )
-
- def setup_test(self) -> None:
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.remove_all_networks()
- fd.wlan_policy_controller.wait_for_no_connections()
-
- def teardown_class(self) -> None:
- self.access_point.stop_all_aps()
-
- # Tests
-
- def test_scan_hidden_networks(self) -> None:
- """Probabilistic test to see if we can see hidden networks with a scan.
-
- Scan a few times and check that we see the hidden networks in the results at
- least once. We stop client connections to not trigger a connect when saving,
- which would interfere with requested scans.
-
- Raises:
- TestFailure if we fail to see hidden network in scans before timing out.
- """
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_DISABLED
- )
- fd.honeydew_fd.wlan_policy.save_network(
- self.hidden_ssid, SecurityType.WPA2, self.hidden_password
- )
- fd.honeydew_fd.wlan_policy.start_client_connections()
- start_time = time.time()
- num_performed_scans = 0
-
- while time.time() < start_time + TIME_ATTEMPT_SCANS:
- num_performed_scans = num_performed_scans + 1
- scan_result = fd.honeydew_fd.wlan_policy.scan_for_networks()
-
- if self.hidden_ssid in scan_result:
- self.log.info(
- f"SSID of hidden network seen after {num_performed_scans} scans"
- )
- return
- # Don't overload SL4F with scan requests
- time.sleep(1)
-
- self.log.error(
- f"Failed to see SSID after {num_performed_scans} scans"
- )
- raise signals.TestFailure("Failed to see hidden network in scans")
-
- def test_auto_connect_hidden_on_startup(self) -> None:
- """Test auto connect on startup.
-
- This test checks that if we are not connected to anything but have a hidden
- network saved, we will eventually actively scan for it and connect.
-
- Raises:
- TestFailure if the client fails to auto connect to the hidden network.
- """
- # The hidden AP from setup_class is already running; save its network,
- # reboot, and expect the device to auto connect.
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- fd.honeydew_fd.wlan_policy.save_network(
- self.hidden_ssid, SecurityType.WPA2, self.hidden_password
- )
-
- # Reboot the device and check that it auto connects.
- fd.reboot()
- try:
- fd.wlan_policy_controller.wait_for_network_state(
- self.hidden_ssid,
- ConnectionState.CONNECTED,
- timeout_sec=TIME_WAIT_FOR_CONNECT,
- )
- except WlanPolicyControllerError as e:
- raise signals.TestFailure(
- "Failed to auto connect to hidden network on startup"
- ) from e
-
- def test_auto_connect_hidden_on_save(self) -> None:
- """Test auto connect to hidden network on save.
-
- This test checks that if we save a hidden network and are not connected to
- anything, the device will connect to the hidden network that was just saved.
-
- Raises:
- TestFailure if client fails to auto connect to a hidden network after saving
- it.
- """
- for fd in self.fuchsia_devices:
- fd.wlan_policy_controller.wait_for_no_connections()
- fd.honeydew_fd.wlan_policy.save_network(
- self.hidden_ssid, SecurityType.WPA2, self.hidden_password
- )
- try:
- fd.wlan_policy_controller.wait_for_network_state(
- self.hidden_ssid,
- ConnectionState.CONNECTED,
- timeout_sec=TIME_WAIT_FOR_CONNECT,
- )
- except WlanPolicyControllerError as e:
- raise signals.TestFailure(
- "Failed to auto connect to hidden network on save"
- ) from e
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan_policy/PolicyScanTest.py b/tests/wlan_policy/PolicyScanTest.py
deleted file mode 100644
index 4a01306..0000000
--- a/tests/wlan_policy/PolicyScanTest.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import logging
-
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectionState,
- SecurityType,
-)
-from mobly import asserts, signals, test_runner
-
-from antlion.controllers.ap_lib import (
- hostapd_ap_preset,
- hostapd_bss_settings,
- hostapd_constants,
- hostapd_security,
-)
-from antlion.test_utils.wifi import base_test
-
-
-class PolicyScanTest(base_test.WifiBaseTest):
- """WLAN policy scan test class.
-
- This test exercises the scan functionality for the WLAN Policy API.
-
- Test Bed Requirement:
- * One or more Fuchsia devices
- * One Whirlwind Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
-
- if len(self.fuchsia_devices) < 1:
- raise signals.TestFailure("No fuchsia devices found.")
- for fd in self.fuchsia_devices:
- fd.configure_wlan(
- association_mechanism="policy", preserve_saved_networks=True
- )
- if len(self.access_points) < 1:
- raise signals.TestFailure("No access points found.")
- # Prepare the AP
- self.access_point = self.access_points[0]
- self.access_point.stop_all_aps()
- # Generate network params.
- bss_settings_2g: list[hostapd_bss_settings.BssSettings] = []
- bss_settings_5g: list[hostapd_bss_settings.BssSettings] = []
- open_network = self.get_open_network(False, [])
- self.open_network_2g = open_network["2g"]
- self.open_network_5g = open_network["5g"]
- wpa2_settings = self.get_psk_network(False, [])
- self.wpa2_network_2g = wpa2_settings["2g"]
- self.wpa2_network_5g = wpa2_settings["5g"]
- bss_settings_2g.append(
- hostapd_bss_settings.BssSettings(
- name=self.wpa2_network_2g["SSID"],
- ssid=self.wpa2_network_2g["SSID"],
- security=hostapd_security.Security(
- security_mode=self.wpa2_network_2g["security"],
- password=self.wpa2_network_2g["password"],
- ),
- )
- )
- bss_settings_5g.append(
- hostapd_bss_settings.BssSettings(
- name=self.wpa2_network_5g["SSID"],
- ssid=self.wpa2_network_5g["SSID"],
- security=hostapd_security.Security(
- security_mode=self.wpa2_network_5g["security"],
- password=self.wpa2_network_5g["password"],
- ),
- )
- )
- self.ap_2g = hostapd_ap_preset.create_ap_preset(
- iface_wlan_2g=self.access_points[0].wlan_2g,
- iface_wlan_5g=self.access_points[0].wlan_5g,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
- ssid=self.open_network_2g["SSID"],
- bss_settings=bss_settings_2g,
- )
- self.ap_5g = hostapd_ap_preset.create_ap_preset(
- iface_wlan_2g=self.access_points[0].wlan_2g,
- iface_wlan_5g=self.access_points[0].wlan_5g,
- channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid=self.open_network_5g["SSID"],
- bss_settings=bss_settings_5g,
- )
- # Start the networks
- self.access_point.start_ap(hostapd_config=self.ap_2g)
- self.access_point.start_ap(hostapd_config=self.ap_5g)
- # List of test SSIDs started by APs
- self.all_ssids = [
- self.open_network_2g["SSID"],
- self.wpa2_network_2g["SSID"],
- self.open_network_5g["SSID"],
- self.wpa2_network_5g["SSID"],
- ]
-
- def setup_test(self) -> None:
- super().setup_test()
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.remove_all_networks()
- fd.wlan_policy_controller.wait_for_no_connections()
-
- def teardown_test(self) -> None:
- self.download_logs()
- super().teardown_test()
-
- def _assert_network_is_in_results(
- self, scan_results: list[str], ssid: str
- ) -> None:
- """Verified scan results contain a specified network
-
- Args:
- scan_results: Scan results from a fuchsia Policy API scan.
- ssid: SSID for network that should be in the results.
-
- Raises:
- signals.TestFailure: if the network is not present in the scan results
- """
- asserts.assert_true(
- ssid in scan_results,
- f'Network "{ssid}" was not found in scan results: {scan_results}',
- )
-
- def test_basic_scan_request(self) -> None:
- """Verify a scan returns all expected networks"""
- for fd in self.fuchsia_devices:
- scan_results = fd.honeydew_fd.wlan_policy.scan_for_networks()
- if len(scan_results) == 0:
- raise signals.TestFailure("Scan did not find any networks")
- for ssid in self.all_ssids:
- self._assert_network_is_in_results(scan_results, ssid)
-
- def test_scan_while_connected_open_network_2g(self) -> None:
- """Connect to an open 2g network and perform a scan"""
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- self.open_network_2g["SSID"],
- SecurityType(
- self.open_network_2g["security"].fuchsia_security_type()
- ),
- self.open_network_2g["password"],
- )
- fd.honeydew_fd.wlan_policy.connect(
- self.open_network_2g["SSID"],
- SecurityType(
- self.open_network_2g["security"].fuchsia_security_type()
- ),
- )
- fd.wlan_policy_controller.wait_for_network_state(
- self.open_network_2g["SSID"], ConnectionState.CONNECTED
- )
-
- scan_results = fd.honeydew_fd.wlan_policy.scan_for_networks()
- for ssid in self.all_ssids:
- self._assert_network_is_in_results(scan_results, ssid)
-
- def test_scan_while_connected_wpa2_network_2g(self) -> None:
- """Connect to a WPA2 2g network and perform a scan"""
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- self.wpa2_network_2g["SSID"],
- SecurityType(
- self.wpa2_network_2g["security"].fuchsia_security_type()
- ),
- self.wpa2_network_2g["password"],
- )
- fd.honeydew_fd.wlan_policy.connect(
- self.wpa2_network_2g["SSID"],
- SecurityType(
- self.wpa2_network_2g["security"].fuchsia_security_type()
- ),
- )
- fd.wlan_policy_controller.wait_for_network_state(
- self.wpa2_network_2g["SSID"], ConnectionState.CONNECTED
- )
-
- scan_results = fd.honeydew_fd.wlan_policy.scan_for_networks()
- for ssid in self.all_ssids:
- self._assert_network_is_in_results(scan_results, ssid)
-
- def test_scan_while_connected_open_network_5g(self) -> None:
- """Connect to an open 5g network and perform a scan"""
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- self.open_network_5g["SSID"],
- SecurityType(
- self.open_network_5g["security"].fuchsia_security_type()
- ),
- self.open_network_5g["password"],
- )
- fd.honeydew_fd.wlan_policy.connect(
- self.open_network_5g["SSID"],
- SecurityType(
- self.open_network_5g["security"].fuchsia_security_type()
- ),
- )
- fd.wlan_policy_controller.wait_for_network_state(
- self.open_network_5g["SSID"], ConnectionState.CONNECTED
- )
-
- scan_results = fd.honeydew_fd.wlan_policy.scan_for_networks()
- for ssid in self.all_ssids:
- self._assert_network_is_in_results(scan_results, ssid)
-
- def test_scan_while_connected_wpa2_network_5g(self) -> None:
- """Connect to a WPA2 5g network and perform a scan"""
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- self.wpa2_network_5g["SSID"],
- SecurityType(
- self.wpa2_network_5g["security"].fuchsia_security_type()
- ),
- self.wpa2_network_5g["password"],
- )
- fd.honeydew_fd.wlan_policy.connect(
- self.wpa2_network_5g["SSID"],
- SecurityType(
- self.wpa2_network_5g["security"].fuchsia_security_type()
- ),
- )
- fd.wlan_policy_controller.wait_for_network_state(
- self.wpa2_network_5g["SSID"], ConnectionState.CONNECTED
- )
-
- scan_results = fd.honeydew_fd.wlan_policy.scan_for_networks()
- for ssid in self.all_ssids:
- self._assert_network_is_in_results(scan_results, ssid)
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan_policy/RegulatoryRecoveryTest.py b/tests/wlan_policy/RegulatoryRecoveryTest.py
deleted file mode 100644
index b3057f1..0000000
--- a/tests/wlan_policy/RegulatoryRecoveryTest.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectivityMode,
- CountryCode,
- OperatingBand,
- SecurityType,
- WlanClientState,
-)
-from mobly import asserts, signals, test_runner
-
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
- WlanPolicyControllerError,
-)
-from antlion.test_utils.wifi import base_test
-
-
-class RegulatoryRecoveryTest(base_test.WifiBaseTest):
- """Tests the policy layer's response to setting country code.
-
- Test Bed Requirements:
- * One Fuchsia device that is capable of operating as a WLAN client and AP.
-
- Example Config:
- "regulatory_recovery_test_params": {
- "country_code": "US"
- }
-
- If no configuration information is provided, the test will default to
- toggling between WW and US.
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- if len(self.fuchsia_devices) < 1:
- raise EnvironmentError("No Fuchsia devices found.")
-
- self.config_test_params = self.user_params.get(
- "regulatory_recovery_test_params", {}
- )
- self.country_code = self.config_test_params.get("country_code", "US")
- self.negative_test = self.config_test_params.get("negative_test", False)
-
- for fd in self.fuchsia_devices:
- fd.configure_wlan(association_mechanism="policy")
-
- def teardown_class(self) -> None:
- if not self.negative_test:
- for fd in self.fuchsia_devices:
- fd.wlan_controller.set_country_code(self.country_code)
-
- super().teardown_class()
-
- def setup_test(self) -> None:
- """Set PHYs to world-wide mode and disable AP and client connections."""
- for fd in self.fuchsia_devices:
- fd.wlan_controller.set_country_code(CountryCode.WORLDWIDE)
- fd.honeydew_fd.wlan_policy_ap.stop_all()
-
- def _set_country_code_check(self, fd: FuchsiaDevice) -> None:
- """Set the country code and check if successful.
-
- Args:
- fd: Fuchsia device to set country code on.
-
- Raises:
- EnvironmentError on failure to set country code or success setting country
- code when it should be a failure case.
- """
- try:
- fd.wlan_controller.set_country_code(self.country_code)
- except EnvironmentError as e:
- if self.negative_test:
- # In the negative case, setting the country code for an
- # invalid country should fail.
- pass
- else:
- # If this is not a negative test case, re-raise the
- # exception.
- raise
- else:
- # The negative test case should have failed to set the country
- # code and the positive test case should succeed.
- if self.negative_test:
- raise EnvironmentError(
- "Setting invalid country code succeeded."
- )
- else:
- pass
-
- def test_interfaces_not_recreated_when_initially_disabled(self) -> None:
- """Test after applying new region no new interfaces are automatically recreated.
-
- We start with client connections and access points disabled. There should be no
- state change after applying a new regulatory region.
-
- Raises:
- TestFailure if client or AP are in unexpected state.
- """
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_DISABLED
- )
-
- self._set_country_code_check(fd)
-
- # Verify that the client is still stopped.
- try:
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_DISABLED
- )
- except WlanPolicyControllerError:
- raise signals.TestFailure(
- "Client policy layer is in unexpected state"
- )
-
- # Verify that the AP is still stopped.
- fd.honeydew_fd.wlan_policy_ap.set_new_update_listener()
- ap_updates = fd.honeydew_fd.wlan_policy_ap.get_update()
- if ap_updates:
- raise signals.TestFailure(
- f"AP in unexpected state: {ap_updates}"
- )
-
- def test_interfaces_recreated_when_initially_enabled(self) -> None:
- """Test after applying new region interfaces are automatically recreated.
-
- After enabling client connections and access points we check that all interfaces
- are recreated.
-
- Raises:
- TestFailure if client or AP are in unexpected state.
- """
- test_ssid = "test_ssid"
- security_type = SecurityType.NONE
- for fd in self.fuchsia_devices:
- # Start client connections and start an AP before setting the country code.
- fd.honeydew_fd.wlan_policy.start_client_connections()
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_ENABLED
- )
- fd.honeydew_fd.wlan_policy_ap.start(
- test_ssid,
- security_type,
- None,
- ConnectivityMode.LOCAL_ONLY,
- OperatingBand.ANY,
- )
-
- # Set the country code.
- self._set_country_code_check(fd)
-
- # Verify that the client connections are enabled.
- try:
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_ENABLED
- )
- except WlanPolicyControllerError:
- raise signals.TestFailure(
- "Client policy layer is in unexpected state"
- )
-
- # Verify that the AP is brought up again.
- fd.honeydew_fd.wlan_policy_ap.set_new_update_listener()
- ap_updates = fd.honeydew_fd.wlan_policy_ap.get_update()
- if len(ap_updates) != 1:
- raise signals.TestFailure(f"No APs are running: {ap_updates}")
- else:
- asserts.assert_equal(
- ap_updates[0].id_.ssid, test_ssid, "Wrong ssid", ap_updates
- )
- asserts.assert_equal(
- ap_updates[0].id_.security_type,
- security_type,
- "Wrong security type",
- ap_updates,
- )
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan_policy/SavedNetworksTest.py b/tests/wlan_policy/SavedNetworksTest.py
deleted file mode 100644
index 1a7f15c..0000000
--- a/tests/wlan_policy/SavedNetworksTest.py
+++ /dev/null
@@ -1,459 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from honeydew.affordances.connectivity.wlan.utils.errors import (
- HoneydewWlanError,
-)
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ConnectionState,
- NetworkConfig,
- SecurityType,
- WlanClientState,
-)
-from mobly import asserts, signals, test_runner
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
- WlanPolicyControllerError,
-)
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str, rand_hex_str
-
-PSK_LEN = 64
-CREDENTIAL_TYPE_PSK = "Psk"
-CREDENTIAL_TYPE_NONE = "None"
-CREDENTIAL_TYPE_PASSWORD = "Password"
-CREDENTIAL_VALUE_NONE = ""
-
-
-class SavedNetworksTest(base_test.WifiBaseTest):
- """WLAN policy commands test class.
-
- A test that saves various networks and verifies the behavior of save, get, and
- remove through the ClientController API of WLAN policy.
-
- Test Bed Requirement:
- * One or more Fuchsia devices
- * One Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- if len(self.fuchsia_devices) < 1:
- raise EnvironmentError("No Fuchsia devices found.")
- for fd in self.fuchsia_devices:
- fd.configure_wlan(
- association_mechanism="policy", preserve_saved_networks=True
- )
-
- def setup_test(self) -> None:
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.remove_all_networks()
- fd.wlan_policy_controller.wait_for_no_connections()
- self.access_points[0].stop_all_aps()
-
- def teardown_class(self) -> None:
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.remove_all_networks()
- self.access_points[0].stop_all_aps()
-
- def _has_saved_network(
- self, fd: FuchsiaDevice, network: NetworkConfig
- ) -> bool:
- """Verify that the network is present in saved networks.
-
- Args:
- fd: Fuchsia device to run on.
- network: Network to check for.
-
- Returns:
- True if network is found in saved networks, otherwise False.
- """
- networks: list[
- NetworkConfig
- ] = fd.honeydew_fd.wlan_policy.get_saved_networks()
- return network in networks
-
- def _start_ap(
- self,
- ssid: str,
- security_type: SecurityMode,
- password: str | None = None,
- ) -> None:
- """Starts an access point.
-
- Args:
- ssid: The SSID of the network to broadcast
- security_type: The security type of the network to be broadcasted
- password: The password to connect to the broadcasted network. The password
- is ignored if security type is none.
-
- Raises:
- EnvironmentError if it fails to set up AP for test.
- """
- # Put together the security configuration of the network to be broadcasted.
- security = Security(security_mode=security_type, password=password)
-
- if len(self.access_points) > 0:
- # Create an AP with default values other than the specified values.
- setup_ap(
- self.access_points[0],
- "whirlwind",
- hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- ssid,
- security=security,
- )
- else:
- self.log.error(
- "No access point available for test, please check config"
- )
- raise EnvironmentError("Failed to set up AP for test")
-
- def test_open_network_with_password(self) -> None:
- """Save an open network with a password and verify that it fails to save."""
- test_network = NetworkConfig(
- rand_ascii_str(10),
- SecurityType.NONE,
- CREDENTIAL_TYPE_NONE,
- rand_ascii_str(8),
- )
-
- for fd in self.fuchsia_devices:
- try:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- asserts.fail("Unexpectedly succeeded to save network")
- except HoneydewWlanError:
- networks = fd.honeydew_fd.wlan_policy.get_saved_networks()
- if test_network in networks:
- asserts.fail("Got an unexpected saved network")
- # Successfully failed to save network.
- return
-
- asserts.fail("Failed to get error saving bad network")
-
- def test_open_network(self) -> None:
- """Save an open network and verify presence."""
- test_network = NetworkConfig(
- rand_ascii_str(10),
- SecurityType.NONE,
- CREDENTIAL_TYPE_NONE,
- CREDENTIAL_VALUE_NONE,
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- def test_network_with_psk(self) -> None:
- """Save a network with a PSK and verify presence.
-
- PSK are translated from hex to bytes when saved, and when returned by
- get_saved_networks it will be lower case.
- """
- test_network = NetworkConfig(
- rand_ascii_str(11),
- SecurityType.WPA2,
- CREDENTIAL_TYPE_PSK,
- rand_hex_str(PSK_LEN).lower(),
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- def test_wep_network(self) -> None:
- """Save a wep network and verify presence."""
- test_network = NetworkConfig(
- rand_ascii_str(12),
- SecurityType.WEP,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(13),
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- def test_wpa2_network(self) -> None:
- """Save a wpa2 network and verify presence."""
- test_network = NetworkConfig(
- rand_ascii_str(9),
- SecurityType.WPA2,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(15),
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- def test_wpa_network(self) -> None:
- """Save a wpa network and verify presence."""
- test_network = NetworkConfig(
- rand_ascii_str(16),
- SecurityType.WPA,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(9),
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- def test_wpa3_network(self) -> None:
- """Save a wpa3 network and verify presence."""
- test_network = NetworkConfig(
- rand_ascii_str(9),
- SecurityType.WPA3,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(15),
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- def test_save_network_persists(self) -> None:
- """Save a network and verify after reboot network is present."""
- test_network = NetworkConfig(
- rand_ascii_str(10),
- SecurityType.WPA2,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(10),
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
-
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network not present")
-
- fd.reboot()
-
- if not self._has_saved_network(fd, test_network):
- asserts.fail("Saved network did not persist through reboot")
-
- def test_same_ssid_diff_security(self) -> None:
- """Save two networks with the same ssids but different security types.
-
- Both networks should be saved and present in network state since they have
- different security types and therefore different network identifiers.
- """
- ssid = rand_ascii_str(19)
- test_network_wpa2 = NetworkConfig(
- ssid,
- SecurityType.WPA2,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(12),
- )
- test_network_open = NetworkConfig(
- ssid,
- SecurityType.NONE,
- CREDENTIAL_TYPE_NONE,
- CREDENTIAL_VALUE_NONE,
- )
-
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.save_network(
- test_network_wpa2.ssid,
- test_network_wpa2.security_type,
- test_network_wpa2.credential_value,
- )
-
- fd.honeydew_fd.wlan_policy.save_network(
- test_network_open.ssid,
- test_network_open.security_type,
- test_network_open.credential_value,
- )
-
- if not (
- self._has_saved_network(fd, test_network_wpa2)
- and self._has_saved_network(fd, test_network_open)
- ):
- asserts.fail("Both saved networks not present")
-
- def test_remove_disconnects(self) -> None:
- """Connect to network, remove it while still connected, and verify disconnect.
-
- This test requires a wpa2 network. Remove all other networks first so that we
- don't auto connect to them.
- """
- test_network = NetworkConfig(
- rand_ascii_str(10),
- SecurityType.WPA2,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(10),
- )
-
- self._start_ap(
- test_network.ssid, SecurityMode.WPA2, test_network.credential_value
- )
-
- for fd in self.fuchsia_devices:
- fd.wlan_policy_controller.wait_for_no_connections()
- # Make sure client connections are enabled
- fd.honeydew_fd.wlan_policy.start_client_connections()
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_ENABLED
- )
- # Save and verify we connect to network
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
-
- fd.wlan_policy_controller.wait_for_network_state(
- test_network.ssid, ConnectionState.CONNECTED
- )
- # Remove network and verify we disconnect
- fd.honeydew_fd.wlan_policy.remove_network(
- test_network.ssid,
- test_network.security_type,
- test_network.credential_value,
- )
- try:
- fd.wlan_policy_controller.wait_for_no_connections()
- except WlanPolicyControllerError as e:
- raise signals.TestFailure("Failed to remove network") from e
-
- def test_auto_connect_open(self) -> None:
- """Save an open network and verify it auto connects.
-
- Start up AP with an open network and verify that the client auto connects to
- that network after we save it.
- """
- test_network = NetworkConfig(
- rand_ascii_str(10),
- SecurityType.NONE,
- CREDENTIAL_TYPE_NONE,
- CREDENTIAL_VALUE_NONE,
- )
-
- self._start_ap(
- test_network.ssid, SecurityMode.OPEN, test_network.credential_value
- )
-
- for fd in self.fuchsia_devices:
- fd.wlan_policy_controller.wait_for_no_connections()
- # Make sure client connections are enabled
- fd.honeydew_fd.wlan_policy.start_client_connections()
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_ENABLED
- )
- # Save the network and make sure that we see the device auto connect to it.
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid, test_network.security_type
- )
- try:
- fd.wlan_policy_controller.wait_for_network_state(
- test_network.ssid, ConnectionState.CONNECTED
- )
- except WlanPolicyControllerError as e:
- raise signals.TestFailure(
- "network is not in connected state"
- ) from e
-
- def test_auto_connect_wpa3(self) -> None:
- """Save an wpa3 network and verify it auto connects.
-
- Start up AP with a wpa3 network and verify that the client auto connects to
- that network after we save it.
- """
- test_network = NetworkConfig(
- rand_ascii_str(10),
- SecurityType.WPA3,
- CREDENTIAL_TYPE_PASSWORD,
- rand_ascii_str(10),
- )
-
- self._start_ap(
- test_network.ssid, SecurityMode.WPA3, test_network.credential_value
- )
-
- for fd in self.fuchsia_devices:
- fd.wlan_policy_controller.wait_for_no_connections()
- # Make sure client connections are enabled
- fd.honeydew_fd.wlan_policy.start_client_connections()
- fd.wlan_policy_controller.wait_for_client_state(
- WlanClientState.CONNECTIONS_ENABLED
- )
- # Save the network and make sure that we see the device auto connect to it.
- fd.honeydew_fd.wlan_policy.save_network(
- test_network.ssid,
- SecurityType.WPA3,
- test_network.credential_value,
- )
- try:
- fd.wlan_policy_controller.wait_for_network_state(
- test_network.ssid, ConnectionState.CONNECTED
- )
- except WlanPolicyControllerError as e:
- raise signals.TestFailure(
- "network is not in connected state"
- ) from e
-
-
-if __name__ == "__main__":
- test_runner.main()
diff --git a/tests/wlan_policy/StartStopClientConnectionsTest.py b/tests/wlan_policy/StartStopClientConnectionsTest.py
deleted file mode 100644
index f6d4a44..0000000
--- a/tests/wlan_policy/StartStopClientConnectionsTest.py
+++ /dev/null
@@ -1,329 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-
-import fidl_fuchsia_wlan_policy as f_wlan_policy
-from honeydew.affordances.connectivity.wlan.utils.types import (
- ClientStateSummary,
- ConnectionState,
- DisconnectStatus,
- NetworkIdentifier,
- NetworkState,
- SecurityType,
- WlanClientState,
-)
-from mobly import asserts, signals, test_runner
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-
-class StartStopClientConnectionsTest(base_test.WifiBaseTest):
- """Tests that we see the expected behavior with enabling and disabling
- client connections
-
- Test Bed Requirement:
- * One or more Fuchsia devices
- * One Access Point
- """
-
- def setup_class(self) -> None:
- super().setup_class()
- self.log = logging.getLogger()
- # Start an AP with a WPA2-protected network
- self.ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
- self.access_point = self.access_points[0]
- self.password = rand_ascii_str(
- hostapd_constants.AP_PASSPHRASE_LENGTH_2G
- )
- self.security_type = SecurityType.WPA2
- security = hostapd_security.Security(
- security_mode=hostapd_security.SecurityMode.WPA2,
- password=self.password,
- )
-
- self.access_point.stop_all_aps()
- # TODO(63719) use varying values for AP that shouldn't affect the test.
- setup_ap(
- self.access_point,
- "whirlwind",
- hostapd_constants.AP_DEFAULT_CHANNEL_5G,
- self.ssid,
- security=security,
- )
-
- if len(self.fuchsia_devices) < 1:
- raise EnvironmentError("No Fuchsia devices found.")
- for fd in self.fuchsia_devices:
- fd.configure_wlan(
- association_mechanism="policy", preserve_saved_networks=True
- )
-
- def setup_test(self) -> None:
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.remove_all_networks()
- fd.wlan_policy_controller.wait_for_no_connections()
-
- def teardown_class(self) -> None:
- self.download_logs()
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.remove_all_networks()
- fd.wlan_policy_controller.wait_for_no_connections()
- self.access_point.stop_all_aps()
- super().teardown_class()
-
- def _wait_until_update(
- self,
- fd: FuchsiaDevice,
- expected: ClientStateSummary,
- timeout_sec: float = 30,
- ) -> None:
- """Wait until the expected update.
-
- Args:
- fd: Fuchsia device to check.
- expected: Expected state.
- timeout_sec: Timeout in seconds.
-
- Raises:
- signals.TestFailure: If we don't get the expected update within
- timeout_sec.
- """
- result: ClientStateSummary | None = None
- timeout = time.time() + timeout_sec
-
- while True:
- time_left = timeout - time.time()
- try:
- if time_left < 0:
- raise TimeoutError()
- result = fd.honeydew_fd.wlan_policy.get_update(time_left)
- except TimeoutError as e:
- raise signals.TestFailure(
- f'want "{expected}" within {timeout_sec}s, got {result}'
- ) from e
- if result == expected:
- return
-
- def test_stop_client_connections_update(self) -> None:
- """Test that we can stop client connections.
-
- The fuchsia device always starts client connections during configure_wlan. We
- verify first that we are in a client connections enabled state.
- """
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.start_client_connections()
- fd.honeydew_fd.wlan_policy.set_new_update_listener()
- self._wait_until_update(
- fd,
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[],
- ),
- )
-
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_DISABLED,
- networks=[],
- ),
- )
-
- def test_start_client_connections_update(self) -> None:
- """Test that we can start client connections."""
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- fd.honeydew_fd.wlan_policy.set_new_update_listener()
- self._wait_until_update(
- fd,
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_DISABLED,
- networks=[],
- ),
- )
-
- fd.honeydew_fd.wlan_policy.start_client_connections()
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[],
- ),
- )
-
- def test_stop_client_connections_rejects_connections(self) -> None:
- """Test that if client connections are disabled connection attempts fail."""
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.start_client_connections()
- fd.honeydew_fd.wlan_policy.set_new_update_listener()
- self._wait_until_update(
- fd,
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[],
- ),
- )
-
- fd.honeydew_fd.wlan_policy.save_network(
- self.ssid, self.security_type, self.password
- )
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[
- NetworkState(
- network_identifier=NetworkIdentifier(
- self.ssid, self.security_type
- ),
- connection_state=ConnectionState.CONNECTING,
- disconnect_status=None,
- )
- ],
- ),
- )
-
- # Stop connections interrupts connect attempt.
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_DISABLED,
- networks=[
- NetworkState(
- network_identifier=NetworkIdentifier(
- self.ssid, self.security_type
- ),
- connection_state=ConnectionState.DISCONNECTED,
- disconnect_status=DisconnectStatus.CONNECTION_STOPPED,
- )
- ],
- ),
- )
-
- # Subsequent attempt to connect fails.
- status = fd.honeydew_fd.wlan_policy.connect(
- self.ssid, self.security_type
- )
- asserts.assert_equal(
- status,
- f_wlan_policy.RequestStatus.REJECTED_INCOMPATIBLE_MODE,
- "Expected connection request to be rejected as incompatible.",
- )
-
- def test_start_stop_client_connections(self) -> None:
- """Test automated behavior when starting/stopping client connections.
-
- When starting and stopping the client connections the device should connect and
- disconnect from the saved network.
- """
- for fd in self.fuchsia_devices:
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- fd.honeydew_fd.wlan_policy.set_new_update_listener()
- self._wait_until_update(
- fd,
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_DISABLED,
- networks=[],
- ),
- )
-
- fd.honeydew_fd.wlan_policy.save_network(
- self.ssid, self.security_type, self.password
- )
- self.log.info(
- f'Saved network "{self.ssid}" with password "{self.password}" ({self.security_type})'
- )
-
- fd.honeydew_fd.wlan_policy.start_client_connections()
- self.log.info(
- "WLAN client connections enabled, expecting auto-connect"
- )
-
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED, networks=[]
- ),
- )
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[
- NetworkState(
- network_identifier=NetworkIdentifier(
- self.ssid, self.security_type
- ),
- connection_state=ConnectionState.CONNECTING,
- disconnect_status=None,
- )
- ],
- ),
- f'Expected auto-connect request to "{self.ssid}"',
- )
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(timeout=60),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[
- NetworkState(
- network_identifier=NetworkIdentifier(
- self.ssid, self.security_type
- ),
- connection_state=ConnectionState.CONNECTED,
- disconnect_status=None,
- )
- ],
- ),
- f'Expected auto-connect to "{self.ssid}" within 1 minute',
- )
- self.log.info(f'Connected to network "{self.ssid}"')
-
- fd.honeydew_fd.wlan_policy.stop_client_connections()
- self.log.info("Stopped client connections")
-
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_ENABLED,
- networks=[
- NetworkState(
- network_identifier=NetworkIdentifier(
- self.ssid, self.security_type
- ),
- connection_state=ConnectionState.DISCONNECTED,
- disconnect_status=DisconnectStatus.CONNECTION_STOPPED,
- )
- ],
- ),
- f'Expected auto-disconnect from "{self.ssid}"',
- )
- asserts.assert_equal(
- fd.honeydew_fd.wlan_policy.get_update(),
- ClientStateSummary(
- state=WlanClientState.CONNECTIONS_DISABLED, networks=[]
- ),
- )
-
-
-if __name__ == "__main__":
- test_runner.main()