diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index d7d9cae..0000000 --- a/.editorconfig +++ /dev/null
@@ -1,13 +0,0 @@ -# EditorConfig is awesome: https://EditorConfig.org - -root = true - -[*] -end_of_line = lf -insert_final_newline = true -trim_trailing_whitespace = true - -[*.py] -charset = utf-8 -indent_style = space -indent_size = 4
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs deleted file mode 100644 index befa060..0000000 --- a/.git-blame-ignore-revs +++ /dev/null
@@ -1,5 +0,0 @@ -# Run code through yapf -19a821d5f1ff9079f9a40d27553182a433a27834 - -# Run code through black -0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 767654b..0000000 --- a/.gitignore +++ /dev/null
@@ -1,46 +0,0 @@ -# -# OS-specific -# - -.DS_Store - -# -# Language specific -# - -# Python -*.py[cod] -*.egg-info/ -/build/ -/.venv - -# -# Editors -# - -/.idea/ -/.vscode/ - -# -# antlion -# - -# Configuration -/*.json -/*.yaml -/config/ - -# Generated during run-time -/logs - -# Local development scripts -/*.sh - -# -# third_party -# - -/third_party/* -!/third_party/github.com/ -!/third_party/github.com/jd/tenacity -/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn deleted file mode 100644 index f2aab56..0000000 --- a/BUILD.gn +++ /dev/null
@@ -1,228 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Declare Fuchsia build targets for using antlion from the Fuchsia tree. -# Requires additional configuration of jiri fetch attributes from your Fuchsia -# checkout: -# `jiri init -fetch-optional=antlion` - -import("//build/python/python_library.gni") - -# Tests for full build validation -group("e2e_tests") { - testonly = true - public_deps = [ "src/antlion/tests:e2e_tests" ] -} - -# Subset of tests to validate builds in under 15 minutes. -group("e2e_tests_quick") { - testonly = true - public_deps = [ "src/antlion/tests:e2e_tests_quick" ] -} - -# Tests for at-desk custom validation -group("e2e_tests_manual") { - testonly = true - public_deps = [ "src/antlion/tests:e2e_tests_manual" ] -} - -# deprecated: prefer e2e_tests_quick -group("smoke_tests") { - testonly = true - public_deps = [ ":e2e_tests_quick" ] -} - -# Unit tests only -group("tests") { - testonly = true - public_deps = [ "runner:tests" ] -} - -python_library("antlion") { - source_root = "//third_party/antlion/src/antlion" - sources = [ - "__init__.py", - "base_test.py", - "bin/__init__.py", - "bin/act.py", - "capabilities/__init__.py", - "capabilities/ssh.py", - "config_parser.py", - "context.py", - "controllers/__init__.py", - "controllers/access_point.py", - "controllers/adb.py", - "controllers/adb_lib/__init__.py", - "controllers/adb_lib/error.py", - "controllers/android_device.py", - "controllers/android_lib/__init__.py", - "controllers/android_lib/errors.py", - "controllers/android_lib/events.py", - "controllers/android_lib/logcat.py", - "controllers/android_lib/services.py", - "controllers/android_lib/tel/__init__.py", - "controllers/android_lib/tel/tel_utils.py", - "controllers/ap_lib/__init__.py", - "controllers/ap_lib/ap_get_interface.py", - "controllers/ap_lib/ap_iwconfig.py", - 
"controllers/ap_lib/bridge_interface.py", - "controllers/ap_lib/dhcp_config.py", - "controllers/ap_lib/dhcp_server.py", - "controllers/ap_lib/extended_capabilities.py", - "controllers/ap_lib/hostapd.py", - "controllers/ap_lib/hostapd_ap_preset.py", - "controllers/ap_lib/hostapd_bss_settings.py", - "controllers/ap_lib/hostapd_config.py", - "controllers/ap_lib/hostapd_constants.py", - "controllers/ap_lib/hostapd_security.py", - "controllers/ap_lib/hostapd_utils.py", - "controllers/ap_lib/radio_measurement.py", - "controllers/ap_lib/radvd.py", - "controllers/ap_lib/radvd_config.py", - "controllers/ap_lib/radvd_constants.py", - "controllers/ap_lib/third_party_ap_profiles/__init__.py", - "controllers/ap_lib/third_party_ap_profiles/actiontec.py", - "controllers/ap_lib/third_party_ap_profiles/asus.py", - "controllers/ap_lib/third_party_ap_profiles/belkin.py", - "controllers/ap_lib/third_party_ap_profiles/linksys.py", - "controllers/ap_lib/third_party_ap_profiles/netgear.py", - "controllers/ap_lib/third_party_ap_profiles/securifi.py", - "controllers/ap_lib/third_party_ap_profiles/tplink.py", - "controllers/ap_lib/wireless_network_management.py", - "controllers/attenuator.py", - "controllers/attenuator_lib/__init__.py", - "controllers/attenuator_lib/_tnhelper.py", - "controllers/attenuator_lib/aeroflex/__init__.py", - "controllers/attenuator_lib/aeroflex/telnet.py", - "controllers/attenuator_lib/minicircuits/__init__.py", - "controllers/attenuator_lib/minicircuits/http.py", - "controllers/attenuator_lib/minicircuits/telnet.py", - "controllers/fastboot.py", - "controllers/fuchsia_device.py", - "controllers/fuchsia_lib/__init__.py", - "controllers/fuchsia_lib/base_lib.py", - "controllers/fuchsia_lib/device_lib.py", - "controllers/fuchsia_lib/ffx.py", - "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py", - "controllers/fuchsia_lib/lib_controllers/__init__.py", - "controllers/fuchsia_lib/lib_controllers/netstack_controller.py", - 
"controllers/fuchsia_lib/lib_controllers/wlan_controller.py", - "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py", - "controllers/fuchsia_lib/location/__init__.py", - "controllers/fuchsia_lib/location/regulatory_region_lib.py", - "controllers/fuchsia_lib/logging_lib.py", - "controllers/fuchsia_lib/netstack/__init__.py", - "controllers/fuchsia_lib/netstack/netstack_lib.py", - "controllers/fuchsia_lib/package_server.py", - "controllers/fuchsia_lib/sl4f.py", - "controllers/fuchsia_lib/ssh.py", - "controllers/fuchsia_lib/utils_lib.py", - "controllers/fuchsia_lib/wlan_ap_policy_lib.py", - "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py", - "controllers/fuchsia_lib/wlan_lib.py", - "controllers/fuchsia_lib/wlan_policy_lib.py", - "controllers/iperf_client.py", - "controllers/iperf_server.py", - "controllers/openwrt_ap.py", - "controllers/openwrt_lib/__init__.py", - "controllers/openwrt_lib/network_const.py", - "controllers/openwrt_lib/network_settings.py", - "controllers/openwrt_lib/openwrt_constants.py", - "controllers/openwrt_lib/wireless_config.py", - "controllers/openwrt_lib/wireless_settings_applier.py", - "controllers/pdu.py", - "controllers/pdu_lib/__init__.py", - "controllers/pdu_lib/digital_loggers/__init__.py", - "controllers/pdu_lib/digital_loggers/webpowerswitch.py", - "controllers/pdu_lib/synaccess/__init__.py", - "controllers/pdu_lib/synaccess/np02b.py", - "controllers/sl4a_lib/__init__.py", - "controllers/sl4a_lib/error_reporter.py", - "controllers/sl4a_lib/event_dispatcher.py", - "controllers/sl4a_lib/rpc_client.py", - "controllers/sl4a_lib/rpc_connection.py", - "controllers/sl4a_lib/sl4a_manager.py", - "controllers/sl4a_lib/sl4a_ports.py", - "controllers/sl4a_lib/sl4a_session.py", - "controllers/sl4a_lib/sl4a_types.py", - "controllers/sniffer.py", - "controllers/sniffer_lib/__init__.py", - "controllers/sniffer_lib/local/__init__.py", - "controllers/sniffer_lib/local/local_base.py", - 
"controllers/sniffer_lib/local/tcpdump.py", - "controllers/sniffer_lib/local/tshark.py", - "controllers/utils_lib/__init__.py", - "controllers/utils_lib/commands/__init__.py", - "controllers/utils_lib/commands/ip.py", - "controllers/utils_lib/commands/route.py", - "controllers/utils_lib/commands/shell.py", - "controllers/utils_lib/host_utils.py", - "controllers/utils_lib/ssh/__init__.py", - "controllers/utils_lib/ssh/connection.py", - "controllers/utils_lib/ssh/formatter.py", - "controllers/utils_lib/ssh/settings.py", - "dict_object.py", - "error.py", - "event/__init__.py", - "event/decorators.py", - "event/event.py", - "event/event_bus.py", - "event/event_subscription.py", - "event/subscription_bundle.py", - "event/subscription_handle.py", - "keys.py", - "libs/__init__.py", - "libs/logging/__init__.py", - "libs/logging/log_stream.py", - "libs/ota/__init__.py", - "libs/ota/ota_runners/__init__.py", - "libs/ota/ota_runners/ota_runner.py", - "libs/ota/ota_runners/ota_runner_factory.py", - "libs/ota/ota_tools/__init__.py", - "libs/ota/ota_tools/adb_sideload_ota_tool.py", - "libs/ota/ota_tools/ota_tool.py", - "libs/ota/ota_tools/ota_tool_factory.py", - "libs/ota/ota_tools/update_device_ota_tool.py", - "libs/ota/ota_updater.py", - "libs/proc/__init__.py", - "libs/proc/job.py", - "libs/proc/process.py", - "libs/yaml_writer.py", - "logger.py", - "net.py", - "records.py", - "signals.py", - "test_decorators.py", - "test_runner.py", - "test_utils/__init__.py", - "test_utils/abstract_devices/__init__.py", - "test_utils/abstract_devices/wlan_device.py", - "test_utils/abstract_devices/wmm_transceiver.py", - "test_utils/dhcp/__init__.py", - "test_utils/dhcp/base_test.py", - "test_utils/fuchsia/__init__.py", - "test_utils/fuchsia/utils.py", - "test_utils/fuchsia/wmm_test_cases.py", - "test_utils/net/__init__.py", - "test_utils/net/connectivity_const.py", - "test_utils/net/net_test_utils.py", - "test_utils/wifi/__init__.py", - "test_utils/wifi/base_test.py", - 
"test_utils/wifi/wifi_constants.py", - "test_utils/wifi/wifi_performance_test_utils/__init__.py", - "test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py", - "test_utils/wifi/wifi_performance_test_utils/brcm_utils.py", - "test_utils/wifi/wifi_performance_test_utils/ping_utils.py", - "test_utils/wifi/wifi_performance_test_utils/qcom_utils.py", - "test_utils/wifi/wifi_power_test_utils.py", - "test_utils/wifi/wifi_test_utils.py", - "tracelogger.py", - "utils.py", - ] - library_deps = [ - "//third_party/mobly", - "//third_party/pyyaml:yaml", - "third_party/github.com/jd/tenacity", - ] -}
diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 248b51f..0000000 --- a/CHANGELOG.md +++ /dev/null
@@ -1,151 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog], and this project adheres to -[Semantic Versioning]. - -[Keep a Changelog]: https://keepachangelog.com/en/1.0.0/ -[Semantic Versioning]: https://semver.org/spec/v2.0.0.html - -## [Unreleased] - -## 0.3.0 - 2023-05-17 - -### Deprecated - -- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To -ease this transition, upon running `act.py`, a compatible YAML config will be -generated for you and placed next to your JSON config. -- **The `act.py` binary; instead, invoke tests directly.** Upon running -`act.py`, a deprecation warning will provide instructions for how to invoke -antlion tests without act.py and with the newly generated YAML config. - -### Added - -- Presubmit testing in [CV][CV] (aka CQ). All tests specified with the -`qemu_env` environment will run before every antlion CL is submitted. -- Postsubmit testing in [CI][CI]. See [Milo][builders] for an exhaustive list of -builders. -- [EditorConfig](https://editorconfig.org) file for consistent coding styles. -Installing an EditorConfig plugin for your editor is highly recommended. - -[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md -[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md -[builders]: https://luci-milo.appspot.com/ui/search?q=antlion - -### Changed - -- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes -the test file directly using the Mobly test runner, rather than using `act.py`. - - All tests have been refactored to allow direct running with the Mobly test - runner. - - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The - resulting config is passed directly to Mobly's config parser. See notes for - this release's deprecations above. -- Generate YAML config instead of JSON config from antlion-runner. 
-- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field -is only used during `FlashTest`; it is not used when the device is already -provisioned (e.g. when tests are dispatched in Fuchsia infrastructure). - -### Removed - -- Unused controllers and tests (full list) - -### Fixed -[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main -- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu], -[bug](http://b/267330535)) -- Failure to parse 'test_name' in DHCP configuration file in `Dhcpv4InteropTest` -(invalid option) introduced by previous refactor ([@patricklu], -[bug](http://b/232574848)) -- Logging for `Dhcpv4InteropTest` changed to utilize a temp file instead of -/var/log/messages to fix test error with duplicate PID log messages -([@patricklu], [bug](http://b/232574848)) - -## [0.2.0] - 2023-01-03 - -### Added - -- Added snapshots before reboot and during test teardown in `WlanRebootTest` -([@patricklu], [bug](http://b/273923552)) -- Download radvd logs from AP for debugging IPv6 address allocation -- Optional `wlan_features` config field to `FuchsiaDevice` for declaring which -WLAN features the device supports, such as BSS Transition Management - -### Changed - -- All path config options in `FuchsiaDevice` expand the home directory (`~`) and -environmental variables - - Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for - sensible defaults using `$FUCHSIA_DIR` -- Running tests works out of the box without specifying `--testpaths` - - Moved `tests` and `unit_tests` to the `antlion` package, enabling - straight-forward packaging of tests. - - Merged `antlion` and `antlion_contrib` packages -- Converted several required dependencies to optional dependencies: - - `bokeh` is only needed for producing HTML graphing. 
If this feature is - desired, install antlion with the bokeh option: `pip install ".[bokeh]"` - - `usbinfo` and `psutil` are not needed when a static IP address is assigned - to the Fuchsia DUT. If assigning a static IP address is not an optional, - install antlion with the mdns option: `pip install ".[mdns]"` - -### Removed - -- [BREAKING CHANGE] Dependencies for Python versions older than 3.8. Please -upgrade your system to a newer version of Python to run antlion tests. -- `ssh_config` from `FuchsiaDevice` config. SSH configs are generated to provide -a reproducible connection method and ease initial setup. - -### Fixed - -- Failure to acquire IPv6 address in `WlanRebootTest` ([bug](http://b/256009189)) -- Typo in `ChannelSweepTest` preventing use of iPerf ([@patricklu]) -- "Country code never updated" error affecting all Fuchsia ToT builds -([@karlward], [bug](https://fxbug.dev/116500)) -- Parsing new stderr format from `ffx component destroy` ([@karlward], -[bug](https://fxbug.dev/116544)) -- "Socket operation on non-socket" error during initialization of ffx on MacOS -([@karlward], [bug](https://fxbug.dev/116626)) -- Python 3.8 support for IPv6 scope IDs ([bug](http://b/261746355)) - -[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0 - -## [0.1.0] - 2022-11-28 - -Forked from ACTS with the following changes - -### Added - -- A modern approach to installation using `pyproject.toml` via `pip install .` - -### Changed - -- Directory structure to the [src-layout] -- Package and import names from ACTS to antlion -- Copyright notice from AOSP to Fuchsia Authors - -### Deprecated - -- Use of the `setup.py` script. This is only used to keep infrastructure -compatibility with ACTS. Once antlion is moved to Fuchsia infrastructure, this -script will be removed. - -### Removed - -- Dependency on `protobuf` and `grpc` Python packages. Removes the metric -logger, which was unused by Fuchsia WLAN tests. 
-- Unused Android-specific build files, tests, and utilities for features Fuchsia -doesn't support, such as telephony and automotive - -### Fixed - -- KeyError for 'mac_addr' in WlanDeprecatedConfigurationTest ([@sakuma], -[bug](http://b/237709921)) - -[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0 -[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout - -[@sakuma]: https://fuchsia-review.git.corp.google.com/q/owner:sakuma%2540google.com -[@patricklu]: https://fuchsia-review.git.corp.google.com/q/owner:patricklu%2540google.com -[@karlward]: https://fuchsia-review.git.corp.google.com/q/owner:karlward%2540google.com
diff --git a/LICENSE b/LICENSE deleted file mode 100644 index d645695..0000000 --- a/LICENSE +++ /dev/null
@@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index a8ad1bb..0000000 --- a/MANIFEST.in +++ /dev/null
@@ -1,4 +0,0 @@ -include setup.py README.md -recursive-include src/antlion * -global-exclude .DS_Store -global-exclude *.pyc
diff --git a/OWNERS b/OWNERS deleted file mode 100644 index 1f83792..0000000 --- a/OWNERS +++ /dev/null
@@ -1,2 +0,0 @@ -sbalana@google.com -patricklu@google.com
diff --git a/README.md b/README.md index 7d5950b..69a82f6 100644 --- a/README.md +++ b/README.md
@@ -1,242 +1 @@ -# antlion - -Collection of host-driven, hardware-agnostic Fuchsia connectivity tests. Mainly -targeting WLAN and Netstack testing. - -[Docs] | [Report Bug] | [Request Feature] - -[TOC] - -[Docs]: http://go/fxca -[Report Bug]: http://go/conn-test-bug -[Request Feature]: http://b/issues/new?component=1182297&template=1680893 - -## Getting started with QEMU - -The quickest way to run antlion is by using the Fuchsia QEMU emulator. This -enables antlion tests that do not require hardware-specific capabilities like -WLAN. This is especially useful to verify if antlion builds and runs without -syntax errors. If you require WLAN capabilities, see -[below](#running-with-a-physical-device). - -1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source) - -2. Configure and build Fuchsia to run antlion tests virtually on QEMU - - ```sh - fx set core.qemu-x64 \ - --with //src/testing/sl4f \ - --with //src/sys/bin/start_sl4f \ - --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \ - --with-host //third_party/antlion:e2e_tests_quick - fx build - ``` - -3. In a separate terminal, run the emulator with networking enabled - - ```sh - ffx emu stop && ffx emu start -H --net tap && ffx log - ``` - -4. In a separate terminal, run a package server - - ```sh - fx serve - ``` - -5. Run an antlion test - - ```sh - fx test --e2e --output //third_party/antlion/src/antlion/tests/examples:sl4f_sanity_test - ``` - -## Running with a local physical device - -A physical device is required for most antlion tests, which rely on physical I/O -such as WLAN and Bluetooth. Antlion is designed to make testing physical devices -as easy, reliable, and reproducible as possible. The device will be discovered -using mDNS, so make sure your host machine has a network connection to the -device. - -1. 
Configure and build Fuchsia for your target with the following extra - arguments: - - ```sh - fx set core.my-super-cool-product \ - --with //src/testing/sl4f \ - --with //src/sys/bin/start_sl4f \ - --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \ - --with-host //third_party/antlion:e2e_tests - fx build - ``` - -2. Flash your device with the new build - -3. In a separate terminal, run a package server - - ```sh - fx serve - ``` - -4. Run an antlion test - - ```sh - fx test --e2e --output //third_party/antlion/src/antlion/tests/functional:ping_stress_test - ``` - -> Local auxiliary devices are not yet support by `antlion-runner`, which is -> responsible for generating Mobly configs. In the meantime, see the -> section below for manually crafting Mobly configs to support auxiliary -> devices. - -## Running without a Fuchsia checkout - -Requires Python 3.8+ - -1. Clone the repo - - ```sh - git clone https://fuchsia.googlesource.com/antlion - ``` - -2. Install dependencies using [venv](https://docs.python.org/3/library/venv.html#how-venvs-work) - - ```sh - cd antlion - python3 -m venv .venv # Create a virtual environment in the `.venv` directory - source .venv/bin/activate # Activate the virtual environment - pip install --editable ".[mdns]" - # Run `deactivate` later to exit the virtual environment - ``` - -3. Write the sample config and update the Fuchsia controller to match your - development environment - - ```sh - cat <<EOF > simple-config.yaml - TestBeds: - - Name: antlion-runner - Controllers: - FuchsiaDevice: - - ip: fuchsia-00e0-4c01-04df - MoblyParams: - LogPath: logs - EOF - ``` - - Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or - `fuchsia-emulator` if using an emulator. The nodename can be found by looking - for a log similar to the one below. - - ```text - [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator' - ``` - -4. 
Run the sanity test - - ```sh - python src/antlion/tests/examples/Sl4fSanityTest.py -c simple-config.yaml - ``` - -## Contributing - -Contributions are what make open source projects a great place to learn, -inspire, and create. Any contributions you make are **greatly appreciated**. -If you have a suggestion that would make this better, please create a CL. - -Before contributing, additional setup is necessary: - -- Install developer Python packages for formatting and linting - - ```sh - pip install --editable ".[dev]" - ``` - -- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent - whitespace - -- Install [Black](https://pypi.org/project/black/) our preferred code formatter. - Optionally, add the extension to your editor. - -- Complete the steps in '[Contribute source changes]' to gain authorization to - upload CLs to Fuchsia's Gerrit. - -To create a CL: - -1. Create a branch (`git checkout -b feature/amazing-feature`) -2. Make changes -3. Document the changes in `CHANGELOG.md` -4. Run your change through `Black` formatter -5. Commit changes (`git add . && git commit -m 'Add some amazing feature'`) -6. Upload CL (`git push origin HEAD:refs/for/main`) - -> A public bug tracker is not (yet) available. - -[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites - -### Recommended git aliases - -There are a handful of git commands that will be commonly used throughout the -process of contributing. 
Here are a few aliases to add to your git config -(`~/.gitconfig`) for a smoother workflow: - -- `git amend` to modify your CL in response to code review comments -- `git uc` to upload your CL, run pre-submit tests, enable auto-submit, and - add a reviewer - -```gitconfig -[alias] - amend = commit --amend --no-edit - uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana -``` - -You may also want to add a section to ignore the project's large formatting changes: - -```gitconfig -[blame] - ignoreRevsFile = .git-blame-ignore-revs -``` - -## License - -Distributed under the Apache 2.0 License. See `LICENSE` for more information. - -## Acknowledgments - -This is a fork of [ACTS][ACTS], the connectivity testing framework used by -Android. The folks over there did a great job at cultivating amazing tools, much -of which are being used or have been extended with additional features. - -[ACTS]: https://fuchsia.googlesource.com/third_party/android.googlesource.com/platform/tools/test/connectivity/ - -### Migrating CLs from ACTS - -`antlion` and ACTS share the same git history, so migrating existing changes is -straightforward: - -1. Checkout to latest `main` - - ```sh - git checkout main - git pull --rebase origin main - ``` - -2. Cherry-pick the ACTS change - - ```sh - git fetch acts refs/changes/16/12345/6 && git checkout -b change-12345 FETCH_HEAD - git fetch https://android.googlesource.com/platform/tools/test/connectivity refs/changes/30/2320530/1 && git cherry-pick FETCH_HEAD - ``` - -3. Resolve any merge conflicts, if any - - ```sh - git add [...] - git rebase --continue - ``` - -4. Upload CL - - ```sh - git push origin HEAD:refs/for/main # or "git uc" if using the alias - ``` +The code in this repository was migrated to //src/testing/end_to_end/antlion.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni deleted file mode 100644 index 96f7654..0000000 --- a/antlion_host_test.gni +++ /dev/null
@@ -1,159 +0,0 @@ -import("//build/python/python_binary.gni") -import("//build/rust/rustc_binary.gni") -import("//build/testing/host_test.gni") -import("//build/testing/host_test_data.gni") - -# Declares a host-side antlion test. -# -# Example -# -# ``` -# antlion_host_test("Sl4fSanityTest") { -# main_source = "Sl4fSanityTest.py" -# } -# ``` -# -# Parameters -# -# main_source -# The .py file defining the antlion test. -# Type: path -# -# sources (optional) -# Other files that are used in the test. -# Type: list(path) -# Default: empty list -# -# test_params (optional) -# Path to a YAML file with additional test parameters. This will be provided -# to the test in the antlion config under the "test_params" key. -# Type: string -# -# extra_args (optional) -# Additional arguments to pass to the test. -# Type: list(string) -# -# deps -# environments -# visibility -template("antlion_host_test") { - assert(defined(invoker.main_source), "main_source is required") - - # - # Define antlion test python_binary(). - # - _python_binary_name = "${target_name}.pyz" - _python_binary_target = "${target_name}_python_binary" - python_binary(_python_binary_target) { - forward_variables_from(invoker, - [ - "main_source", - "sources", - ]) - output_name = _python_binary_name - main_callable = "test_runner.main" # Mobly-specific entry point. - deps = [ "//third_party/antlion" ] - testonly = true - visibility = [ ":*" ] - } - - _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir") - - # - # Define antlion test host_test_data(). 
- # - _host_test_data_target = "${target_name}_test_data" - host_test_data(_host_test_data_target) { - testonly = true - visibility = [ ":*" ] - sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") + - "/${_python_binary_name}" ] - outputs = [ "${_test_dir}/${_python_binary_name}" ] - deps = [ ":${_python_binary_target}" ] - if (defined(invoker.deps)) { - deps += invoker.deps - } - } - - # - # Define SSH binary host_test_data(). - # - _host_test_data_ssh = "${target_name}_test_data_ssh" - host_test_data(_host_test_data_ssh) { - testonly = true - visibility = [ ":*" ] - sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ] - outputs = [ "${_test_dir}/ssh" ] - } - - # - # Define Mobly test params YAML host_test_data(). - # - if (defined(invoker.test_params)) { - _host_test_data_test_params = "${target_name}_test_data_test_params" - host_test_data(_host_test_data_test_params) { - testonly = true - visibility = [ ":*" ] - sources = [ invoker.test_params ] - outputs = [ "${_test_dir}/${invoker.test_params}" ] - } - } - - # - # Define FFX binary host_test_data(). - # - _host_test_data_ffx = "${target_name}_test_data_ffx" - host_test_data(_host_test_data_ffx) { - testonly = true - visibility = [ ":*" ] - sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ] - outputs = [ "${_test_dir}/ffx" ] - deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ] - } - - # - # Define the antlion host_test() using antlion-runner. 
- # - host_test(target_name) { - forward_variables_from(invoker, - [ - "environments", - "visibility", - ]) - - binary_path = "${root_out_dir}/antlion-runner" - - args = [ - "--python-bin", - rebase_path(python_exe_src, root_build_dir), - "--antlion-pyz", - rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir), - "--out-dir", - rebase_path("${_test_dir}", root_build_dir), - "--ffx-binary", - rebase_path("${_test_dir}/ffx", root_build_dir), - "--ssh-binary", - rebase_path("${_test_dir}/ssh", root_build_dir), - ] - - deps = [ - ":${_host_test_data_ffx}", - ":${_host_test_data_ssh}", - ":${_host_test_data_target}", - "//build/python:interpreter", - "//third_party/antlion/runner", - ] - - if (defined(invoker.test_params)) { - args += [ - "--test-params", - rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir), - ] - deps += [ ":${_host_test_data_test_params}" ] - } - - if (defined(invoker.extra_args)) { - args += invoker.extra_args - } - } -}
diff --git a/environments.gni b/environments.gni deleted file mode 100644 index 2bdfb53..0000000 --- a/environments.gni +++ /dev/null
@@ -1,141 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -antlion_astro_env = { - dimensions = { - device_type = "Astro" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -antlion_sherlock_env = { - dimensions = { - device_type = "Sherlock" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -antlion_nelson_env = { - dimensions = { - device_type = "Nelson" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -astro_ap_env = { - dimensions = { - access_points = "1" - device_type = "Astro" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -astro_ap_iperf_env = { - dimensions = { - access_points = "1" - device_type = "Astro" - iperf_servers = "1" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -astro_ap_iperf_attenuator_env = { - dimensions = { - access_points = "1" - attenuators = "1" - device_type = "Astro" - iperf_servers = "1" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -sherlock_ap_env = { - dimensions = { - access_points = "1" - device_type = "Sherlock" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -sherlock_ap_iperf_env = { - dimensions = { - access_points = "1" - device_type = "Sherlock" - iperf_servers = "1" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -sherlock_ap_iperf_attenuator_env = { - dimensions = { - access_points = "1" - attenuators = "1" - device_type = "Sherlock" - iperf_servers = "1" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -nelson_ap_env = { - dimensions = { - access_points = "1" - device_type = "Nelson" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -nelson_ap_iperf_env = { - dimensions = { - access_points = "1" - device_type = "Nelson" - iperf_servers = "1" - pool = "fuchsia.tests.connectivity" - } - 
tags = [ "antlion" ] -} - -nelson_ap_iperf_attenuator_env = { - dimensions = { - access_points = "1" - attenuators = "1" - device_type = "Nelson" - iperf_servers = "1" - pool = "fuchsia.tests.connectivity" - } - tags = [ "antlion" ] -} - -display_envs = [ - antlion_astro_env, - antlion_sherlock_env, - antlion_nelson_env, -] - -display_ap_envs = [ - astro_ap_env, - sherlock_ap_env, - nelson_ap_env, -] - -display_ap_iperf_envs = [ - astro_ap_iperf_env, - sherlock_ap_iperf_env, - nelson_ap_iperf_env, -] - -display_ap_iperf_attenuator_envs = [ - astro_ap_iperf_attenuator_env, - sherlock_ap_iperf_attenuator_env, - nelson_ap_iperf_attenuator_env, -]
diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index b385122..0000000 --- a/pyproject.toml +++ /dev/null
@@ -1,61 +0,0 @@ -# Reference at https://peps.python.org/pep-0621/ - -[build-system] -requires = ["setuptools", "setuptools-scm[toml]"] -build-backend = "setuptools.build_meta" - -[project] -name = "antlion" -description = "Host-driven, hardware-agnostic Fuchsia connectivity tests" -requires-python = ">=3.8" -license = {text = "Apache-2.0"} -dynamic = ["version"] -readme = "README.md" -dependencies = [ - "mobly==1.12.0", - "pyyaml>=5.1", - "tenacity~=8.0", -] - -[project.optional-dependencies] -# Required to support development tools -dev = [ - "mock", # required for unit tests - "shiv", # packaging python - "toml", # required for pyproject.toml - "vulture", # finds unused code - "black", # code formatting -] -digital_loggers_pdu = ["dlipower"] -html_graphing = ["bokeh"] -flash = ["usbinfo"] -mdns = ["psutil", "zeroconf"] -android = [ - "numpy", - "scapy", -] - -[project.scripts] -antlion = "antlion.bin.act:main" - -[tool.setuptools.packages.find] -where = ["src"] - -[tool.autoflake] -imports = [ - "antlion", - "dataclasses", - "dlipower", - "mobly", - "mock", - "numpy", - "scapy", - "tenacity", - "usbinfo", - "zeroconf", -] - -[tools.vulture] -paths = ["src"] -sort_by_size = true -min_confidence = 80
diff --git a/runner/BUILD.gn b/runner/BUILD.gn deleted file mode 100644 index d405592..0000000 --- a/runner/BUILD.gn +++ /dev/null
@@ -1,50 +0,0 @@ -import("//build/rust/rustc_binary.gni") -import("//build/rust/rustc_test.gni") - -rustc_binary("runner") { - output_name = "antlion-runner" - edition = "2021" - with_unit_tests = true - - deps = [ - "//src/developer/ffx/lib/netext:lib($host_toolchain)", - "//src/lib/mdns/rust:mdns", - "//src/lib/network/packet", - "//third_party/rust_crates:anyhow", - "//third_party/rust_crates:argh", - "//third_party/rust_crates:home", - "//third_party/rust_crates:itertools", - "//third_party/rust_crates:lazy_static", - "//third_party/rust_crates:nix", - "//third_party/rust_crates:serde", - "//third_party/rust_crates:serde_json", - "//third_party/rust_crates:serde_yaml", - "//third_party/rust_crates:socket2", - "//third_party/rust_crates:thiserror", - ] - - test_deps = [ - "//third_party/rust_crates:assert_matches", - "//third_party/rust_crates:indoc", - "//third_party/rust_crates:pretty_assertions", - "//third_party/rust_crates:tempfile", - ] - - sources = [ - "src/config.rs", - "src/driver/infra.rs", - "src/driver/local.rs", - "src/driver/mod.rs", - "src/env.rs", - "src/finder.rs", - "src/main.rs", - "src/net.rs", - "src/runner.rs", - "src/yaml.rs", - ] -} - -group("tests") { - testonly = true - deps = [ ":runner_test($host_toolchain)" ] -}
diff --git a/runner/README.md b/runner/README.md deleted file mode 100644 index 45c926a..0000000 --- a/runner/README.md +++ /dev/null
@@ -1,42 +0,0 @@ -# antlion-runner - -A program to run antlion locally and in infrastructure. Includes a config -generator with mDNS discovery and sensible defaults. - -## Using locally with an emulator - -Running antlion locally with a Fuchsia emulator allows developers to perform a -sanity checks on their changes. Running this way is very quick (~5 seconds) and -can spot simple mistakes before code review! - -1. Build Fuchsia with antlion support - ```sh - jiri update -gc # if you haven't updated in a while - fx set workstation_eng_paused.qemu-x64 \ - --with-host //third_party/antlion:e2e_tests \ - --with-host //third_party/antlion:tests \ - --with //src/testing/sl4f - fx build # if you haven't built in a while - ``` -2. Start the package server. Keep this running in the background. - ```sh - fx serve - ``` -3. In a separate terminal, start the emulator with access to external networks. - ```sh - ffx emu stop && ffx emu start -H --net tap && ffx log - ``` -4. In a separate terminal, run a test - ```sh - fx test --e2e --output //third_party/antlion:sl4f_sanity_test - ``` - -> Note: Local testing with auxiliary devices is not supported by antlion runner. -Use antlion directly for these cases; see the antlion [README](../README.md). - -## Testing - -```sh -fx set core.qemu-x64 --with //vendor/google/build/python/antlion/runner:tests -fx test --output //vendor/google/build/python/antlion/runner:runner_test -```
diff --git a/runner/src/config.rs b/runner/src/config.rs deleted file mode 100644 index 571a8ab..0000000 --- a/runner/src/config.rs +++ /dev/null
@@ -1,162 +0,0 @@ -// Copyright 2022 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -use crate::net::IpAddr; -use crate::yaml; - -use std::path::PathBuf; - -use serde::{Deserialize, Serialize}; -use serde_yaml::Value; - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "PascalCase")] -/// Config used by antlion for declaring testbeds and test parameters. -pub(crate) struct Config { - #[serde(rename = "TestBeds")] - pub testbeds: Vec<Testbed>, - pub mobly_params: MoblyParams, -} - -impl Config { - /// Merge the given test parameters into all testbeds. - pub fn merge_test_params(&mut self, test_params: Value) { - for testbed in self.testbeds.iter_mut() { - match testbed.test_params.as_mut() { - Some(existing) => yaml::merge(existing, test_params.clone()), - None => testbed.test_params = Some(test_params.clone()), - } - } - } -} - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "PascalCase")] -/// Parameters consumed by Mobly. -pub(crate) struct MoblyParams { - pub log_path: PathBuf, -} - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "PascalCase")] -/// A group of interconnected devices to be used together during an antlion test. 
-pub(crate) struct Testbed { - pub name: String, - pub controllers: Controllers, - #[serde(skip_serializing_if = "Option::is_none")] - pub test_params: Option<Value>, -} - -#[derive(Clone, Debug, Default, Serialize)] -pub(crate) struct Controllers { - #[serde(rename = "FuchsiaDevice", skip_serializing_if = "Vec::is_empty")] - pub fuchsia_devices: Vec<Fuchsia>, - #[serde(rename = "AccessPoint", skip_serializing_if = "Vec::is_empty")] - pub access_points: Vec<AccessPoint>, - #[serde(rename = "Attenuator", skip_serializing_if = "Vec::is_empty")] - pub attenuators: Vec<Attenuator>, - #[serde(rename = "PduDevice", skip_serializing_if = "Vec::is_empty")] - pub pdus: Vec<Pdu>, - #[serde(rename = "IPerfServer", skip_serializing_if = "Vec::is_empty")] - pub iperf_servers: Vec<IPerfServer>, -} - -#[derive(Clone, Debug, Serialize)] -/// A Fuchsia device for use with antlion as defined by [fuchsia_device.py]. -/// -/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/fuchsia_device.py -pub(crate) struct Fuchsia { - pub mdns_name: String, - pub ip: IpAddr, - pub take_bug_report_on_fail: bool, - pub ssh_binary_path: PathBuf, - #[serde(skip_serializing_if = "Option::is_none")] - pub ssh_config: Option<PathBuf>, - pub ffx_binary_path: PathBuf, - pub ssh_priv_key: PathBuf, - #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")] - pub pdu_device: Option<PduRef>, - pub hard_reboot_on_fail: bool, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -/// Reference to a PDU device. Used to specify which port the attached device -/// maps to on the PDU. 
-pub(crate) struct PduRef { - #[serde(default = "default_pdu_device")] - pub device: String, - #[serde(rename(serialize = "host"))] - pub ip: IpAddr, - pub port: u8, -} - -fn default_pdu_device() -> String { - "synaccess.np02b".to_string() -} - -#[derive(Clone, Debug, Serialize)] -/// Declares an access point for use with antlion as defined by [access_point.py]. -/// -/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/access_point.py -pub(crate) struct AccessPoint { - pub wan_interface: String, - pub ssh_config: SshConfig, - #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")] - pub pdu_device: Option<PduRef>, - #[serde(rename = "Attenuator", skip_serializing_if = "Option::is_none")] - pub attenuators: Option<Vec<AttenuatorRef>>, -} - -#[derive(Clone, Debug, Serialize)] -pub(crate) struct SshConfig { - pub ssh_binary_path: PathBuf, - pub host: IpAddr, - pub user: String, - pub identity_file: PathBuf, -} - -#[derive(Clone, Debug, Serialize)] -/// Reference to an attenuator device. Used to specify which ports the attached -/// devices' channels maps to on the attenuator. -pub(crate) struct AttenuatorRef { - #[serde(rename = "Address")] - pub address: IpAddr, - #[serde(rename = "attenuator_ports_wifi_2g")] - pub ports_2g: Vec<u8>, - #[serde(rename = "attenuator_ports_wifi_5g")] - pub ports_5g: Vec<u8>, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -#[serde(rename_all = "PascalCase")] -/// Declares an attenuator for use with antlion as defined by [attenuator.py]. 
-/// -/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/attenuator.py -pub(crate) struct Attenuator { - pub model: String, - pub instrument_count: u8, - pub address: IpAddr, - pub protocol: String, - pub port: u16, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -/// Declares a power distribution unit for use with antlion as defined by [pdu.py]. -/// -/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/pdu.py -pub(crate) struct Pdu { - pub device: String, - pub host: IpAddr, -} - -#[derive(Clone, Debug, Serialize)] -/// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py]. -/// -/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/iperf_server.py -pub(crate) struct IPerfServer { - pub ssh_config: SshConfig, - pub port: u16, - pub test_interface: String, - pub use_killall: bool, -}
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs deleted file mode 100644 index ceff26e..0000000 --- a/runner/src/driver/infra.rs +++ /dev/null
@@ -1,898 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -use crate::config::PduRef; -use crate::config::{self, Config}; -use crate::driver::Driver; -use crate::env::Environment; -use crate::net::IpAddr; -use crate::yaml; - -use std::collections::{HashMap, HashSet}; -use std::fs; -use std::path::{Path, PathBuf}; - -use anyhow::{anyhow, Context, Result}; -use itertools::Itertools; -use serde::Deserialize; -use serde_yaml::Value; -use thiserror::Error; - -const TESTBED_NAME: &'static str = "antlion-runner"; -const ENV_OUT_DIR: &'static str = "FUCHSIA_TEST_OUTDIR"; -const ENV_TESTBED_CONFIG: &'static str = "FUCHSIA_TESTBED_CONFIG"; -const TEST_SUMMARY_FILE: &'static str = "test_summary.yaml"; - -#[derive(Debug)] -/// Driver for running antlion on emulated and hardware testbeds hosted by -/// Fuchsia infrastructure. -pub(crate) struct InfraDriver { - output_dir: PathBuf, - config: Config, -} - -#[derive(Error, Debug)] -pub(crate) enum InfraDriverError { - #[error("infra environment not detected, \"{0}\" environment variable not present")] - NotDetected(String), - #[error(transparent)] - Config(#[from] ConfigError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Error, Debug)] -pub(crate) enum ConfigError { - #[error("ip {ip} in use by several devices")] - DuplicateIp { ip: IpAddr }, - #[error("ip {ip} port {port} in use by several devices")] - DuplicatePort { ip: IpAddr, port: u8 }, -} - -impl InfraDriver { - /// Detect an InfraDriver. Returns None if the required environmental - /// variables are not found. 
- pub fn new<E: Environment>( - env: E, - ssh_binary: PathBuf, - ffx_binary: PathBuf, - ) -> Result<Self, InfraDriverError> { - let config_path = match env.var(ENV_TESTBED_CONFIG) { - Ok(p) => PathBuf::from(p), - Err(std::env::VarError::NotPresent) => { - return Err(InfraDriverError::NotDetected(ENV_TESTBED_CONFIG.to_string())) - } - Err(e) => { - return Err(InfraDriverError::Other(anyhow!( - "Failed to read \"{ENV_TESTBED_CONFIG}\" {e}" - ))) - } - }; - let config = fs::read_to_string(&config_path) - .with_context(|| format!("Failed to read \"{}\"", config_path.display()))?; - let targets: Vec<InfraTarget> = serde_json::from_str(&config) - .with_context(|| format!("Failed to parse into InfraTarget: \"{config}\""))?; - if targets.len() == 0 { - return Err(InfraDriverError::Other(anyhow!( - "Expected at least one target declared in \"{}\"", - config_path.display() - ))); - } - - let output_path = match env.var(ENV_OUT_DIR) { - Ok(p) => p, - Err(std::env::VarError::NotPresent) => { - return Err(InfraDriverError::NotDetected(ENV_OUT_DIR.to_string())) - } - Err(e) => { - return Err(InfraDriverError::Other(anyhow!( - "Failed to read \"{ENV_OUT_DIR}\" {e}" - ))) - } - }; - let output_dir = PathBuf::from(output_path); - if !fs::metadata(&output_dir).context("Failed to stat the output directory")?.is_dir() { - return Err(InfraDriverError::Other(anyhow!( - "Expected a directory but found a file at \"{}\"", - output_dir.display() - ))); - } - - Ok(InfraDriver { - output_dir: output_dir.clone(), - config: InfraDriver::parse_targets(targets, ssh_binary, ffx_binary, output_dir)?, - }) - } - - fn parse_targets( - targets: Vec<InfraTarget>, - ssh_binary: PathBuf, - ffx_binary: PathBuf, - output_dir: PathBuf, - ) -> Result<Config, InfraDriverError> { - let mut fuchsia_devices: Vec<config::Fuchsia> = vec![]; - let mut access_points: Vec<config::AccessPoint> = vec![]; - let mut attenuators: HashMap<IpAddr, config::Attenuator> = HashMap::new(); - let mut pdus: HashMap<IpAddr, 
config::Pdu> = HashMap::new(); - let mut iperf_servers: Vec<config::IPerfServer> = vec![]; - let mut test_params: Option<Value> = None; - - let mut used_ips: HashSet<IpAddr> = HashSet::new(); - let mut used_ports: HashMap<IpAddr, HashSet<u8>> = HashMap::new(); - - let mut register_ip = |ip: IpAddr| -> Result<(), InfraDriverError> { - if !used_ips.insert(ip.clone()) { - return Err(ConfigError::DuplicateIp { ip }.into()); - } - Ok(()) - }; - - let mut register_port = |ip: IpAddr, port: u8| -> Result<(), InfraDriverError> { - match used_ports.get_mut(&ip) { - Some(ports) => { - if !ports.insert(port) { - return Err(ConfigError::DuplicatePort { ip, port }.into()); - } - } - None => { - if used_ports.insert(ip, HashSet::from([port])).is_some() { - return Err(InfraDriverError::Other(anyhow!( - "Used ports set was unexpectedly modified by concurrent use", - ))); - } - } - }; - Ok(()) - }; - - let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> { - if let Some(PduRef { device, ip, port }) = p { - register_port(ip.clone(), port)?; - let new = config::Pdu { device: device.clone(), host: ip.clone() }; - if let Some(old) = pdus.insert(ip.clone(), new.clone()) { - if old != new { - return Err(ConfigError::DuplicateIp { ip }.into()); - } - } - } - Ok(()) - }; - - let mut register_attenuator = |a: Option<AttenuatorRef>| -> Result<(), InfraDriverError> { - if let Some(a) = a { - let new = config::Attenuator { - model: "minicircuits".to_string(), - instrument_count: 4, - address: a.ip.clone(), - protocol: "http".to_string(), - port: 80, - }; - if let Some(old) = attenuators.insert(a.ip.clone(), new.clone()) { - if old != new { - return Err(ConfigError::DuplicateIp { ip: a.ip }.into()); - } - } - } - Ok(()) - }; - - let mut merge_test_params = |p: Option<Value>| { - match (test_params.as_mut(), p) { - (None, Some(new)) => test_params = Some(new), - (Some(existing), Some(new)) => yaml::merge(existing, new), - (_, None) => {} - }; - }; - - for target in targets 
{ - match target { - InfraTarget::FuchsiaDevice { nodename, ipv4, ipv6, ssh_key, pdu, test_params } => { - let ip: IpAddr = if !ipv4.is_empty() { - ipv4.parse().context("Invalid IPv4 address") - } else if !ipv6.is_empty() { - ipv6.parse().context("Invalid IPv6 address") - } else { - Err(anyhow!("IP address not specified")) - }?; - - fuchsia_devices.push(config::Fuchsia { - mdns_name: nodename.clone(), - ip: ip.clone(), - take_bug_report_on_fail: true, - ssh_binary_path: ssh_binary.clone(), - // TODO(http://b/244747218): Remove when ssh_config is refactored away - ssh_config: None, - ffx_binary_path: ffx_binary.clone(), - ssh_priv_key: ssh_key.clone(), - pdu_device: pdu.clone(), - hard_reboot_on_fail: true, - }); - - register_ip(ip)?; - register_pdu(pdu)?; - merge_test_params(test_params); - } - InfraTarget::AccessPoint { ip, attenuator, pdu, ssh_key } => { - access_points.push(config::AccessPoint { - wan_interface: "eth0".to_string(), - ssh_config: config::SshConfig { - ssh_binary_path: ssh_binary.clone(), - host: ip.clone(), - user: "root".to_string(), - identity_file: ssh_key.clone(), - }, - pdu_device: pdu.clone(), - attenuators: attenuator.as_ref().map(|a| { - vec![config::AttenuatorRef { - address: a.ip.clone(), - ports_2g: vec![1, 2, 3], - ports_5g: vec![1, 2, 3], - }] - }), - }); - - register_ip(ip)?; - register_pdu(pdu)?; - register_attenuator(attenuator)?; - } - InfraTarget::IPerfServer { ip, user, test_interface, pdu, ssh_key } => { - iperf_servers.push(config::IPerfServer { - ssh_config: config::SshConfig { - ssh_binary_path: ssh_binary.clone(), - host: ip.clone(), - user: user.to_string(), - identity_file: ssh_key.clone(), - }, - port: 5201, - test_interface: test_interface.clone(), - use_killall: true, - }); - - register_ip(ip.clone())?; - register_pdu(pdu)?; - } - }; - } - - Ok(Config { - testbeds: vec![config::Testbed { - name: TESTBED_NAME.to_string(), - controllers: config::Controllers { - fuchsia_devices: fuchsia_devices, - access_points: 
access_points, - attenuators: attenuators - .into_values() - .sorted_by_key(|a| a.address.clone()) - .collect(), - pdus: pdus.into_values().sorted_by_key(|p| p.host.clone()).collect(), - iperf_servers: iperf_servers, - }, - test_params, - }], - mobly_params: config::MoblyParams { log_path: output_dir }, - }) - } -} - -impl Driver for InfraDriver { - fn output_path(&self) -> &Path { - self.output_dir.as_path() - } - fn config(&self) -> Config { - self.config.clone() - } - fn teardown(&self) -> Result<()> { - let results_path = - self.output_dir.join(TESTBED_NAME).join("latest").join(TEST_SUMMARY_FILE); - match fs::File::open(&results_path) { - Ok(mut results) => { - println!("\nTest results from {}\n", results_path.display()); - println!("[=====MOBLY RESULTS=====]"); - std::io::copy(&mut results, &mut std::io::stdout()) - .context("Failed to copy results to stdout")?; - } - Err(e) => eprintln!("Failed to open \"{}\": {}", results_path.display(), e), - }; - - // Remove any symlinks from the output directory; this causes errors - // while uploading to CAS. - // - // TODO: Remove when the fix is released and supported on Swarming bots - // https://github.com/bazelbuild/remote-apis-sdks/pull/229. - remove_symlinks(self.output_dir.clone())?; - - Ok(()) - } -} - -fn remove_symlinks<P: AsRef<Path>>(path: P) -> Result<()> { - let meta = fs::symlink_metadata(path.as_ref())?; - if meta.is_symlink() { - fs::remove_file(path)?; - } else if meta.is_dir() { - for entry in fs::read_dir(path)? { - remove_symlinks(entry?.path())?; - } - } - Ok(()) -} - -#[derive(Debug, Deserialize)] -#[serde(tag = "type")] -/// Schema used to communicate target information from the test environment set -/// up by botanist. 
-/// -/// See https://cs.opensource.google/fuchsia/fuchsia/+/main:tools/botanist/README.md -enum InfraTarget { - FuchsiaDevice { - nodename: String, - ipv4: String, - ipv6: String, - ssh_key: PathBuf, - pdu: Option<PduRef>, - test_params: Option<Value>, - }, - AccessPoint { - ip: IpAddr, - ssh_key: PathBuf, - attenuator: Option<AttenuatorRef>, - pdu: Option<PduRef>, - }, - IPerfServer { - ip: IpAddr, - ssh_key: PathBuf, - #[serde(default = "default_iperf_user")] - user: String, - test_interface: String, - pdu: Option<PduRef>, - }, -} - -fn default_iperf_user() -> String { - "pi".to_string() -} - -#[derive(Clone, Debug, Deserialize)] -struct AttenuatorRef { - ip: IpAddr, -} - -#[cfg(test)] -mod test { - use super::*; - - use crate::run; - use crate::runner::Runner; - use crate::{env::Environment, runner::ExitStatus}; - - use std::ffi::OsStr; - - use assert_matches::assert_matches; - use indoc::formatdoc; - use pretty_assertions::assert_eq; - use serde_json::json; - use tempfile::{NamedTempFile, TempDir}; - - const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc"; - const FUCHSIA_ADDR: &'static str = "fe80::1%2"; - - #[derive(Default)] - struct MockRunner { - out_dir: PathBuf, - config: std::cell::Cell<PathBuf>, - } - impl MockRunner { - fn new(out_dir: PathBuf) -> Self { - Self { out_dir, ..Default::default() } - } - } - impl Runner for MockRunner { - fn run(&self, config: PathBuf) -> Result<ExitStatus> { - self.config.set(config); - - let antlion_out = self.out_dir.join(TESTBED_NAME).join("latest"); - fs::create_dir_all(&antlion_out) - .context("Failed to create antlion output directory")?; - fs::write(antlion_out.join(TEST_SUMMARY_FILE), "") - .context("Failed to write test_summary.yaml")?; - Ok(ExitStatus::Ok) - } - } - - struct MockEnvironment { - config: Option<PathBuf>, - out_dir: Option<PathBuf>, - } - impl Environment for MockEnvironment { - fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, std::env::VarError> { - if key.as_ref() == 
ENV_TESTBED_CONFIG { - self.config - .clone() - .ok_or(std::env::VarError::NotPresent) - .map(|p| p.into_os_string().into_string().unwrap()) - } else if key.as_ref() == ENV_OUT_DIR { - self.out_dir - .clone() - .ok_or(std::env::VarError::NotPresent) - .map(|p| p.into_os_string().into_string().unwrap()) - } else { - Err(std::env::VarError::NotPresent) - } - } - } - - #[test] - fn infra_not_detected() { - let ssh = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let env = MockEnvironment { config: None, out_dir: None }; - - let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()); - assert_matches!(got, Err(InfraDriverError::NotDetected(_))); - } - - #[test] - fn infra_not_detected_config() { - let ssh = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) }; - - let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()); - assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG); - } - - #[test] - fn infra_not_detected_out_dir() { - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": FUCHSIA_NAME, - "ipv4": "", - "ipv6": FUCHSIA_ADDR, - "ssh_key": ssh_key.path(), - }]), - ) - .unwrap(); - - let env = - MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None }; - - let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()); - assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR); - } - - #[test] - fn infra_invalid_config() { - let ssh = NamedTempFile::new().unwrap(); - let ffx = 
NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty(testbed_config.as_file(), &json!({ "foo": "bar" })).unwrap(); - - let env = MockEnvironment { - config: Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - - let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()); - assert_matches!(got, Err(_)); - } - - #[test] - fn infra() { - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": FUCHSIA_NAME, - "ipv4": "", - "ipv6": FUCHSIA_ADDR, - "ssh_key": ssh_key.path(), - }]), - ) - .unwrap(); - - let runner = MockRunner::new(out_dir.path().to_path_buf()); - let env = MockEnvironment { - config: Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - let driver = - InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap(); - run(runner, driver, None).unwrap(); - - let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap(); - - let ssh_path = ssh.path().display().to_string(); - let ssh_key_path = ssh_key.path().display().to_string(); - let ffx_path = ffx.path().display().to_string(); - let out_path = out_dir.path().display(); - let want = formatdoc! 
{r#" - TestBeds: - - Name: {TESTBED_NAME} - Controllers: - FuchsiaDevice: - - mdns_name: {FUCHSIA_NAME} - ip: {FUCHSIA_ADDR} - take_bug_report_on_fail: true - ssh_binary_path: {ssh_path} - ffx_binary_path: {ffx_path} - ssh_priv_key: {ssh_key_path} - hard_reboot_on_fail: true - MoblyParams: - LogPath: {out_path} - "#}; - - assert_eq!(got, want); - } - - #[test] - fn infra_with_test_params() { - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": FUCHSIA_NAME, - "ipv4": "", - "ipv6": FUCHSIA_ADDR, - "ssh_key": ssh_key.path(), - "test_params": { - "sl4f_sanity_test_params": { - "can_overwrite": false, - "from_original": true, - } - } - }]), - ) - .unwrap(); - - let runner = MockRunner::new(out_dir.path().to_path_buf()); - let env = MockEnvironment { - config: Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - let driver = - InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap(); - let params = " - sl4f_sanity_test_params: - merged_with: true - can_overwrite: true - "; - let params = serde_yaml::from_str(params).unwrap(); - run(runner, driver, Some(params)).unwrap(); - - let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap(); - - let ssh_path = ssh.path().display().to_string(); - let ssh_key_path = ssh_key.path().display().to_string(); - let ffx_path = ffx.path().display().to_string(); - let out_path = out_dir.path().display(); - let want = formatdoc! 
{r#" - TestBeds: - - Name: {TESTBED_NAME} - Controllers: - FuchsiaDevice: - - mdns_name: {FUCHSIA_NAME} - ip: {FUCHSIA_ADDR} - take_bug_report_on_fail: true - ssh_binary_path: {ssh_path} - ffx_binary_path: {ffx_path} - ssh_priv_key: {ssh_key_path} - hard_reboot_on_fail: true - TestParams: - sl4f_sanity_test_params: - can_overwrite: true - from_original: true - merged_with: true - MoblyParams: - LogPath: {out_path} - "#}; - - assert_eq!(got, want); - } - - #[test] - fn infra_with_auxiliary_devices() { - const FUCHSIA_PDU_IP: &'static str = "192.168.42.14"; - const FUCHSIA_PDU_PORT: u8 = 1; - const AP_IP: &'static str = "192.168.42.11"; - const AP_AND_IPERF_PDU_IP: &'static str = "192.168.42.13"; - const AP_PDU_PORT: u8 = 1; - const ATTENUATOR_IP: &'static str = "192.168.42.15"; - const IPERF_IP: &'static str = "192.168.42.12"; - const IPERF_USER: &'static str = "alice"; - const IPERF_PDU_PORT: u8 = 2; - - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": FUCHSIA_NAME, - "ipv4": "", - "ipv6": FUCHSIA_ADDR, - "ssh_key": ssh_key.path(), - "pdu": { - "ip": FUCHSIA_PDU_IP, - "port": FUCHSIA_PDU_PORT, - }, - }, { - "type": "AccessPoint", - "ip": AP_IP, - "ssh_key": ssh_key.path(), - "attenuator": { - "ip": ATTENUATOR_IP, - }, - "pdu": { - "ip": AP_AND_IPERF_PDU_IP, - "port": AP_PDU_PORT, - "device": "fancy-pdu", - }, - }, { - "type": "IPerfServer", - "ip": IPERF_IP, - "ssh_key": ssh_key.path(), - "user": IPERF_USER, - "test_interface": "eth0", - "pdu": { - "ip": AP_AND_IPERF_PDU_IP, - "port": IPERF_PDU_PORT, - "device": "fancy-pdu", - }, - }]), - ) - .unwrap(); - - let runner = MockRunner::new(out_dir.path().to_path_buf()); - let env = MockEnvironment { - config: 
Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - let driver = - InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap(); - run(runner, driver, None).unwrap(); - - let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap(); - - let ssh_path = ssh.path().display().to_string(); - let ssh_key_path = ssh_key.path().display().to_string(); - let ffx_path = ffx.path().display().to_string(); - let out_path = out_dir.path().display(); - let want = formatdoc! {r#" - TestBeds: - - Name: {TESTBED_NAME} - Controllers: - FuchsiaDevice: - - mdns_name: {FUCHSIA_NAME} - ip: {FUCHSIA_ADDR} - take_bug_report_on_fail: true - ssh_binary_path: {ssh_path} - ffx_binary_path: {ffx_path} - ssh_priv_key: {ssh_key_path} - PduDevice: - device: synaccess.np02b - host: {FUCHSIA_PDU_IP} - port: {FUCHSIA_PDU_PORT} - hard_reboot_on_fail: true - AccessPoint: - - wan_interface: eth0 - ssh_config: - ssh_binary_path: {ssh_path} - host: {AP_IP} - user: root - identity_file: {ssh_key_path} - PduDevice: - device: fancy-pdu - host: {AP_AND_IPERF_PDU_IP} - port: {AP_PDU_PORT} - Attenuator: - - Address: {ATTENUATOR_IP} - attenuator_ports_wifi_2g: - - 1 - - 2 - - 3 - attenuator_ports_wifi_5g: - - 1 - - 2 - - 3 - Attenuator: - - Model: minicircuits - InstrumentCount: 4 - Address: {ATTENUATOR_IP} - Protocol: http - Port: 80 - PduDevice: - - device: fancy-pdu - host: {AP_AND_IPERF_PDU_IP} - - device: synaccess.np02b - host: {FUCHSIA_PDU_IP} - IPerfServer: - - ssh_config: - ssh_binary_path: {ssh_path} - host: {IPERF_IP} - user: {IPERF_USER} - identity_file: {ssh_key_path} - port: 5201 - test_interface: eth0 - use_killall: true - MoblyParams: - LogPath: {out_path} - "#}; - - assert_eq!(got, want); - } - - #[test] - fn infra_duplicate_port_pdu() { - let pdu_ip: IpAddr = "192.168.42.13".parse().unwrap(); - let pdu_port = 1; - - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx 
= NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": "foo", - "ipv4": "", - "ipv6": "fe80::1%2", - "ssh_key": ssh_key.path(), - "pdu": { - "ip": pdu_ip, - "port": pdu_port, - }, - }, { - "type": "AccessPoint", - "ip": "192.168.42.11", - "ssh_key": ssh_key.path(), - "pdu": { - "ip": pdu_ip, - "port": pdu_port, - }, - }]), - ) - .unwrap(); - - let env = MockEnvironment { - config: Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()); - assert_matches!(got, - Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port })) - if ip == pdu_ip && port == pdu_port - ); - } - - #[test] - fn infra_duplicate_ip_pdu() { - let duplicate_ip: IpAddr = "192.168.42.13".parse().unwrap(); - - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": "foo", - "ipv4": "", - "ipv6": "fe80::1%2", - "ssh_key": ssh_key.path(), - "pdu": { - "ip": duplicate_ip, - "port": 1, - "device": "A", - }, - }, { - "type": "AccessPoint", - "ip": "192.168.42.11", - "ssh_key": ssh_key.path(), - "pdu": { - "ip": duplicate_ip, - "port": 2, - "device": "B", - }, - }]), - ) - .unwrap(); - - let env = MockEnvironment { - config: Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - assert_matches!( - InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()), - Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip })) - if ip == duplicate_ip - ); - } 
- - #[test] - fn infra_duplicate_ip_devices() { - let duplicate_ip: IpAddr = "192.168.42.11".parse().unwrap(); - - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let testbed_config = NamedTempFile::new().unwrap(); - serde_json::to_writer_pretty( - testbed_config.as_file(), - &json!([{ - "type": "FuchsiaDevice", - "nodename": "foo", - "ipv4": duplicate_ip, - "ipv6": "", - "ssh_key": ssh_key.path(), - }, { - "type": "AccessPoint", - "ip": duplicate_ip, - "ssh_key": ssh_key.path(), - }]), - ) - .unwrap(); - - let env = MockEnvironment { - config: Some(testbed_config.path().to_path_buf()), - out_dir: Some(out_dir.path().to_path_buf()), - }; - let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()); - assert_matches!(got, - Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip })) - if ip == duplicate_ip - ); - } - - #[test] - fn remove_symlinks_works() { - const SYMLINK_FILE: &'static str = "latest"; - - let out_dir = TempDir::new().unwrap(); - let test_file = NamedTempFile::new_in(&out_dir).unwrap(); - let symlink_path = out_dir.path().join(SYMLINK_FILE); - - #[cfg(unix)] - std::os::unix::fs::symlink(&test_file, &symlink_path).unwrap(); - #[cfg(windows)] - std::os::windows::fs::symlink_file(&test_file, &symlink_path).unwrap(); - - assert_matches!(remove_symlinks(out_dir.path()), Ok(())); - assert_matches!(fs::symlink_metadata(symlink_path), Err(e) if e.kind() == std::io::ErrorKind::NotFound); - assert_matches!(fs::symlink_metadata(test_file), Ok(meta) if meta.is_file()); - } -}
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs deleted file mode 100644 index 983a6a7..0000000 --- a/runner/src/driver/local.rs +++ /dev/null
@@ -1,287 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -use crate::config; -use crate::driver::Driver; -use crate::finder::{Answer, Finder}; -use crate::net::IpAddr; - -use std::path::{Path, PathBuf}; - -use anyhow::{ensure, Context, Result}; -use home::home_dir; - -const TESTBED_NAME: &'static str = "antlion-runner"; - -/// Driver for running antlion locally on an emulated or hardware testbed with -/// optional mDNS discovery when a DHCP server is not available. This is useful -/// for testing changes locally in a development environment. -pub(crate) struct LocalDriver { - target: LocalTarget, - output_dir: PathBuf, - ssh_binary: PathBuf, - ffx_binary: PathBuf, -} - -impl LocalDriver { - pub fn new<F>( - device: Option<String>, - ssh_binary: PathBuf, - ssh_key: Option<PathBuf>, - ffx_binary: PathBuf, - out_dir: Option<PathBuf>, - ) -> Result<Self> - where - F: Finder, - { - let output_dir = match out_dir { - Some(p) => Ok(p), - None => std::env::current_dir().context("Failed to get current working directory"), - }?; - Ok(Self { - target: LocalTarget::new::<F>(device, ssh_key)?, - output_dir, - ssh_binary, - ffx_binary, - }) - } -} - -impl Driver for LocalDriver { - fn output_path(&self) -> &Path { - self.output_dir.as_path() - } - fn config(&self) -> config::Config { - config::Config { - testbeds: vec![config::Testbed { - name: TESTBED_NAME.to_string(), - controllers: config::Controllers { - fuchsia_devices: vec![config::Fuchsia { - mdns_name: self.target.name.clone(), - ip: self.target.ip.clone(), - take_bug_report_on_fail: true, - ssh_binary_path: self.ssh_binary.clone(), - // TODO(http://b/244747218): Remove when ssh_config is refactored away - ssh_config: None, - ffx_binary_path: self.ffx_binary.clone(), - ssh_priv_key: self.target.ssh_key.clone(), - pdu_device: None, - hard_reboot_on_fail: true, - }], - ..Default::default() - }, 
- test_params: None, - }], - mobly_params: config::MoblyParams { log_path: self.output_dir.clone() }, - } - } - fn teardown(&self) -> Result<()> { - println!( - "\nView full antlion logs at {}", - self.output_dir.join(TESTBED_NAME).join("latest").display() - ); - Ok(()) - } -} - -/// LocalTargetInfo performs best-effort discovery of target information from -/// standard Fuchsia environmental variables. -struct LocalTarget { - name: String, - ip: IpAddr, - ssh_key: PathBuf, -} - -impl LocalTarget { - fn new<F>(device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self> - where - F: Finder, - { - let device_name = device.or_else(|| match std::env::var("FUCHSIA_DIR") { - Ok(dir) => match std::fs::read_to_string(format!("{dir}/out/default.device")) { - Ok(name) => Some(name.trim().to_string()), - Err(_) => { - println!("A default device using \"fx set-device\" has not been set"); - println!("Using the first Fuchsia device discovered via mDNS"); - None - } - }, - Err(_) => { - println!("Neither --device nor FUCHSIA_DIR has been set"); - println!("Using the first Fuchsia device discovered via mDNS"); - None - } - }); - - let Answer { name, ip } = F::find_device(device_name)?; - - // TODO: Move this validation out to Args - let ssh_key = ssh_key - .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519").to_path_buf())) - .context("Failed to detect the private Fuchsia SSH key")?; - - ensure!( - ssh_key.try_exists().with_context(|| format!( - "Failed to check existence of SSH key \"{}\"", - ssh_key.display() - ))?, - "Cannot find SSH key \"{}\"", - ssh_key.display() - ); - - Ok(LocalTarget { name, ip, ssh_key }) - } -} - -#[cfg(test)] -mod test { - use super::*; - - use crate::finder::{Answer, Finder}; - use crate::run; - use crate::runner::{ExitStatus, Runner}; - - use indoc::formatdoc; - use pretty_assertions::assert_eq; - use tempfile::{NamedTempFile, TempDir}; - - const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc"; - const FUCHSIA_ADDR: &'static str 
= "fe80::1%2"; - const FUCHSIA_IP: &'static str = "fe80::1"; - const SCOPE_ID: u32 = 2; - - struct MockFinder; - impl Finder for MockFinder { - fn find_device(_: Option<String>) -> Result<Answer> { - Ok(Answer { - name: FUCHSIA_NAME.to_string(), - ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID)), - }) - } - } - - #[derive(Default)] - struct MockRunner { - config: std::cell::Cell<PathBuf>, - } - impl Runner for MockRunner { - fn run(&self, config: PathBuf) -> Result<ExitStatus> { - self.config.set(config); - Ok(ExitStatus::Ok) - } - } - - #[test] - fn local_invalid_ssh_key() { - let ssh = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - assert!(LocalDriver::new::<MockFinder>( - None, - ssh.path().to_path_buf(), - Some(PathBuf::new()), - ffx.path().to_path_buf(), - Some(out_dir.path().to_path_buf()), - ) - .is_err()); - } - - #[test] - fn local() { - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let runner = MockRunner::default(); - let driver = LocalDriver::new::<MockFinder>( - None, - ssh.path().to_path_buf(), - Some(ssh_key.path().to_path_buf()), - ffx.path().to_path_buf(), - Some(out_dir.path().to_path_buf()), - ) - .unwrap(); - - run(runner, driver, None).unwrap(); - - let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap(); - - let ssh_path = ssh.path().display(); - let ssh_key_path = ssh_key.path().display(); - let ffx_path = ffx.path().display(); - let out_path = out_dir.path().display(); - let want = formatdoc! 
{r#" - TestBeds: - - Name: {TESTBED_NAME} - Controllers: - FuchsiaDevice: - - mdns_name: {FUCHSIA_NAME} - ip: {FUCHSIA_ADDR} - take_bug_report_on_fail: true - ssh_binary_path: {ssh_path} - ffx_binary_path: {ffx_path} - ssh_priv_key: {ssh_key_path} - hard_reboot_on_fail: true - MoblyParams: - LogPath: {out_path} - "#}; - - assert_eq!(got, want); - } - - #[test] - fn local_with_test_params() { - let ssh = NamedTempFile::new().unwrap(); - let ssh_key = NamedTempFile::new().unwrap(); - let ffx = NamedTempFile::new().unwrap(); - let out_dir = TempDir::new().unwrap(); - - let runner = MockRunner::default(); - let driver = LocalDriver::new::<MockFinder>( - None, - ssh.path().to_path_buf(), - Some(ssh_key.path().to_path_buf()), - ffx.path().to_path_buf(), - Some(out_dir.path().to_path_buf()), - ) - .unwrap(); - - let params_yaml = " - sl4f_sanity_test_params: - foo: bar - "; - let params = serde_yaml::from_str(params_yaml).unwrap(); - - run(runner, driver, Some(params)).unwrap(); - - let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap(); - - let ssh_path = ssh.path().display().to_string(); - let ssh_key_path = ssh_key.path().display().to_string(); - let ffx_path = ffx.path().display().to_string(); - let out_path = out_dir.path().display(); - let want = formatdoc! {r#" - TestBeds: - - Name: {TESTBED_NAME} - Controllers: - FuchsiaDevice: - - mdns_name: {FUCHSIA_NAME} - ip: {FUCHSIA_ADDR} - take_bug_report_on_fail: true - ssh_binary_path: {ssh_path} - ffx_binary_path: {ffx_path} - ssh_priv_key: {ssh_key_path} - hard_reboot_on_fail: true - TestParams: - sl4f_sanity_test_params: - foo: bar - MoblyParams: - LogPath: {out_path} - "#}; - - assert_eq!(got, want); - } -}
diff --git a/runner/src/driver/mod.rs b/runner/src/driver/mod.rs deleted file mode 100644 index 35de41f..0000000 --- a/runner/src/driver/mod.rs +++ /dev/null
@@ -1,24 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -pub(crate) mod infra; -pub(crate) mod local; - -use crate::config::Config; - -use std::path::Path; - -use anyhow::Result; - -/// Driver provide insight into the information surrounding running an antlion -/// test. -pub(crate) trait Driver { - /// Path to output directory for test artifacts. - fn output_path(&self) -> &Path; - /// Antlion config for use during test. - fn config(&self) -> Config; - /// Additional logic to run after all tests run, regardless of tests passing - /// or failing. - fn teardown(&self) -> Result<()>; -}
diff --git a/runner/src/env.rs b/runner/src/env.rs deleted file mode 100644 index ede8b74..0000000 --- a/runner/src/env.rs +++ /dev/null
@@ -1,25 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -use std::ffi::OsStr; -use std::env::VarError; - -/// Inspection of the process's environment. -pub(crate) trait Environment { - /// Fetches the environment variable `key` from the current process. - /// - /// See [std::env::var] for details. - /// - /// [std::env::var]: https://doc.rust-lang.org/std/env/fn.var.html - fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError>; -} - -/// Query the local process's environment. -pub(crate) struct LocalEnvironment; - -impl Environment for LocalEnvironment { - fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError> { - std::env::var(key) - } -}
diff --git a/runner/src/finder.rs b/runner/src/finder.rs deleted file mode 100644 index c381b36..0000000 --- a/runner/src/finder.rs +++ /dev/null
@@ -1,200 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -use crate::net::IpAddr; - -use std::io; -use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket}; -use std::str; -use std::time::{Duration, Instant}; - -use anyhow::{bail, Context, Result}; -use mdns::protocol as dns; -use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface}; -use packet::{InnerPacketBuilder, ParseBuffer}; -use socket2::{Domain, Protocol, Socket, Type}; - -const FUCHSIA_DOMAIN: &str = "_fuchsia._udp.local"; -const MDNS_MCAST_V6: Ipv6Addr = Ipv6Addr::new(0xff02, 0, 0, 0, 0, 0, 0, 0x00fb); -const MDNS_PORT: u16 = 5353; -const MDNS_TIMEOUT: Duration = Duration::from_secs(10); - -lazy_static::lazy_static! { - static ref MDNS_QUERY: &'static [u8] = construct_query_buf(FUCHSIA_DOMAIN); -} - -/// Find Fuchsia devices. -pub(crate) trait Finder { - /// Find a Fuchsia device, preferring `device_name` if specified. - fn find_device(device_name: Option<String>) -> Result<Answer>; -} - -/// Answer from a Finder. -pub(crate) struct Answer { - /// Name of the Fuchsia device. - pub name: String, - /// IP address of the Fuchsia device. - pub ip: IpAddr, -} - -pub(crate) struct MulticastDns {} - -impl Finder for MulticastDns { - /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the - /// first device will be used. 
- fn find_device(device_name: Option<String>) -> Result<Answer> { - let interfaces = - get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?; - let interface_names = - interfaces.iter().map(|i| i.name.clone()).collect::<Vec<String>>().join(", "); - if let Some(ref d) = device_name { - println!("Performing mDNS discovery for {d} on interfaces: {interface_names}"); - } else { - println!("Performing mDNS discovery on interfaces: {interface_names}"); - } - - let socket = create_socket(interfaces.iter()).context("Failed to create mDNS socket")?; - - // TODO(http://b/264936590): Remove the race condition where the Fuchsia - // device can send its answer before this socket starts listening. Add an - // async runtime and concurrently listen for answers while sending queries. - send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?; - let answer = listen_for_answers(socket, device_name)?; - - println!("Device {} found at {}", answer.name, answer.ip); - Ok(answer) - } -} - -fn construct_query_buf(service: &str) -> &'static [u8] { - let question = dns::QuestionBuilder::new( - dns::DomainBuilder::from_str(service).unwrap(), - dns::Type::Ptr, - dns::Class::In, - true, - ); - - let mut message = dns::MessageBuilder::new(0, true); - message.add_question(question); - - let mut buf = vec![0; message.bytes_len()]; - message.serialize(buf.as_mut_slice()); - Box::leak(buf.into_boxed_slice()) -} - -/// Create a socket for both sending and listening on all multicast-capable -/// interfaces. 
-fn create_socket<'a>(interfaces: impl Iterator<Item = &'a McastInterface>) -> Result<Socket> { - let socket = Socket::new(Domain::IPV6, Type::DGRAM, Some(Protocol::UDP))?; - let read_timeout = Duration::from_millis(100); - socket - .set_read_timeout(Some(read_timeout)) - .with_context(|| format!("Failed to set SO_RCVTIMEO to {}ms", read_timeout.as_millis()))?; - socket.set_only_v6(true).context("Failed to set IPV6_V6ONLY")?; - socket.set_reuse_address(true).context("Failed to set SO_REUSEADDR")?; - socket.set_reuse_port(true).context("Failed to set SO_REUSEPORT")?; - - for interface in interfaces { - // Listen on all multicast-enabled interfaces - match interface.id() { - Ok(id) => match socket.join_multicast_v6(&MDNS_MCAST_V6, id) { - Ok(()) => {} - Err(e) => eprintln!("Failed to join mDNS multicast group on interface {id}: {e}"), - }, - Err(e) => eprintln!("Failed to listen on interface {}: {}", interface.name, e), - } - } - - socket - .bind(&SocketAddrV6::new(Ipv6Addr::UNSPECIFIED, 0, 0, 0).into()) - .with_context(|| format!("Failed to bind to unspecified IPv6"))?; - - Ok(socket) -} - -fn send_queries<'a>( - socket: &Socket, - interfaces: impl Iterator<Item = &'a McastInterface>, -) -> Result<()> { - let to_addr = SocketAddrV6::new(MDNS_MCAST_V6, MDNS_PORT, 0, 0).into(); - - for interface in interfaces { - let id = interface - .id() - .with_context(|| format!("Failed to get interface ID for {}", interface.name))?; - socket - .set_multicast_if_v6(id) - .with_context(|| format!("Failed to set multicast interface for {}", interface.name))?; - for addr in &interface.addrs { - if let SocketAddr::V6(addr_v6) = addr { - if !addr.ip().is_local_addr() || addr.ip().is_loopback() { - continue; - } - if let Err(e) = socket.send_to(&MDNS_QUERY, &to_addr) { - eprintln!( - "Failed to send mDNS query out {} via {}: {e}", - interface.name, - addr_v6.ip() - ); - continue; - } - } - } - } - Ok(()) -} - -fn listen_for_answers(socket: Socket, device_name: Option<String>) -> 
Result<Answer> { - let s: UdpSocket = socket.into(); - let mut buf = [0; 1500]; - - let end = Instant::now() + MDNS_TIMEOUT; - while Instant::now() < end { - match s.recv_from(&mut buf) { - Ok((packet_bytes, src_sock_addr)) => { - if !src_sock_addr.ip().is_local_addr() { - continue; - } - - let mut packet_buf = &mut buf[..packet_bytes]; - match packet_buf.parse::<dns::Message<_>>() { - Ok(message) => { - if !message.answers.iter().any(|a| a.domain == FUCHSIA_DOMAIN) { - continue; - } - for answer in message.additional { - if let Some(std::net::IpAddr::V6(addr)) = answer.rdata.ip_addr() { - if let SocketAddr::V6(src_v6) = src_sock_addr { - let name = answer - .domain - .to_string() - .trim_end_matches(".local") - .to_string(); - let scope_id = src_v6.scope_id(); - - if let Some(ref device) = device_name { - if &name != device { - println!("Found irrelevant device {name} at {addr}%{scope_id}"); - continue; - } - } - - return Ok(Answer { - name, - ip: IpAddr::V6(addr, Some(scope_id)), - }); - } - } - } - } - Err(err) => eprintln!("Failed to parse mDNS packet: {err:?}"), - } - } - Err(err) if err.kind() == io::ErrorKind::WouldBlock => {} - Err(err) => return Err(err.into()), - } - } - - bail!("device {device_name:?} not found") -}
diff --git a/runner/src/main.rs b/runner/src/main.rs deleted file mode 100644 index 4252694..0000000 --- a/runner/src/main.rs +++ /dev/null
@@ -1,149 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -mod config; -mod driver; -mod env; -mod finder; -mod net; -mod runner; -mod yaml; - -use crate::driver::infra::{InfraDriver, InfraDriverError}; -use crate::runner::ExitStatus; - -use std::fs::File; -use std::path::PathBuf; -use std::{fs, process::ExitCode}; - -use anyhow::{Context, Result}; -use argh::FromArgs; -use serde_yaml; -use serde_yaml::Value; - -#[derive(FromArgs)] -/// antlion runner with config generation -struct Args { - /// name of the Fuchsia device to use for testing; defaults to using mDNS - /// discovery - #[argh(option)] - device: Option<String>, - - /// path to the SSH binary used to communicate with all devices - #[argh(option, from_str_fn(parse_file))] - ssh_binary: PathBuf, - - /// path to the SSH private key used to communicate with Fuchsia; defaults - /// to ~/.ssh/fuchsia_ed25519 - #[argh(option, from_str_fn(parse_file))] - ssh_key: Option<PathBuf>, - - /// path to the FFX binary used to communicate with Fuchsia - #[argh(option, from_str_fn(parse_file))] - ffx_binary: PathBuf, - - /// path to the python interpreter binary (e.g. 
/bin/python3.9) - #[argh(option)] - python_bin: String, - - /// path to the antlion zipapp, ending in .pyz - #[argh(option, from_str_fn(parse_file))] - antlion_pyz: PathBuf, - - /// path to a directory for outputting artifacts; defaults to the current - /// working directory or FUCHSIA_TEST_OUTDIR - #[argh(option, from_str_fn(parse_directory))] - out_dir: Option<PathBuf>, - - /// path to additional YAML config for this test; placed in the - /// "test_params" key in the antlion config - #[argh(option, from_str_fn(parse_file))] - test_params: Option<PathBuf>, -} - -fn parse_file(s: &str) -> Result<PathBuf, String> { - let path = PathBuf::from(s); - let _ = File::open(&path).map_err(|e| format!("Failed to open \"{s}\": {e}"))?; - Ok(path) -} - -fn parse_directory(s: &str) -> Result<PathBuf, String> { - let path = PathBuf::from(s); - let meta = - std::fs::metadata(&path).map_err(|e| format!("Failed to read metadata of \"{s}\": {e}"))?; - if meta.is_file() { - return Err(format!("Expected a directory but found a file at \"{s}\"")); - } - Ok(path) -} - -fn run<R, D>(runner: R, driver: D, test_params: Option<Value>) -> Result<ExitCode> -where - R: runner::Runner, - D: driver::Driver, -{ - let mut config = driver.config(); - if let Some(params) = test_params { - config.merge_test_params(params); - } - - let yaml = - serde_yaml::to_string(&config).context("Failed to convert antlion config to YAML")?; - - let output_path = driver.output_path().to_path_buf(); - let config_path = output_path.join("config.yaml"); - println!("Writing {}", config_path.display()); - println!("\n{yaml}\n"); - fs::write(&config_path, yaml).context("Failed to write config to file")?; - - let exit_code = runner.run(config_path).context("Failed to run antlion")?; - match exit_code { - ExitStatus::Ok => println!("Antlion successfully exited"), - ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code), - ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal 
{}", code), - ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"), - }; - driver.teardown().context("Failed to teardown environment")?; - Ok(exit_code.into()) -} - -fn main() -> Result<ExitCode> { - let args: Args = argh::from_env(); - let env = env::LocalEnvironment; - let runner = - runner::ProcessRunner { python_bin: args.python_bin, antlion_pyz: args.antlion_pyz }; - - let test_params = match args.test_params { - Some(path) => { - let text = fs::read_to_string(&path) - .with_context(|| format!("Failed to read file \"{}\"", path.display()))?; - let yaml = serde_yaml::from_str(&text) - .with_context(|| format!("Failed to parse \"{text}\" as YAML"))?; - Some(yaml) - } - None => None, - }; - - match InfraDriver::new(env, args.ssh_binary.clone(), args.ffx_binary.clone()) { - Ok(env) => return run(runner, env, test_params), - Err(InfraDriverError::NotDetected(_)) => {} - Err(InfraDriverError::Config(e)) => { - return Err(anyhow::Error::from(e).context("Config validation")) - } - Err(InfraDriverError::Other(e)) => { - return Err(anyhow::Error::from(e).context("Unexpected infra driver error")) - } - }; - - let env = driver::local::LocalDriver::new::<finder::MulticastDns>( - args.device.clone(), - args.ssh_binary.clone(), - args.ssh_key.clone(), - args.ffx_binary.clone(), - args.out_dir.clone(), - ) - .context("Failed to detect local environment")?; - - run(runner, env, test_params) -}
diff --git a/runner/src/net.rs b/runner/src/net.rs deleted file mode 100644 index 70db2eb..0000000 --- a/runner/src/net.rs +++ /dev/null
@@ -1,231 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -use std::fmt::{Debug, Display}; -use std::marker::PhantomData; -use std::net::{Ipv4Addr, Ipv6Addr}; - -use netext::IsLocalAddr; -use nix::net::if_::if_nametoindex; -use serde::{Deserialize, Serialize}; -use thiserror::Error; - -/// IP address with support for IPv6 scope identifiers as defined in RFC 4007. -#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)] -pub enum IpAddr { - /// An IPv4 address. - V4(Ipv4Addr), - /// An IPv6 address with optional scope identifier. - V6(Ipv6Addr, Option<u32>), -} - -impl Into<std::net::IpAddr> for IpAddr { - fn into(self) -> std::net::IpAddr { - match self { - IpAddr::V4(ip) => std::net::IpAddr::from(ip), - IpAddr::V6(ip, _) => std::net::IpAddr::from(ip), - } - } -} - -impl From<Ipv6Addr> for IpAddr { - fn from(value: Ipv6Addr) -> Self { - IpAddr::V6(value, None) - } -} - -impl From<Ipv4Addr> for IpAddr { - fn from(value: Ipv4Addr) -> Self { - IpAddr::V4(value) - } -} - -impl From<std::net::IpAddr> for IpAddr { - fn from(value: std::net::IpAddr) -> Self { - match value { - std::net::IpAddr::V4(ip) => IpAddr::from(ip), - std::net::IpAddr::V6(ip) => IpAddr::from(ip), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Error)] -/// An error which can be returned when parsing an IP address with optional IPv6 -/// scope ID. See [`std::net::AddrParseError`]. -pub enum AddrParseError { - #[error(transparent)] - IpInvalid(#[from] std::net::AddrParseError), - #[error("no interface found with name \"{0}\"")] - InterfaceNotFound(String), - #[error("only IPv6 link-local may include a scope ID")] - /// Scope IDs are only supported for IPv6 link-local addresses as per RFC - /// 6874 Section 4. 
- ScopeNotSupported, -} - -impl std::str::FromStr for IpAddr { - type Err = AddrParseError; - - fn from_str(s: &str) -> Result<Self, Self::Err> { - let mut parts = s.splitn(2, '%'); - let addr = parts.next().unwrap(); // first element is guaranteed - let ip = std::net::IpAddr::from_str(addr)?; - let scope = parts.next(); - match (ip, scope) { - (std::net::IpAddr::V4(ip), None) => Ok(IpAddr::from(ip)), - (std::net::IpAddr::V4(_), Some(_)) => Err(AddrParseError::ScopeNotSupported), - (std::net::IpAddr::V6(ip), None) => Ok(IpAddr::V6(ip, None)), - (std::net::IpAddr::V6(ip), Some(scope)) => { - if !ip.is_link_local_addr() { - return Err(AddrParseError::ScopeNotSupported); - } - if let Ok(index) = scope.parse::<u32>() { - return Ok(IpAddr::V6(ip, Some(index))); - } - match if_nametoindex(scope) { - Ok(index) => Ok(IpAddr::V6(ip, Some(index))), - Err(_) => Err(AddrParseError::InterfaceNotFound(scope.to_string())), - } - } - } - } -} - -impl Display for IpAddr { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - IpAddr::V4(ip) => Display::fmt(ip, f), - IpAddr::V6(ip, None) => Display::fmt(ip, f), - IpAddr::V6(ip, Some(scope)) => { - Display::fmt(ip, f)?; - write!(f, "%{}", scope) - } - } - } -} - -impl Debug for IpAddr { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - Display::fmt(self, f) - } -} - -impl Serialize for IpAddr { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> - where - S: serde::Serializer, - { - serializer.serialize_str(self.to_string().as_str()) - } -} - -impl<'de> Deserialize<'de> for IpAddr { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'de>, - { - deserializer.deserialize_str(FromStrVisitor::new()) - } -} - -struct FromStrVisitor<T> { - ty: PhantomData<T>, -} - -impl<T> FromStrVisitor<T> { - fn new() -> Self { - FromStrVisitor { ty: PhantomData } - } -} - -impl<'de, T> serde::de::Visitor<'de> for FromStrVisitor<T> -where - 
T: std::str::FromStr, - T::Err: std::fmt::Display, -{ - type Value = T; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("IP address") - } - - fn visit_str<E>(self, s: &str) -> Result<Self::Value, E> - where - E: serde::de::Error, - { - s.parse().map_err(serde::de::Error::custom) - } -} - -#[cfg(test)] -mod test { - use super::{AddrParseError, IpAddr}; - use assert_matches::assert_matches; - - #[test] - fn parse_ip_invalid() { - assert_matches!("".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_))); - assert_matches!("192.168.1.".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_))); - assert_matches!("fe80:".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_))); - } - - #[test] - fn parse_ipv4() { - assert_matches!( - "192.168.1.1".parse::<IpAddr>(), - Ok(IpAddr::V4(ip)) - if ip == "192.168.1.1".parse::<std::net::Ipv4Addr>().unwrap() - ); - } - - #[test] - fn parse_ipv4_with_scope() { - assert_matches!( - "192.168.1.1%1".parse::<IpAddr>(), - Err(AddrParseError::ScopeNotSupported) - ); - } - - #[test] - fn parse_ipv6() { - assert_matches!( - "fe80::1".parse::<IpAddr>(), - Ok(IpAddr::V6(ip, None)) - if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap() - ); - } - - #[test] - fn parse_ipv6_global_with_scope() { - assert_matches!("2001::1%1".parse::<IpAddr>(), Err(AddrParseError::ScopeNotSupported)); - } - - #[test] - fn parse_ipv6_link_local_with_scope() { - assert_matches!( - "fe80::1%1".parse::<IpAddr>(), - Ok(IpAddr::V6(ip, Some(scope))) - if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap() - && scope == 1 - ); - } - - #[test] - fn parse_ipv6_link_local_with_scope_interface_not_found() { - // An empty scope ID should trigger a failed lookup. - assert_matches!( - "fe80::1%".parse::<IpAddr>(), - Err(AddrParseError::InterfaceNotFound(name)) - if name == "" - ); - - // The trailing '%' forces a failed lookup. At the time of writing, no - // OS supports this character as part of interface names. 
- assert_matches!( - "fe80::1%eth0%".parse::<IpAddr>(), - Err(AddrParseError::InterfaceNotFound(name)) - if name == "eth0%" - ); - } -}
diff --git a/runner/src/runner.rs b/runner/src/runner.rs deleted file mode 100644 index c40e05d..0000000 --- a/runner/src/runner.rs +++ /dev/null
@@ -1,83 +0,0 @@ -// Copyright 2023 The Fuchsia Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#[cfg(unix)] -use std::os::unix::process::ExitStatusExt; -use std::process::Command; -use std::{path::PathBuf, process::ExitCode}; - -use anyhow::{Context, Result}; -use itertools::Itertools; - -/// Runner for dispatching antlion. -pub(crate) trait Runner { - /// Run antlion using the provided config and output directory. - fn run(&self, config: PathBuf) -> Result<ExitStatus>; -} - -/// Executes antlion as a local process. -pub(crate) struct ProcessRunner { - pub python_bin: String, - pub antlion_pyz: PathBuf, -} - -impl Runner for ProcessRunner { - fn run(&self, config: PathBuf) -> Result<ExitStatus> { - let args = [ - &self.antlion_pyz.clone().into_os_string().into_string().unwrap(), - "--config", - &config.into_os_string().into_string().unwrap(), - ]; - - println!( - "Launching antlion to run: \"{} {}\"\n", - &self.python_bin, - args.iter().format(" "), - ); - - let status = Command::new(&self.python_bin) - .args(args) - .status() - .context("Failed to execute antlion")?; - - Ok(ExitStatus::from(status)) - } -} - -/// Describes the result of a child process after it has terminated. -pub(crate) enum ExitStatus { - /// Process terminated without error. - Ok, - /// Process terminated with a non-zero status code. - Err(i32), - /// Process was interrupted by a signal. 
- Interrupt(Option<i32>), -} - -impl From<std::process::ExitStatus> for ExitStatus { - fn from(status: std::process::ExitStatus) -> Self { - match status.code() { - Some(0) => ExitStatus::Ok, - Some(code) => ExitStatus::Err(code), - None if cfg!(target_os = "unix") => ExitStatus::Interrupt(status.signal()), - None => ExitStatus::Interrupt(None), - } - } -} - -impl Into<ExitCode> for ExitStatus { - fn into(self) -> ExitCode { - match self { - ExitStatus::Ok => ExitCode::SUCCESS, - ExitStatus::Err(code) => { - let code = match u8::try_from(code) { - Ok(c) => c, - Err(_) => 1, - }; - ExitCode::from(code) - } - ExitStatus::Interrupt(_) => ExitCode::FAILURE, - } - } -}
diff --git a/runner/src/yaml.rs b/runner/src/yaml.rs deleted file mode 100644 index ae972bf..0000000 --- a/runner/src/yaml.rs +++ /dev/null
@@ -1,95 +0,0 @@ -use serde_yaml::Value; - -/// Merge `b` into `a`, appending arrays and overwriting everything else. -pub fn merge(a: &mut Value, b: Value) { - match (a, b) { - (Value::Mapping(ref mut a), Value::Mapping(b)) => { - for (k, v) in b { - if !a.contains_key(&k) { - a.insert(k, v); - } else { - merge(&mut a[&k], v); - } - } - } - (Value::Sequence(ref mut a), Value::Sequence(ref mut b)) => { - a.append(b); - } - (a, b) => *a = b, - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_merge_mapping() { - let a = " - test_params: - name: a - who_called: - was_a: true - "; - let mut a: Value = serde_yaml::from_str(a).unwrap(); - let b = " - test_params: - name: b - who_called: - was_b: true - "; - let b: Value = serde_yaml::from_str(b).unwrap(); - merge(&mut a, b); - let want = " - test_params: - name: b - who_called: - was_a: true - was_b: true - "; - let want: Value = serde_yaml::from_str(want).unwrap(); - assert_eq!(a, want); - } - - #[test] - fn test_merge_append_arrays() { - let mut a: Value = serde_yaml::from_str(" - a").unwrap(); - let b: Value = serde_yaml::from_str(" - b").unwrap(); - merge(&mut a, b); - let want = " - - a - - b - "; - let want: Value = serde_yaml::from_str(want).unwrap(); - assert_eq!(a, want); - } - - #[test] - fn test_merge_append_arrays_allow_duplicates() { - let mut a: Value = serde_yaml::from_str(" - a").unwrap(); - let b: Value = serde_yaml::from_str(" - a").unwrap(); - merge(&mut a, b); - let want = " - - a - - a - "; - let want: Value = serde_yaml::from_str(want).unwrap(); - assert_eq!(a, want); - } - - #[test] - fn test_merge_overwrite_from_null() { - let mut a: Value = Value::Null; - let b: Value = serde_yaml::from_str("true").unwrap(); - merge(&mut a, b.clone()); - assert_eq!(a, b); - } - - #[test] - fn test_merge_overwrite_with_null() { - let mut a: Value = serde_yaml::from_str("true").unwrap(); - let b: Value = Value::Null; - merge(&mut a, b.clone()); - assert_eq!(a, b); - } -}
diff --git a/setup.py b/setup.py deleted file mode 100644 index 28f080f..0000000 --- a/setup.py +++ /dev/null
@@ -1,51 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from setuptools import setup, find_packages - -install_requires = [ - "mobly==1.12.0", - "pyyaml>=5.1", - "tenacity~=8.0", - # TODO(b/240443856): Remove these dependencies once antlion runs in - # Fuchsia's LUCI infrastructure. These are needed for flashing and using - # mDNS discovery, which are unnecessary in the future infrastructure. - "usbinfo", - "psutil", - "zeroconf", -] - -setup( - name="antlion", - version="0.2.0", - description="Host-driven, hardware-agnostic Fuchsia connectivity tests", - license="Apache-2.0", - packages=find_packages( - where="src", - ), - package_dir={"": "src"}, - include_package_data=True, - tests_require=[], - install_requires=install_requires, - extras_require={ - "html_graphing": ["bokeh"], - "digital_loggers_pdu": ["dlipower"], - "android": [ - "numpy", - "scapy", - ], - }, -)
diff --git a/src/antlion/__init__.py b/src/antlion/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/__init__.py +++ /dev/null
diff --git a/src/antlion/base_test.py b/src/antlion/base_test.py deleted file mode 100755 index 5033552..0000000 --- a/src/antlion/base_test.py +++ /dev/null
@@ -1,976 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import fnmatch -import functools -import importlib -import logging -import os -import traceback -from concurrent.futures import ThreadPoolExecutor - -from antlion import error -from antlion import keys -from antlion import logger -from antlion import records -from antlion import signals -from antlion import tracelogger -from antlion import utils -from antlion.event import event_bus -from antlion.event import subscription_bundle -from antlion.event.decorators import subscribe_static -from antlion.event.event import TestCaseBeginEvent -from antlion.event.event import TestCaseEndEvent -from antlion.event.event import TestClassBeginEvent -from antlion.event.event import TestClassEndEvent -from antlion.event.subscription_bundle import SubscriptionBundle - -from mobly import asserts -from mobly.base_test import BaseTestClass as MoblyBaseTest -from mobly.records import ExceptionRecord - -# Macro strings for test result reporting -TEST_CASE_TOKEN = "[Test Case]" -RESULT_LINE_TEMPLATE = TEST_CASE_TOKEN + " %s %s" - - -@subscribe_static(TestCaseBeginEvent) -def _logcat_log_test_begin(event): - """Ensures that logcat is running. 
Write a logcat line indicating test case - begin.""" - test_instance = event.test_class - try: - for ad in getattr(test_instance, "android_devices", []): - if not ad.is_adb_logcat_on: - ad.start_adb_logcat() - # Write test start token to adb log if android device is attached. - if not ad.skip_sl4a and ad.droid: - ad.droid.logV("%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name)) - - except error.ActsError as e: - test_instance.results.error.append( - ExceptionRecord(e, "Logcat for test begin: %s" % event.test_case_name) - ) - test_instance.log.error("BaseTest setup_test error: %s" % e.details) - except Exception as e: - test_instance.log.warning("Unable to send BEGIN log command to all devices.") - test_instance.log.warning("Error: %s" % e) - - -@subscribe_static(TestCaseEndEvent) -def _logcat_log_test_end(event): - """Write a logcat line indicating test case end.""" - test_instance = event.test_class - try: - # Write test end token to adb log if android device is attached. - for ad in getattr(test_instance, "android_devices", []): - if not ad.skip_sl4a and ad.droid: - ad.droid.logV("%s END %s" % (TEST_CASE_TOKEN, event.test_case_name)) - - except error.ActsError as e: - test_instance.results.error.append( - ExceptionRecord(e, "Logcat for test end: %s" % event.test_case_name) - ) - test_instance.log.error("BaseTest teardown_test error: %s" % e.details) - except Exception as e: - test_instance.log.warning("Unable to send END log command to all devices.") - test_instance.log.warning("Error: %s" % e) - - -@subscribe_static(TestCaseBeginEvent) -def _syslog_log_test_begin(event): - """This adds a BEGIN log message with the test name to the syslog of any - Fuchsia device""" - test_instance = event.test_class - try: - for fd in getattr(test_instance, "fuchsia_devices", []): - if hasattr(fd, "_sl4f"): - fd.sl4f.logging_lib.logI( - "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name) - ) - - except Exception as e: - test_instance.log.warning("Unable to send BEGIN log 
command to all devices.") - test_instance.log.warning("Error: %s" % e) - - -@subscribe_static(TestCaseEndEvent) -def _syslog_log_test_end(event): - """This adds a END log message with the test name to the syslog of any - Fuchsia device""" - test_instance = event.test_class - try: - for fd in getattr(test_instance, "fuchsia_devices", []): - if hasattr(fd, "_sl4f"): - fd.sl4f.logging_lib.logI( - "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name) - ) - - except Exception as e: - test_instance.log.warning("Unable to send END log command to all devices.") - test_instance.log.warning("Error: %s" % e) - - -event_bus.register_subscription(_logcat_log_test_begin.subscription) -event_bus.register_subscription(_logcat_log_test_end.subscription) -event_bus.register_subscription(_syslog_log_test_begin.subscription) -event_bus.register_subscription(_syslog_log_test_end.subscription) - - -class Error(Exception): - """Raised for exceptions that occured in BaseTestClass.""" - - -class BaseTestClass(MoblyBaseTest): - """Base class for all test classes to inherit from. Inherits some - functionality from Mobly's base test class. - - This class gets all the controller objects from test_runner and executes - the test cases requested within itself. - - Most attributes of this class are set at runtime based on the configuration - provided. - - Attributes: - tests: A list of strings, each representing a test case name. - TAG: A string used to refer to a test class. Default is the test class - name. - log: A logger object used for logging. - results: A records.TestResult object for aggregating test results from - the execution of test cases. - controller_configs: A dict of controller configs provided by the user - via the testbed config. - consecutive_failures: Tracks the number of consecutive test case - failures within this class. - consecutive_failure_limit: Number of consecutive test failures to allow - before blocking remaining tests in the same - test class. 
- size_limit_reached: True if the size of the log directory has reached - its limit. - current_test_name: A string that's the name of the test case currently - being executed. If no test is executing, this should - be None. - """ - - TAG = None - - def __init__(self, configs): - """Initializes a BaseTestClass given a TestRunConfig, which provides - all of the config information for this test class. - - Args: - configs: A config_parser.TestRunConfig object. - """ - super().__init__(configs) - - self.__handle_file_user_params() - - self.class_subscriptions = SubscriptionBundle() - self.class_subscriptions.register() - self.all_subscriptions = [self.class_subscriptions] - - self.current_test_name = None - self.log = tracelogger.TraceLogger(logging.getLogger()) - # TODO: remove after converging log path definitions with mobly - self.log_path = configs.log_path - - self.consecutive_failures = 0 - self.consecutive_failure_limit = self.user_params.get( - "consecutive_failure_limit", -1 - ) - self.size_limit_reached = False - self.retryable_exceptions = signals.TestFailure - - def _import_builtin_controllers(self): - """Import built-in controller modules. - - Go through the testbed configs, find any built-in controller configs - and import the corresponding controller module from antlion.controllers - package. - - Returns: - A list of controller modules. - """ - builtin_controllers = [] - for ctrl_name in keys.Config.builtin_controller_names.value: - if ctrl_name in self.controller_configs: - module_name = keys.get_module_name(ctrl_name) - module = importlib.import_module("antlion.controllers.%s" % module_name) - builtin_controllers.append(module) - return builtin_controllers - - def __handle_file_user_params(self): - """For backwards compatibility, moves all contents of the "files" dict - into the root level of user_params. - - This allows existing tests to run with the new Mobly-style format - without needing to make changes. 
- """ - for key, value in self.user_params.items(): - if key.endswith("files") and isinstance(value, dict): - new_user_params = dict(value) - new_user_params.update(self.user_params) - self.user_params = new_user_params - break - - @staticmethod - def get_module_reference_name(a_module): - """Returns the module's reference name. - - This is largely for backwards compatibility with log parsing. If the - module defines ACTS_CONTROLLER_REFERENCE_NAME, it will return that - value, or the module's submodule name. - - Args: - a_module: Any module. Ideally, a controller module. - Returns: - A string corresponding to the module's name. - """ - if hasattr(a_module, "ACTS_CONTROLLER_REFERENCE_NAME"): - return a_module.ACTS_CONTROLLER_REFERENCE_NAME - else: - return a_module.__name__.split(".")[-1] - - def register_controller(self, controller_module, required=True, builtin=False): - """Registers an ACTS controller module for a test class. Invokes Mobly's - implementation of register_controller. - - An ACTS controller module is a Python lib that can be used to control - a device, service, or equipment. To be ACTS compatible, a controller - module needs to have the following members: - - def create(configs): - [Required] Creates controller objects from configurations. - Args: - configs: A list of serialized data like string/dict. Each - element of the list is a configuration for a - controller object. - Returns: - A list of objects. - - def destroy(objects): - [Required] Destroys controller objects created by the create - function. Each controller object shall be properly cleaned up - and all the resources held should be released, e.g. memory - allocation, sockets, file handlers etc. - Args: - A list of controller objects created by the create function. - - def get_info(objects): - [Optional] Gets info from the controller objects used in a test - run. The info will be included in test_result_summary.json under - the key "ControllerInfo". 
Such information could include unique - ID, version, or anything that could be useful for describing the - test bed and debugging. - Args: - objects: A list of controller objects created by the create - function. - Returns: - A list of json serializable objects, each represents the - info of a controller object. The order of the info object - should follow that of the input objects. - Registering a controller module declares a test class's dependency the - controller. If the module config exists and the module matches the - controller interface, controller objects will be instantiated with - corresponding configs. The module should be imported first. - - Args: - controller_module: A module that follows the controller module - interface. - required: A bool. If True, failing to register the specified - controller module raises exceptions. If False, returns None upon - failures. - builtin: Specifies that the module is a builtin controller module in - ACTS. If true, adds itself to test attributes. - Returns: - A list of controller objects instantiated from controller_module, or - None. - - Raises: - When required is True, ControllerError is raised if no corresponding - config can be found. - Regardless of the value of "required", ControllerError is raised if - the controller module has already been registered or any other error - occurred in the registration process. - """ - module_ref_name = self.get_module_reference_name(controller_module) - module_config_name = controller_module.MOBLY_CONTROLLER_CONFIG_NAME - - # Get controller objects from Mobly's register_controller - controllers = self._controller_manager.register_controller( - controller_module, required=required - ) - if not controllers: - return None - - # Log controller information - # Implementation of "get_info" is optional for a controller module. 
- if hasattr(controller_module, "get_info"): - controller_info = controller_module.get_info(controllers) - self.log.info("Controller %s: %s", module_config_name, controller_info) - - if builtin: - setattr(self, module_ref_name, controllers) - return controllers - - def _setup_class(self): - """Proxy function to guarantee the base implementation of setup_class - is called. - """ - event_bus.post(TestClassBeginEvent(self)) - # Import and register the built-in controller modules specified - # in testbed config. - for module in self._import_builtin_controllers(): - self.register_controller(module, builtin=True) - return self.setup_class() - - def _teardown_class(self): - """Proxy function to guarantee the base implementation of teardown_class - is called. - """ - super()._teardown_class() - event_bus.post(TestClassEndEvent(self, self.results)) - - def _setup_test(self, test_name): - """Proxy function to guarantee the base implementation of setup_test is - called. - """ - self.current_test_name = test_name - - # Skip the test if the consecutive test case failure limit is reached. - if self.consecutive_failures == self.consecutive_failure_limit: - raise signals.TestError("Consecutive test failure") - - return self.setup_test() - - def setup_test(self): - """Setup function that will be called every time before executing each - test case in the test class. - - To signal setup failure, return False or raise an exception. If - exceptions were raised, the stack trace would appear in log, but the - exceptions would not propagate to upper levels. - - Implementation is optional. - """ - return True - - def _teardown_test(self, test_name): - """Proxy function to guarantee the base implementation of teardown_test - is called. - """ - self.log.debug("Tearing down test %s" % test_name) - self.teardown_test() - - def _on_fail(self, record): - """Proxy function to guarantee the base implementation of on_fail is - called. 
- - Args: - record: The records.TestResultRecord object for the failed test - case. - """ - self.consecutive_failures += 1 - if record.details: - self.log.error(record.details) - self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result) - self.on_fail(record.test_name, record.begin_time) - - def on_fail(self, test_name, begin_time): - """A function that is executed upon a test case failure. - - User implementation is optional. - - Args: - test_name: Name of the test that triggered this function. - begin_time: Logline format timestamp taken when the test started. - """ - - def _on_pass(self, record): - """Proxy function to guarantee the base implementation of on_pass is - called. - - Args: - record: The records.TestResultRecord object for the passed test - case. - """ - self.consecutive_failures = 0 - msg = record.details - if msg: - self.log.info(msg) - self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result) - self.on_pass(record.test_name, record.begin_time) - - def on_pass(self, test_name, begin_time): - """A function that is executed upon a test case passing. - - Implementation is optional. - - Args: - test_name: Name of the test that triggered this function. - begin_time: Logline format timestamp taken when the test started. - """ - - def _on_skip(self, record): - """Proxy function to guarantee the base implementation of on_skip is - called. - - Args: - record: The records.TestResultRecord object for the skipped test - case. - """ - self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result) - self.log.info("Reason to skip: %s", record.details) - self.on_skip(record.test_name, record.begin_time) - - def on_skip(self, test_name, begin_time): - """A function that is executed upon a test case being skipped. - - Implementation is optional. - - Args: - test_name: Name of the test that triggered this function. - begin_time: Logline format timestamp taken when the test started. 
- """ - - def _on_exception(self, record): - """Proxy function to guarantee the base implementation of on_exception - is called. - - Args: - record: The records.TestResultRecord object for the failed test - case. - """ - self.log.exception(record.details) - self.on_exception(record.test_name, record.begin_time) - - def on_exception(self, test_name, begin_time): - """A function that is executed upon an unhandled exception from a test - case. - - Implementation is optional. - - Args: - test_name: Name of the test that triggered this function. - begin_time: Logline format timestamp taken when the test started. - """ - - def on_retry(self): - """Function to run before retrying a test through get_func_with_retry. - - This function runs when a test is automatically retried. The function - can be used to modify internal test parameters, for example, to retry - a test with slightly different input variables. - """ - - def _exec_procedure_func(self, func, tr_record): - """Executes a procedure function like on_pass, on_fail etc. - - This function will alternate the 'Result' of the test's record if - exceptions happened when executing the procedure function. - - This will let signals.TestAbortAll through so abort_all works in all - procedure functions. - - Args: - func: The procedure function to be executed. - tr_record: The TestResultRecord object associated with the test - case executed. - """ - try: - func(tr_record) - except signals.TestAbortAll: - raise - except Exception as e: - self.log.exception( - "Exception happened when executing %s for %s.", - func.__name__, - self.current_test_name, - ) - tr_record.add_error(func.__name__, e) - - def exec_one_testcase(self, test_name, test_func): - """Executes one test case and update test results. - - Executes one test case, create a records.TestResultRecord object with - the execution information, and add the record to the test class's test - results. - - Args: - test_name: Name of the test. - test_func: The test function. 
- """ - class_name = self.__class__.__name__ - tr_record = records.TestResultRecord(test_name, class_name) - tr_record.test_begin() - self.begin_time = int(tr_record.begin_time) - self.log_begin_time = tr_record.log_begin_time - self.test_name = tr_record.test_name - event_bus.post(TestCaseBeginEvent(self, self.test_name)) - self.log.info("%s %s", TEST_CASE_TOKEN, test_name) - - # Enable test retry if specified in the ACTS config - retry_tests = self.user_params.get("retry_tests", []) - full_test_name = "%s.%s" % (class_name, self.test_name) - if any(name in retry_tests for name in [class_name, full_test_name]): - test_func = self.get_func_with_retry(test_func) - - verdict = None - test_signal = None - try: - try: - ret = self._setup_test(self.test_name) - asserts.assert_true( - ret is not False, "Setup for %s failed." % test_name - ) - verdict = test_func() - finally: - try: - self._teardown_test(self.test_name) - except signals.TestAbortAll: - raise - except Exception as e: - self.log.error(traceback.format_exc()) - tr_record.add_error("teardown_test", e) - except (signals.TestFailure, AssertionError) as e: - test_signal = e - if self.user_params.get( - keys.Config.key_test_failure_tracebacks.value, False - ): - self.log.exception(e) - tr_record.test_fail(e) - except signals.TestSkip as e: - # Test skipped. - test_signal = e - tr_record.test_skip(e) - except (signals.TestAbortClass, signals.TestAbortAll) as e: - # Abort signals, pass along. - test_signal = e - tr_record.test_fail(e) - raise e - except signals.TestPass as e: - # Explicit test pass. - test_signal = e - tr_record.test_pass(e) - except Exception as e: - test_signal = e - self.log.error(traceback.format_exc()) - # Exception happened during test. - tr_record.test_error(e) - else: - if verdict or (verdict is None): - # Test passed. 
- tr_record.test_pass() - return - tr_record.test_fail() - finally: - tr_record.update_record() - try: - # Execute post-test procedures - result = tr_record.result - if result == records.TestResultEnums.TEST_RESULT_PASS: - self._exec_procedure_func(self._on_pass, tr_record) - elif result == records.TestResultEnums.TEST_RESULT_FAIL: - self._exec_procedure_func(self._on_fail, tr_record) - elif result == records.TestResultEnums.TEST_RESULT_SKIP: - self._exec_procedure_func(self._on_skip, tr_record) - elif result == records.TestResultEnums.TEST_RESULT_ERROR: - self._exec_procedure_func(self._on_exception, tr_record) - self._exec_procedure_func(self._on_fail, tr_record) - finally: - self.results.add_record(tr_record) - self.summary_writer.dump( - tr_record.to_dict(), records.TestSummaryEntryType.RECORD - ) - self.current_test_name = None - event_bus.post(TestCaseEndEvent(self, self.test_name, test_signal)) - - def get_func_with_retry(self, func, attempts=2): - """Returns a wrapped test method that re-runs after failure. Return test - result upon success. If attempt limit reached, collect all failure - messages and raise a TestFailure signal. - - Params: - func: The test method - attempts: Number of attempts to run test - - Returns: result of the test method - """ - exceptions = self.retryable_exceptions - - def wrapper(*args, **kwargs): - error_msgs = [] - extras = {} - retry = False - for i in range(attempts): - try: - if retry: - self.teardown_test() - self.setup_test() - self.on_retry() - return func(*args, **kwargs) - except exceptions as e: - retry = True - msg = "Failure on attempt %d: %s" % (i + 1, e.details) - self.log.warning(msg) - error_msgs.append(msg) - if e.extras: - extras["Attempt %d" % (i + 1)] = e.extras - raise signals.TestFailure("\n".join(error_msgs), extras) - - return wrapper - - def run_generated_testcases( - self, - test_func, - settings, - args=None, - kwargs=None, - tag="", - name_func=None, - format_args=False, - ): - """Deprecated. 
Please use pre_run and generate_tests. - - Generated test cases are not written down as functions, but as a list - of parameter sets. This way we reduce code repetition and improve - test case scalability. - - Args: - test_func: The common logic shared by all these generated test - cases. This function should take at least one argument, - which is a parameter set. - settings: A list of strings representing parameter sets. These are - usually json strings that get loaded in the test_func. - args: Iterable of additional position args to be passed to - test_func. - kwargs: Dict of additional keyword args to be passed to test_func - tag: Name of this group of generated test cases. Ignored if - name_func is provided and operates properly. - name_func: A function that takes a test setting and generates a - proper test name. The test name should be shorter than - utils.MAX_FILENAME_LEN. Names over the limit will be - truncated. - format_args: If True, args will be appended as the first argument - in the args list passed to test_func. - - Returns: - A list of settings that did not pass. - """ - args = args or () - kwargs = kwargs or {} - failed_settings = [] - - for setting in settings: - test_name = "{} {}".format(tag, setting) - - if name_func: - try: - test_name = name_func(setting, *args, **kwargs) - except: - self.log.exception( - ( - "Failed to get test name from " - "test_func. 
Fall back to default %s" - ), - test_name, - ) - - self.results.requested.append(test_name) - - if len(test_name) > utils.MAX_FILENAME_LEN: - test_name = test_name[: utils.MAX_FILENAME_LEN] - - previous_success_cnt = len(self.results.passed) - - if format_args: - self.exec_one_testcase( - test_name, - functools.partial(test_func, *(args + (setting,)), **kwargs), - ) - else: - self.exec_one_testcase( - test_name, - functools.partial(test_func, *((setting,) + args), **kwargs), - ) - - if len(self.results.passed) - previous_success_cnt != 1: - failed_settings.append(setting) - - return failed_settings - - def _exec_func(self, func, *args): - """Executes a function with exception safeguard. - - This will let signals.TestAbortAll through so abort_all works in all - procedure functions. - - Args: - func: Function to be executed. - args: Arguments to be passed to the function. - - Returns: - Whatever the function returns, or False if unhandled exception - occured. - """ - try: - return func(*args) - except signals.TestAbortAll: - raise - except: - self.log.exception( - "Exception happened when executing %s in %s.", func.__name__, self.TAG - ) - return False - - def _block_all_test_cases(self, tests, reason="Failed class setup"): - """ - Block all passed in test cases. - Args: - tests: The tests to block. - reason: Message describing the reason that the tests are blocked. - Default is 'Failed class setup' - """ - for test_name, test_func in tests: - signal = signals.TestError(reason) - record = records.TestResultRecord(test_name, self.TAG) - record.test_begin() - if hasattr(test_func, "gather"): - signal.extras = test_func.gather() - record.test_error(signal) - self.results.add_record(record) - self.summary_writer.dump( - record.to_dict(), records.TestSummaryEntryType.RECORD - ) - self._on_skip(record) - - def run(self, test_names=None): - """Runs test cases within a test class by the order they appear in the - execution list. 
- - One of these test cases lists will be executed, shown here in priority - order: - 1. The test_names list, which is passed from cmd line. - 2. The self.tests list defined in test class. Invalid names are - ignored. - 3. All function that matches test case naming convention in the test - class. - - Args: - test_names: A list of string that are test case names/patterns - requested in cmd line. - - Returns: - The test results object of this class. - """ - # Executes pre-setup procedures, like generating test methods. - if not self._pre_run(): - return self.results - - self.register_test_class_event_subscriptions() - self.log.info("==========> %s <==========", self.TAG) - # Devise the actual test cases to run in the test class. - if self.tests: - # Specified by run list in class. - valid_tests = list(self.tests) - else: - # No test case specified by user, gather the run list automatically. - valid_tests = self.get_existing_test_names() - if test_names: - # Match test cases with any of the user-specified patterns - matches = [] - for test_name in test_names: - for valid_test in valid_tests: - if ( - fnmatch.fnmatch(valid_test, test_name) - and valid_test not in matches - ): - matches.append(valid_test) - else: - matches = valid_tests - self.results.requested = matches - self.summary_writer.dump( - self.results.requested_test_names_dict(), - records.TestSummaryEntryType.TEST_NAME_LIST, - ) - tests = self._get_test_methods(matches) - - # Setup for the class. 
- setup_fail = False - try: - if self._setup_class() is False: - self.log.error("Failed to setup %s.", self.TAG) - self._block_all_test_cases(tests) - setup_fail = True - except signals.TestAbortClass: - self.log.exception("Test class %s aborted" % self.TAG) - setup_fail = True - except Exception as e: - self.log.exception("Failed to setup %s.", self.TAG) - self._block_all_test_cases(tests) - setup_fail = True - if setup_fail: - self._exec_func(self._teardown_class) - self.log.info( - "Summary for test class %s: %s", self.TAG, self.results.summary_str() - ) - return self.results - - # Run tests in order. - test_case_iterations = self.user_params.get( - keys.Config.key_test_case_iterations.value, 1 - ) - if any( - [ - substr in self.__class__.__name__ - for substr in ["Preflight", "Postflight"] - ] - ): - test_case_iterations = 1 - try: - for test_name, test_func in tests: - for _ in range(test_case_iterations): - self.exec_one_testcase(test_name, test_func) - return self.results - except signals.TestAbortClass: - self.log.exception("Test class %s aborted" % self.TAG) - return self.results - except signals.TestAbortAll as e: - # Piggy-back test results on this exception object so we don't lose - # results from this test class. 
- setattr(e, "results", self.results) - raise e - finally: - self._exec_func(self._teardown_class) - self.log.info( - "Summary for test class %s: %s", self.TAG, self.results.summary_str() - ) - - def _ad_take_bugreport(self, ad, test_name, begin_time): - for i in range(3): - try: - ad.take_bug_report(test_name, begin_time) - return True - except Exception as e: - ad.log.error("bugreport attempt %s error: %s", i + 1, e) - - def _ad_take_extra_logs(self, ad, test_name, begin_time): - result = True - if getattr(ad, "qxdm_log", False): - # Gather qxdm log modified 3 minutes earlier than test start time - if begin_time: - qxdm_begin_time = begin_time - 1000 * 60 * 3 - else: - qxdm_begin_time = None - try: - ad.get_qxdm_logs(test_name, qxdm_begin_time) - except Exception as e: - ad.log.error( - "Failed to get QXDM log for %s with error %s", test_name, e - ) - result = False - - try: - ad.check_crash_report(test_name, begin_time, log_crash_report=True) - except Exception as e: - ad.log.error( - "Failed to check crash report for %s with error %s", test_name, e - ) - result = False - return result - - def _skip_bug_report(self, test_name): - """A function to check whether we should skip creating a bug report. - - Args: - test_name: The test case name - - Returns: True if bug report is to be skipped. - """ - if "no_bug_report_on_fail" in self.user_params: - return True - - # If the current test class or test case is found in the set of - # problematic tests, we skip bugreport and other failure artifact - # creation. - class_name = self.__class__.__name__ - quiet_tests = self.user_params.get("quiet_tests", []) - if class_name in quiet_tests: - self.log.info("Skipping bug report, as directed for this test class.") - return True - full_test_name = "%s.%s" % (class_name, test_name) - if full_test_name in quiet_tests: - self.log.info("Skipping bug report, as directed for this test case.") - return True - - # Once we hit a certain log path size, it's not going to get smaller. 
- # We cache the result so we don't have to keep doing directory walks. - if self.size_limit_reached: - return True - try: - max_log_size = int( - self.user_params.get("soft_output_size_limit") or "invalid" - ) - log_path = getattr(logging, "log_path", None) - if log_path: - curr_log_size = utils.get_directory_size(log_path) - if curr_log_size > max_log_size: - self.log.info( - "Skipping bug report, as we've reached the size limit." - ) - self.size_limit_reached = True - return True - except ValueError: - pass - return False - - def _take_bug_report(self, test_name, begin_time): - if self._skip_bug_report(test_name): - return - - executor = ThreadPoolExecutor(max_workers=10) - for ad in getattr(self, "android_devices", []): - executor.submit(self._ad_take_bugreport, ad, test_name, begin_time) - executor.submit(self._ad_take_extra_logs, ad, test_name, begin_time) - executor.shutdown() - - def _reboot_device(self, ad): - ad.log.info("Rebooting device.") - ad = ad.reboot() - - def _cleanup_logger_sessions(self): - for mylogger, session in self.logger_sessions: - self.log.info("Resetting a diagnostic session %s, %s", mylogger, session) - mylogger.reset() - self.logger_sessions = [] - - def _pull_diag_logs(self, test_name, begin_time): - for mylogger, session in self.logger_sessions: - self.log.info("Pulling diagnostic session %s", mylogger) - mylogger.stop(session) - diag_path = os.path.join( - self.log_path, logger.epoch_to_log_line_timestamp(begin_time) - ) - os.makedirs(diag_path, exist_ok=True) - mylogger.pull(session, diag_path) - - def register_test_class_event_subscriptions(self): - self.class_subscriptions = subscription_bundle.create_from_instance(self) - self.class_subscriptions.register() - - def unregister_test_class_event_subscriptions(self): - for package in self.all_subscriptions: - package.unregister()
diff --git a/src/antlion/bin/__init__.py b/src/antlion/bin/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/bin/__init__.py +++ /dev/null
diff --git a/src/antlion/bin/act.py b/src/antlion/bin/act.py deleted file mode 100755 index 2f78645..0000000 --- a/src/antlion/bin/act.py +++ /dev/null
@@ -1,272 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import os -import re -import signal -import sys -import traceback - -from mobly import config_parser as mobly_config_parser - -from antlion import config_parser -from antlion import keys -from antlion import signals -from antlion import test_runner -from antlion import utils -from antlion.config_parser import ActsConfigError - - -def _run_test(parsed_config, test_identifiers, repeat=1): - """Instantiate and runs test_runner.TestRunner. - - This is the function to start separate processes with. - - Args: - parsed_config: A mobly.config_parser.TestRunConfig that is a set of - configs for one test_runner.TestRunner. - test_identifiers: A list of tuples, each identifies what test case to - run on what test class. - repeat: Number of times to iterate the specified tests. - - Returns: - True if all tests passed without any error, False otherwise. - """ - runner = _create_test_runner(parsed_config, test_identifiers) - try: - for i in range(repeat): - runner.run() - return runner.results.is_all_pass - except signals.TestAbortAll: - return True - except: - print("Exception when executing %s, iteration %s." 
% (runner.testbed_name, i)) - print(traceback.format_exc()) - finally: - runner.stop() - - -def _create_test_runner(parsed_config, test_identifiers): - """Instantiates one test_runner.TestRunner object and register termination - signal handlers that properly shut down the test_runner.TestRunner run. - - Args: - parsed_config: A mobly.config_parser.TestRunConfig that is a set of - configs for one test_runner.TestRunner. - test_identifiers: A list of tuples, each identifies what test case to - run on what test class. - - Returns: - A test_runner.TestRunner object. - """ - try: - t = test_runner.TestRunner(parsed_config, test_identifiers) - except: - print("Failed to instantiate test runner, abort.") - print(traceback.format_exc()) - sys.exit(1) - # Register handler for termination signals. - handler = config_parser.gen_term_signal_handler([t]) - signal.signal(signal.SIGTERM, handler) - signal.signal(signal.SIGINT, handler) - return t - - -def _run_tests(parsed_configs, test_identifiers, repeat): - """Executes requested tests sequentially. - - Requested test runs will commence one after another according to the order - of their corresponding configs. - - Args: - parsed_configs: A list of mobly.config_parser.TestRunConfig, each is a - set of configs for one test_runner.TestRunner. - test_identifiers: A list of tuples, each identifies what test case to - run on what test class. - repeat: Number of times to iterate the specified tests. - - Returns: - True if all test runs executed successfully, False otherwise. - """ - ok = True - for c in parsed_configs: - try: - ret = _run_test(c, test_identifiers, repeat) - ok = ok and ret - except Exception as e: - print( - "Exception occurred when executing test bed %s. %s" - % (c.testbed_name, e) - ) - return ok - - -def main(): - """This is the default implementation of a cli entry point for ACTS test - execution. 
- - Or you could implement your own cli entry point using acts.config_parser - functions and acts.test_runner.execute_one_test_class. - """ - parser = argparse.ArgumentParser( - description=( - "Specify tests to run. If nothing specified, " "run all test cases found." - ) - ) - parser.add_argument( - "-c", - "--config", - type=str, - required=True, - metavar="<PATH>", - help="Path to the test configuration file.", - ) - parser.add_argument( - "-ci", - "--campaign_iterations", - metavar="<CAMPAIGN_ITERATIONS>", - nargs="?", - type=int, - const=1, - default=1, - help="Number of times to run the campaign or a group of test cases.", - ) - parser.add_argument( - "-tb", - "--testbed", - nargs="+", - type=str, - metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]", - help="Specify which test beds to run tests on.", - ) - parser.add_argument( - "-lp", - "--logpath", - type=str, - metavar="<PATH>", - help="Root path under which all logs will be placed.", - ) - parser.add_argument( - "-tp", - "--testpaths", - nargs="*", - type=str, - metavar="<PATH> <PATH>", - help="One or more non-recursive test class search paths.", - ) - - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument( - "-tc", - "--testclass", - nargs="+", - type=str, - metavar="[TestClass1 TestClass2:test_xxx ...]", - help="A list of test classes/cases to run.", - ) - group.add_argument( - "-tf", - "--testfile", - nargs=1, - type=str, - metavar="<PATH>", - help=( - "Path to a file containing a comma delimited list of test " - "classes to run." 
- ), - ) - parser.add_argument( - "-ti", - "--test_case_iterations", - metavar="<TEST_CASE_ITERATIONS>", - nargs="?", - type=int, - help="Number of times to run every test case.", - ) - - args = parser.parse_args(sys.argv[1:]) - test_list = None - if args.testfile: - test_list = config_parser.parse_test_file(args.testfile[0]) - elif args.testclass: - test_list = args.testclass - - config = args.config - - if config.endswith(".json"): - print( - "DEPRECATION NOTICE: Converting ACTS JSON to Mobly YAML. ACTS is " - + "deprecated. Support will be removed in the next release." - ) - config = utils.acts_json_to_mobly_yaml(config) - print(f"Wrote YAML config to {config}") - - parsed_configs = mobly_config_parser.load_test_config_file(config, args.testbed) - - for test_run_config in parsed_configs: - if args.testpaths: - tp_key = keys.Config.key_test_paths.value - test_run_config.controller_configs[tp_key] = args.testpaths - if args.logpath: - test_run_config.log_path = args.logpath - if args.test_case_iterations: - ti_key = keys.Config.key_test_case_iterations.value - test_run_config.user_params[ti_key] = args.test_case_iterations - - # Sets the --testpaths flag to the default test directory if left unset. - testpath_key = keys.Config.key_test_paths.value - if ( - testpath_key not in test_run_config.controller_configs - or test_run_config.controller_configs[testpath_key] is None - ): - test_run_config.controller_configs[testpath_key] = [ - os.path.join(os.path.dirname(__file__), "../tests/"), - ] - - for path in test_run_config.controller_configs[testpath_key]: - path = utils.abs_path(path) - - # TODO(markdr): Find a way to merge this with the validation done in - # Mobly's load_test_config_file. - if not test_run_config.log_path: - raise ActsConfigError( - "Required key %s missing in test config." 
- % keys.Config.key_log_path.value - ) - test_run_config.log_path = utils.abs_path(test_run_config.log_path) - - # Prepare args for test runs - test_identifiers = config_parser.parse_test_list(test_list) - - print( - "\n\nDEPRECATION NOTICE: Running antlion tests with act.py is " - "deprecated and will be removed in the next release. Please migrate " - "by using Mobly YAML configs and executing the test class directly:\n\n" - ) - for test_class, _ in test_identifiers: - print(f" python {test_class}.py -c {config}") - print("\n") - - exec_result = _run_tests(parsed_configs, test_identifiers, args.campaign_iterations) - if exec_result is False: - # return 1 upon test failure. - sys.exit(1) - sys.exit(0) - - -if __name__ == "__main__": - main()
diff --git a/src/antlion/capabilities/__init__.py b/src/antlion/capabilities/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/capabilities/__init__.py +++ /dev/null
diff --git a/src/antlion/capabilities/ssh.py b/src/antlion/capabilities/ssh.py deleted file mode 100644 index eeb1e16..0000000 --- a/src/antlion/capabilities/ssh.py +++ /dev/null
@@ -1,377 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2023 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import subprocess -import time - -from dataclasses import dataclass -from typing import List, Union, BinaryIO - -from antlion import logger -from antlion import signals -from antlion.net import wait_for_port - -DEFAULT_SSH_PORT: int = 22 -DEFAULT_SSH_TIMEOUT_SEC: int = 60 -DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90 -DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30 -# The default package repository for all components. 
- - -class SSHResult: - """Result of an SSH command.""" - - def __init__( - self, process: Union[subprocess.CompletedProcess, subprocess.CalledProcessError] - ) -> None: - self._raw_stdout = process.stdout - self._stderr = process.stderr.decode("utf-8", errors="replace") - self._exit_status: int = process.returncode - - def __str__(self): - if self.exit_status == 0: - return self.stdout - return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"' - - @property - def stdout(self) -> str: - if not hasattr(self, "_stdout"): - self._stdout = self._raw_stdout.decode("utf-8", errors="replace") - return self._stdout - - @property - def stderr(self) -> str: - return self._stderr - - @property - def exit_status(self) -> int: - return self._exit_status - - @property - def raw_stdout(self) -> bytes: - return self._raw_stdout - - -class SSHError(signals.TestError): - """A SSH command returned with a non-zero status code.""" - - def __init__(self, command: str, result: SSHResult): - super().__init__(f'SSH command "{command}" unexpectedly returned {result}') - self.result = result - - -class SSHTimeout(signals.TestError): - """A SSH command timed out.""" - - def __init__(self, err: subprocess.TimeoutExpired): - super().__init__( - f'SSH command "{err.cmd}" timed out after {err.timeout}s, ' - f'stdout="{err.stdout}", stderr="{err.stderr}"' - ) - - -class SSHTransportError(signals.TestError): - """Failure to send an SSH command.""" - - -@dataclass -class SSHConfig: - """SSH client config.""" - - # SSH flags. See ssh(1) for full details. - user: str - host_name: str - identity_file: str - - ssh_binary: str = "ssh" - config_file: str = "/dev/null" - port: int = 22 - - # SSH options. See ssh_config(5) for full details. 
- connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC - server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL - strict_host_key_checking: bool = False - user_known_hosts_file: str = "/dev/null" - log_level: str = "ERROR" - - def full_command(self, command: str, force_tty: bool = False) -> List[str]: - """Generate the complete command to execute command over SSH. - - Args: - command: The command to run over SSH - force_tty: Force pseudo-terminal allocation. This can be used to - execute arbitrary screen-based programs on a remote machine, - which can be very useful, e.g. when implementing menu services. - - Returns: - Arguments composing the complete call to SSH. - """ - optional_flags = [] - if force_tty: - # Multiple -t options force tty allocation, even if ssh has no local - # tty. This is necessary for launching ssh with subprocess without - # shell=True. - optional_flags.append("-tt") - - return ( - [ - self.ssh_binary, - # SSH flags - "-i", - self.identity_file, - "-F", - self.config_file, - "-p", - str(self.port), - # SSH configuration options - "-o", - f"ConnectTimeout={self.connect_timeout}", - "-o", - f"ServerAliveInterval={self.server_alive_interval}", - "-o", - f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}', - "-o", - f"UserKnownHostsFile={self.user_known_hosts_file}", - "-o", - f"LogLevel={self.log_level}", - ] - + optional_flags - + [f"{self.user}@{self.host_name}"] - + command.split() - ) - - -class SSHProvider: - """Device-specific provider for SSH clients.""" - - def __init__(self, config: SSHConfig) -> None: - """ - Args: - config: SSH client config - """ - logger_tag = f"ssh | {config.host_name}" - if config.port != DEFAULT_SSH_PORT: - logger_tag += f":{config.port}" - - # Check if the private key exists - - self.log = logger.create_tagged_trace_logger(logger_tag) - self.config = config - - try: - self.wait_until_reachable() - self.log.info("sshd is reachable") - except Exception as e: - raise 
TimeoutError("sshd is unreachable") from e - - def wait_until_reachable(self) -> None: - """Wait for the device to become reachable via SSH. - - Raises: - TimeoutError: connect_timeout has expired without a successful SSH - connection to the device - SSHTransportError: SSH is available on the device but - connect_timeout has expired and SSH fails to run - SSHTimeout: SSH is available on the device but connect_timeout has - expired and SSH takes too long to run a command - """ - timeout_sec = self.config.connect_timeout - timeout = time.time() + timeout_sec - wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec) - - while True: - try: - self._run("echo", timeout_sec, False, None) - return - except SSHTransportError as e: - # Repeat if necessary; _run() can exit prematurely by receiving - # SSH transport errors. These errors can be caused by sshd not - # being fully initialized yet. - if time.time() < timeout: - continue - else: - raise e - - def wait_until_unreachable( - self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC - ) -> None: - """Wait for the device to become unreachable via SSH. - - Args: - interval_sec: Seconds to wait between unreachability attempts - timeout_sec: Seconds to wait until raising TimeoutError - - Raises: - TimeoutError: when timeout_sec has expired without an unsuccessful - SSH connection to the device - """ - timeout = time.time() + timeout_sec - - while True: - try: - wait_for_port( - self.config.host_name, self.config.port, timeout_sec=interval_sec - ) - except TimeoutError: - return - - if time.time() < timeout: - raise TimeoutError( - f"Connection to {self.config.host_name} is still reachable " - f"after {timeout_sec}s" - ) - - def run( - self, - command: str, - timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC, - connect_retries: int = 3, - force_tty: bool = False, - ) -> SSHResult: - """Run a command on the device then exit. - - Args: - command: String to send to the device. 
- timeout_sec: Seconds to wait for the command to complete. - connect_retries: Amount of times to retry connect on fail. - force_tty: Force pseudo-terminal allocation. - - Raises: - SSHError: if the SSH command returns a non-zero status code - SSHTransportError: if SSH fails to run the command - SSHTimeout: if there is no response within timeout_sec - - Returns: - SSHResults from the executed command. - """ - return self._run_with_retry( - command, timeout_sec, connect_retries, force_tty, stdin=None - ) - - def _run_with_retry( - self, - command: str, - timeout_sec: int, - connect_retries: int, - force_tty: bool, - stdin: BinaryIO, - ) -> SSHResult: - err: Exception = ValueError("connect_retries cannot be 0") - for i in range(0, connect_retries): - try: - return self._run(command, timeout_sec, force_tty, stdin) - except SSHTransportError as e: - err = e - self.log.warn(f"Connect failed: {e}") - raise err - - def _run( - self, command: str, timeout_sec: int, force_tty: bool, stdin: BinaryIO - ) -> SSHResult: - full_command = self.config.full_command(command, force_tty) - self.log.debug( - f'Running "{command}" (full command: "{" ".join(full_command)}")' - ) - try: - process = subprocess.run( - full_command, - capture_output=True, - timeout=timeout_sec, - check=True, - stdin=stdin, - ) - except subprocess.CalledProcessError as e: - if e.returncode == 255: - stderr = e.stderr.decode("utf-8", errors="replace") - if ( - "Name or service not known" in stderr - or "Host does not exist" in stderr - ): - raise SSHTransportError( - f"Hostname {self.config.host_name} cannot be resolved to an address" - ) from e - if "Connection timed out" in stderr: - raise SSHTransportError( - f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s" - ) from e - if "Connection refused" in stderr: - raise SSHTransportError( - f"Connection refused by {self.config.host_name}" - ) from e - - raise SSHError(command, SSHResult(e)) from e - except 
subprocess.TimeoutExpired as e: - raise SSHTimeout(e) from e - - return SSHResult(process) - - def upload_file( - self, - local_path: str, - remote_path: str, - timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC, - connect_retries: int = 3, - ) -> None: - """Upload a file to the device. - - Args: - local_path: Path to the file to upload - remote_path: Path on the remote device to place the uploaded file. - timeout_sec: Seconds to wait for the command to complete. - connect_retries: Amount of times to retry connect on fail. - - Raises: - SSHError: if the SSH upload returns a non-zero status code - SSHTransportError: if SSH fails to run the upload command - SSHTimeout: if there is no response within timeout_sec - """ - file = open(local_path, "rb") - self._run_with_retry( - f"cat > {remote_path}", - timeout_sec, - connect_retries, - force_tty=False, - stdin=file, - ) - - def download_file( - self, - remote_path: str, - local_path: str, - timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC, - connect_retries: int = 3, - ) -> None: - """Upload a file to the device. - - Args: - remote_path: Path on the remote device to download. - local_path: Path on the host to the place the downloaded file. - timeout_sec: Seconds to wait for the command to complete. - connect_retries: Amount of times to retry connect on fail. - - Raises: - SSHError: if the SSH command returns a non-zero status code - SSHTransportError: if SSH fails to run the command - SSHTimeout: if there is no response within timeout_sec - """ - file = open(local_path, "rb") - return self._run_with_retry( - f"cat > {remote_path}", - timeout_sec, - connect_retries, - force_tty=False, - stdin=file, - )
diff --git a/src/antlion/config_parser.py b/src/antlion/config_parser.py deleted file mode 100755 index 7f202ff..0000000 --- a/src/antlion/config_parser.py +++ /dev/null
@@ -1,250 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import itertools -import os -import sys - -import mobly.config_parser as mobly_config_parser - -from antlion import keys -from antlion import utils - -# An environment variable defining the base location for ACTS logs. -_ENV_ACTS_LOGPATH = "ACTS_LOGPATH" -# An environment variable that enables test case failures to log stack traces. -_ENV_TEST_FAILURE_TRACEBACKS = "ACTS_TEST_FAILURE_TRACEBACKS" -# An environment variable defining the test search paths for ACTS. -_ENV_ACTS_TESTPATHS = "ACTS_TESTPATHS" -_PATH_SEPARATOR = ":" - - -class ActsConfigError(Exception): - """Raised when there is a problem in test configuration file.""" - - -def _validate_test_config(test_config): - """Validates the raw configuration loaded from the config file. - - Making sure all the required fields exist. - """ - for k in keys.Config.reserved_keys.value: - # TODO(markdr): Remove this continue after merging this with the - # validation done in Mobly's load_test_config_file. - if k == keys.Config.key_test_paths.value or k == keys.Config.key_log_path.value: - continue - - if k not in test_config: - raise ActsConfigError("Required key %s missing in test config." % k) - - -def _validate_testbed_name(name): - """Validates the name of a test bed. - - Since test bed names are used as part of the test run id, it needs to meet - certain requirements. 
- - Args: - name: The test bed's name specified in config file. - - Raises: - If the name does not meet any criteria, ActsConfigError is raised. - """ - if not name: - raise ActsConfigError("Test bed names can't be empty.") - if not isinstance(name, str): - raise ActsConfigError("Test bed names have to be string.") - for l in name: - if l not in utils.valid_filename_chars: - raise ActsConfigError("Char '%s' is not allowed in test bed names." % l) - - -def _validate_testbed_configs(testbed_configs): - """Validates the testbed configurations. - - Args: - testbed_configs: A list of testbed configuration json objects. - - Raises: - If any part of the configuration is invalid, ActsConfigError is raised. - """ - # Cross checks testbed configs for resource conflicts. - for name in testbed_configs: - _validate_testbed_name(name) - - -def gen_term_signal_handler(test_runners): - def termination_sig_handler(signal_num, frame): - print("Received sigterm %s." % signal_num) - for t in test_runners: - t.stop() - sys.exit(1) - - return termination_sig_handler - - -def _parse_one_test_specifier(item): - """Parse one test specifier from command line input. - - Args: - item: A string that specifies a test class or test cases in one test - class to run. - - Returns: - A tuple of a string and a list of strings. The string is the test class - name, the list of strings is a list of test case names. The list can be - None. - """ - tokens = item.split(":") - if len(tokens) > 2: - raise ActsConfigError("Syntax error in test specifier %s" % item) - if len(tokens) == 1: - # This should be considered a test class name - test_cls_name = tokens[0] - return test_cls_name, None - elif len(tokens) == 2: - # This should be considered a test class name followed by - # a list of test case names. 
- test_cls_name, test_case_names = tokens - clean_names = [elem.strip() for elem in test_case_names.split(",")] - return test_cls_name, clean_names - - -def parse_test_list(test_list): - """Parse user provided test list into internal format for test_runner. - - Args: - test_list: A list of test classes/cases. - """ - result = [] - for elem in test_list: - result.append(_parse_one_test_specifier(elem)) - return result - - -def load_test_config_file(test_config_path, tb_filters=None): - """Processes the test configuration file provided by the user. - - Loads the configuration file into a json object, unpacks each testbed - config into its own TestRunConfig object, and validate the configuration in - the process. - - Args: - test_config_path: Path to the test configuration file. - tb_filters: A subset of test bed names to be pulled from the config - file. If None, then all test beds will be selected. - - Returns: - A list of mobly.config_parser.TestRunConfig objects to be passed to - test_runner.TestRunner. - """ - configs = utils.load_config(test_config_path) - - testbeds = configs[keys.Config.key_testbed.value] - if type(testbeds) is list: - tb_dict = dict() - for testbed in testbeds: - tb_dict[testbed[keys.Config.key_testbed_name.value]] = testbed - testbeds = tb_dict - elif type(testbeds) is dict: - # For compatibility, make sure the entry name is the same as - # the testbed's "name" entry - for name, testbed in testbeds.items(): - testbed[keys.Config.key_testbed_name.value] = name - - if tb_filters: - tbs = {} - for name in tb_filters: - if name in testbeds: - tbs[name] = testbeds[name] - else: - raise ActsConfigError( - 'Expected testbed named "%s", but none was found. Check ' - "if you have the correct testbed names." 
% name - ) - testbeds = tbs - - if ( - keys.Config.key_log_path.value not in configs - and _ENV_ACTS_LOGPATH in os.environ - ): - print("Using environment log path: %s" % (os.environ[_ENV_ACTS_LOGPATH])) - configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH] - if ( - keys.Config.key_test_paths.value not in configs - and _ENV_ACTS_TESTPATHS in os.environ - ): - print("Using environment test paths: %s" % (os.environ[_ENV_ACTS_TESTPATHS])) - configs[keys.Config.key_test_paths.value] = os.environ[ - _ENV_ACTS_TESTPATHS - ].split(_PATH_SEPARATOR) - if ( - keys.Config.key_test_failure_tracebacks not in configs - and _ENV_TEST_FAILURE_TRACEBACKS in os.environ - ): - configs[keys.Config.key_test_failure_tracebacks.value] = os.environ[ - _ENV_TEST_FAILURE_TRACEBACKS - ] - - # TODO: See if there is a better way to do this: b/29836695 - config_path, _ = os.path.split(utils.abs_path(test_config_path)) - configs[keys.Config.key_config_path.value] = config_path - _validate_test_config(configs) - _validate_testbed_configs(testbeds) - # Unpack testbeds into separate json objects. 
- configs.pop(keys.Config.key_testbed.value) - test_run_configs = [] - - for _, testbed in testbeds.items(): - test_run_config = mobly_config_parser.TestRunConfig() - test_run_config.testbed_name = testbed[keys.Config.key_testbed_name.value] - test_run_config.controller_configs = testbed - test_run_config.controller_configs[ - keys.Config.key_test_paths.value - ] = configs.get(keys.Config.key_test_paths.value, None) - test_run_config.log_path = configs.get(keys.Config.key_log_path.value, None) - if test_run_config.log_path is not None: - test_run_config.log_path = utils.abs_path(test_run_config.log_path) - - user_param_pairs = [] - for item in itertools.chain(configs.items(), testbed.items()): - if item[0] not in keys.Config.reserved_keys.value: - user_param_pairs.append(item) - test_run_config.user_params = dict(user_param_pairs) - - test_run_configs.append(test_run_config) - return test_run_configs - - -def parse_test_file(fpath): - """Parses a test file that contains test specifiers. - - Args: - fpath: A string that is the path to the test file to parse. - - Returns: - A list of strings, each is a test specifier. - """ - with open(fpath, "r") as f: - tf = [] - for line in f: - line = line.strip() - if not line: - continue - if len(tf) and (tf[-1].endswith(":") or tf[-1].endswith(",")): - tf[-1] += line - else: - tf.append(line) - return tf
diff --git a/src/antlion/context.py b/src/antlion/context.py deleted file mode 100644 index cfe9df8..0000000 --- a/src/antlion/context.py +++ /dev/null
@@ -1,355 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import enum -import logging -import os - -from antlion.event import event_bus -from antlion.event.event import Event -from antlion.event.event import TestCaseBeginEvent -from antlion.event.event import TestCaseEndEvent -from antlion.event.event import TestCaseEvent -from antlion.event.event import TestClassBeginEvent -from antlion.event.event import TestClassEndEvent -from antlion.event.event import TestClassEvent - - -class ContextLevel(enum.IntEnum): - ROOT = 0 - TESTCLASS = 1 - TESTCASE = 2 - - -def get_current_context(depth=None): - """Get the current test context at the specified depth. - Pulls the most recently created context, with a level at or below the given - depth, from the _contexts stack. - - Args: - depth: The desired context level. For example, the TESTCLASS level would - yield the current test class context, even if the test is currently - within a test case. - - Returns: An instance of TestContext. - """ - if depth is None: - return _contexts[-1] - return _contexts[min(depth, len(_contexts) - 1)] - - -def get_context_for_event(event): - """Creates and returns a TestContext from the given event. - A TestClassContext is created for a TestClassEvent, and a TestCaseContext - is created for a TestCaseEvent. - - Args: - event: An instance of TestCaseEvent or TestClassEvent. 
- - Returns: An instance of TestContext corresponding to the event. - - Raises: TypeError if event is neither a TestCaseEvent nor TestClassEvent - """ - if isinstance(event, TestCaseEvent): - return _get_context_for_test_case_event(event) - if isinstance(event, TestClassEvent): - return _get_context_for_test_class_event(event) - raise TypeError("Unrecognized event type: %s %s", event, event.__class__) - - -def _get_context_for_test_case_event(event): - """Generate a TestCaseContext from the given TestCaseEvent.""" - return TestCaseContext(event.test_class, event.test_case) - - -def _get_context_for_test_class_event(event): - """Generate a TestClassContext from the given TestClassEvent.""" - return TestClassContext(event.test_class) - - -class NewContextEvent(Event): - """The event posted when a test context has changed.""" - - -class NewTestClassContextEvent(NewContextEvent): - """The event posted when the test class context has changed.""" - - -class NewTestCaseContextEvent(NewContextEvent): - """The event posted when the test case context has changed.""" - - -def _update_test_class_context(event): - """Pushes a new TestClassContext to the _contexts stack upon a - TestClassBeginEvent. Pops the most recent context off the stack upon a - TestClassEndEvent. Posts the context change to the event bus. - - Args: - event: An instance of TestClassBeginEvent or TestClassEndEvent. - """ - if isinstance(event, TestClassBeginEvent): - _contexts.append(_get_context_for_test_class_event(event)) - if isinstance(event, TestClassEndEvent): - if _contexts: - _contexts.pop() - event_bus.post(NewTestClassContextEvent()) - - -def _update_test_case_context(event): - """Pushes a new TestCaseContext to the _contexts stack upon a - TestCaseBeginEvent. Pops the most recent context off the stack upon a - TestCaseEndEvent. Posts the context change to the event bus. - - Args: - event: An instance of TestCaseBeginEvent or TestCaseEndEvent. 
- """ - if isinstance(event, TestCaseBeginEvent): - _contexts.append(_get_context_for_test_case_event(event)) - if isinstance(event, TestCaseEndEvent): - if _contexts: - _contexts.pop() - event_bus.post(NewTestCaseContextEvent()) - - -event_bus.register(TestClassEvent, _update_test_class_context) -event_bus.register(TestCaseBeginEvent, _update_test_case_context, order=-100) -event_bus.register(TestCaseEndEvent, _update_test_case_context, order=100) - - -class TestContext(object): - """An object representing the current context in which a test is executing. - - The context encodes the current state of the test runner with respect to a - particular scenario in which code is being executed. For example, if some - code is being executed as part of a test case, then the context should - encode information about that test case such as its name or enclosing - class. - - The subcontext specifies a relative path in which certain outputs, - e.g. logcat, should be kept for the given context. - - The full output path is given by - <base_output_path>/<context_dir>/<subcontext>. - - Attributes: - _base_output_paths: a dictionary mapping a logger's name to its base - output path - _subcontexts: a dictionary mapping a logger's name to its - subcontext-level output directory - """ - - _base_output_paths = {} - _subcontexts = {} - - def get_base_output_path(self, log_name=None): - """Gets the base output path for this logger. - - The base output path is interpreted as the reporting root for the - entire test runner. - - If a path has been added with add_base_output_path, it is returned. - Otherwise, a default is determined by _get_default_base_output_path(). - - Args: - log_name: The name of the logger. - - Returns: - The output path. 
- """ - if log_name in self._base_output_paths: - return self._base_output_paths[log_name] - return self._get_default_base_output_path() - - @classmethod - def add_base_output_path(cls, log_name, base_output_path): - """Store the base path for this logger. - - Args: - log_name: The name of the logger. - base_output_path: The base path of output files for this logger. - """ - cls._base_output_paths[log_name] = base_output_path - - def get_subcontext(self, log_name=None): - """Gets the subcontext for this logger. - - The subcontext is interpreted as the directory, relative to the - context-level path, where all outputs of the given logger are stored. - - If a path has been added with add_subcontext, it is returned. - Otherwise, the empty string is returned. - - Args: - log_name: The name of the logger. - - Returns: - The output path. - """ - return self._subcontexts.get(log_name, "") - - @classmethod - def add_subcontext(cls, log_name, subcontext): - """Store the subcontext path for this logger. - - Args: - log_name: The name of the logger. - subcontext: The relative subcontext path of output files for this - logger. - """ - cls._subcontexts[log_name] = subcontext - - def get_full_output_path(self, log_name=None): - """Gets the full output path for this context. - - The full path represents the absolute path to the output directory, - as given by <base_output_path>/<context_dir>/<subcontext> - - Args: - log_name: The name of the logger. Used to specify the base output - path and the subcontext. - - Returns: - The output path. - """ - - path = os.path.join( - self.get_base_output_path(log_name), - self._get_default_context_dir(), - self.get_subcontext(log_name), - ) - os.makedirs(path, exist_ok=True) - return path - - @property - def identifier(self): - raise NotImplementedError() - - def _get_default_base_output_path(self): - """Gets the default base output path. - - This will attempt to use the ACTS logging path set up in the global - logger. 
- - Returns: - The logging path. - - Raises: - EnvironmentError: If the ACTS logger has not been initialized. - """ - try: - return logging.log_path - except AttributeError as e: - raise EnvironmentError( - "The ACTS logger has not been set up and" - ' "base_output_path" has not been set.' - ) from e - - def _get_default_context_dir(self): - """Gets the default output directory for this context.""" - raise NotImplementedError() - - -class RootContext(TestContext): - """A TestContext that represents a test run.""" - - @property - def identifier(self): - return "root" - - def _get_default_context_dir(self): - """Gets the default output directory for this context. - - Logs at the root level context are placed directly in the base level - directory, so no context-level path exists.""" - return "" - - -class TestClassContext(TestContext): - """A TestContext that represents a test class. - - Attributes: - test_class: The test class instance that this context represents. - """ - - def __init__(self, test_class): - """Initializes a TestClassContext for the given test class. - - Args: - test_class: A test class object. Must be an instance of the test - class, not the class object itself. - """ - self.test_class = test_class - - @property - def test_class_name(self): - return self.test_class.__class__.__name__ - - @property - def identifier(self): - return self.test_class_name - - def _get_default_context_dir(self): - """Gets the default output directory for this context. - - For TestClassContexts, this will be the name of the test class. This is - in line with the ACTS logger itself. - """ - return self.test_class_name - - -class TestCaseContext(TestContext): - """A TestContext that represents a test case. - - Attributes: - test_case: The string name of the test case. - test_class: The test class instance enclosing the test case. - """ - - def __init__(self, test_class, test_case): - """Initializes a TestCaseContext for the given test case. 
- - Args: - test_class: A test class object. Must be an instance of the test - class, not the class object itself. - test_case: The string name of the test case. - """ - self.test_class = test_class - self.test_case = test_case - - @property - def test_case_name(self): - return self.test_case - - @property - def test_class_name(self): - return self.test_class.__class__.__name__ - - @property - def identifier(self): - return "%s.%s" % (self.test_class_name, self.test_case_name) - - def _get_default_context_dir(self): - """Gets the default output directory for this context. - - For TestCaseContexts, this will be the name of the test class followed - by the name of the test case. This is in line with the ACTS logger - itself. - """ - return os.path.join(self.test_class_name, self.test_case_name) - - -# stack for keeping track of the current test context -_contexts = [RootContext()]
diff --git a/src/antlion/controllers/OWNERS b/src/antlion/controllers/OWNERS deleted file mode 100644 index ea76291..0000000 --- a/src/antlion/controllers/OWNERS +++ /dev/null
@@ -1,5 +0,0 @@ -per-file asus_axe11000_ap.py = martschneider@google.com -per-file fuchsia_device.py = chcl@google.com, dhobsd@google.com, haydennix@google.com, jmbrenna@google.com, mnck@google.com, nickchee@google.com, sbalana@google.com, silberst@google.com, tturney@google.com -per-file bluetooth_pts_device.py = tturney@google.com -per-file cellular_simulator.py = iguarna@google.com, chaoyangf@google.com, codycaldwell@google.com, yixiang@google.com -per-file openwrt_ap.py = jerrypcchen@google.com, martschneider@google.com, gmoturu@google.com, sishichen@google.com
diff --git a/src/antlion/controllers/__init__.py b/src/antlion/controllers/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/access_point.py b/src/antlion/controllers/access_point.py deleted file mode 100755 index 91a241d..0000000 --- a/src/antlion/controllers/access_point.py +++ /dev/null
@@ -1,865 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import ipaddress -import time - -from dataclasses import dataclass -from typing import Any, Dict, FrozenSet, List, Optional, Set, Tuple - -from antlion import logger -from antlion import utils -from antlion.capabilities.ssh import SSHConfig, SSHProvider -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces -from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig -from antlion.controllers.ap_lib.bridge_interface import BridgeInterface -from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet -from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError -from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities -from antlion.controllers.ap_lib.hostapd import Hostapd -from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset -from antlion.controllers.ap_lib.hostapd_config import HostapdConfig -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.radvd import Radvd -from antlion.controllers.ap_lib.radvd_config import RadvdConfig -from antlion.controllers.ap_lib.wireless_network_management import ( - BssTransitionManagementRequest, -) -from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device -from antlion.controllers.utils_lib.commands 
import ip -from antlion.controllers.utils_lib.commands import route -from antlion.controllers.utils_lib.ssh import connection -from antlion.controllers.utils_lib.ssh import settings -from antlion.libs.proc import job - -MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint" -ACTS_CONTROLLER_REFERENCE_NAME = "access_points" - - -class Error(Exception): - """Error raised when there is a problem with the access point.""" - - -@dataclass -class _ApInstance: - hostapd: Hostapd - subnet: Subnet - - -# These ranges were split this way since each physical radio can have up -# to 8 SSIDs so for the 2GHz radio the DHCP range will be -# 192.168.1 - 8 and the 5Ghz radio will be 192.168.9 - 16 -_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24" -_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24" - -# The last digit of the ip for the bridge interface -BRIDGE_IP_LAST = "100" - - -class AccessPoint(object): - """An access point controller. - - Attributes: - ssh: The ssh connection to this ap. - ssh_settings: The ssh settings being used by the ssh connection. - dhcp_settings: The dhcp server settings being used. - """ - - def __init__(self, configs: Dict[str, Any]) -> None: - """ - Args: - configs: configs for the access point from config file. 
- """ - self.ssh_settings = settings.from_config(configs["ssh_config"]) - self.log = logger.create_logger( - lambda msg: f"[Access Point|{self.ssh_settings.hostname}] {msg}" - ) - self.device_pdu_config = configs.get("PduDevice", None) - self.identifier = self.ssh_settings.hostname - - if "ap_subnet" in configs: - self._AP_2G_SUBNET_STR: str = configs["ap_subnet"]["2g"] - self._AP_5G_SUBNET_STR: str = configs["ap_subnet"]["5g"] - else: - self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT - self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT - - self._AP_2G_SUBNET = Subnet(ipaddress.ip_network(self._AP_2G_SUBNET_STR)) - self._AP_5G_SUBNET = Subnet(ipaddress.ip_network(self._AP_5G_SUBNET_STR)) - - self.ssh = connection.SshConnection(self.ssh_settings) - - # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider - self.ssh_provider = SSHProvider( - SSHConfig( - self.ssh_settings.username, - self.ssh_settings.hostname, - self.ssh_settings.identity_file, - port=self.ssh_settings.port, - ssh_binary=self.ssh_settings.executable, - connect_timeout=90, - ) - ) - - # Singleton utilities for running various commands. - self._ip_cmd = ip.LinuxIpCommand(self.ssh) - self._route_cmd = route.LinuxRouteCommand(self.ssh) - - # A map from network interface name to _ApInstance objects representing - # the hostapd instance running against the interface. - self._aps: Dict[str, _ApInstance] = dict() - self._dhcp: Optional[DhcpServer] = None - self._dhcp_bss: Dict[Any, Subnet] = dict() - self._radvd: Optional[Radvd] = None - self.bridge = BridgeInterface(self) - self.iwconfig = ApIwconfig(self) - - # Check to see if wan_interface is specified in acts_config for tests - # isolated from the internet and set this override. - self.interfaces = ApInterfaces(self, configs.get("wan_interface")) - - # Get needed interface names and initialize the unnecessary ones. 
- self.wan = self.interfaces.get_wan_interface() - self.wlan = self.interfaces.get_wlan_interface() - self.wlan_2g = self.wlan[0] - self.wlan_5g = self.wlan[1] - self.lan = self.interfaces.get_lan_interface() - self._initial_ap() - self.setup_bridge = False - - def _initial_ap(self) -> None: - """Initial AP interfaces. - - Bring down hostapd if instance is running, bring down all bridge - interfaces. - """ - # This is necessary for Gale/Whirlwind flashed with dev channel image - # Unused interfaces such as existing hostapd daemon, guest, mesh - # interfaces need to be brought down as part of the AP initialization - # process, otherwise test would fail. - try: - self.ssh.run("stop wpasupplicant") - except job.Error: - self.log.info("No wpasupplicant running") - try: - self.ssh.run("stop hostapd") - except job.Error: - self.log.info("No hostapd running") - # Bring down all wireless interfaces - for iface in self.wlan: - WLAN_DOWN = f"ip link set {iface} down" - self.ssh.run(WLAN_DOWN) - # Bring down all bridge interfaces - bridge_interfaces = self.interfaces.get_bridge_interface() - if bridge_interfaces: - for iface in bridge_interfaces: - BRIDGE_DOWN = f"ip link set {iface} down" - BRIDGE_DEL = f"brctl delbr {iface}" - self.ssh.run(BRIDGE_DOWN) - self.ssh.run(BRIDGE_DEL) - - def start_ap( - self, - hostapd_config: HostapdConfig, - radvd_config: RadvdConfig = None, - setup_bridge: bool = False, - is_nat_enabled: bool = True, - additional_parameters: Dict[str, Any] = None, - ) -> List[Any]: - """Starts as an ap using a set of configurations. - - This will start an ap on this host. To start an ap the controller - selects a network interface to use based on the configs given. It then - will start up hostapd on that interface. Next a subnet is created for - the network interface and dhcp server is refreshed to give out ips - for that subnet for any device that connects through that interface. - - Args: - hostapd_config: The configurations to use when starting up the ap. 
- radvd_config: The IPv6 configuration to use when starting up the ap. - setup_bridge: Whether to bridge the LAN interface WLAN interface. - Only one WLAN interface can be bridged with the LAN interface - and none of the guest networks can be bridged. - is_nat_enabled: If True, start NAT on the AP to allow the DUT to be - able to access the internet if the WAN port is connected to the - internet. - additional_parameters: Parameters that can sent directly into the - hostapd config file. This can be used for debugging and or - adding one off parameters into the config. - - Returns: - An identifier for each ssid being started. These identifiers can be - used later by this controller to control the ap. - - Raises: - Error: When the ap can't be brought up. - """ - if hostapd_config.frequency < 5000: - interface = self.wlan_2g - subnet = self._AP_2G_SUBNET - else: - interface = self.wlan_5g - subnet = self._AP_5G_SUBNET - - # radvd requires the interface to have a IPv6 link-local address. - if radvd_config: - self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0") - self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1") - - # In order to handle dhcp servers on any interface, the initiation of - # the dhcp server must be done after the wlan interfaces are figured - # out as opposed to being in __init__ - self._dhcp = DhcpServer(self.ssh, interface=interface) - - # For multi bssid configurations the mac address - # of the wireless interface needs to have enough space to mask out - # up to 8 different mac addresses. So in for one interface the range is - # hex 0-7 and for the other the range is hex 8-f. 
- interface_mac_orig = None - cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'" - interface_mac_orig = self.ssh.run(cmd) - if interface == self.wlan_5g: - hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "0" - last_octet = 1 - if interface == self.wlan_2g: - hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "8" - last_octet = 9 - if interface in self._aps: - raise ValueError( - "No WiFi interface available for AP on " - f"channel {hostapd_config.channel}" - ) - - apd = Hostapd(self.ssh, interface) - new_instance = _ApInstance(hostapd=apd, subnet=subnet) - self._aps[interface] = new_instance - - # Turn off the DHCP server, we're going to change its settings. - self.stop_dhcp() - # Clear all routes to prevent old routes from interfering. - self._route_cmd.clear_routes(net_interface=interface) - # Add IPv6 link-local route so packets destined to the AP will be - # processed by the AP. This is necessary if an iperf server is running - # on the AP, but not for traffic handled by the Linux networking stack - # such as ping. - if radvd_config: - self._route_cmd.add_route(interface, "fe80::/64") - - self._dhcp_bss = dict() - if hostapd_config.bss_lookup: - # The self._dhcp_bss dictionary is created to hold the key/value - # pair of the interface name and the ip scope that will be - # used for the particular interface. The a, b, c, d - # variables below are the octets for the ip address. The - # third octet is then incremented for each interface that - # is requested. This part is designed to bring up the - # hostapd interfaces and not the DHCP servers for each - # interface. 
- counter = 1 - for bss in hostapd_config.bss_lookup: - if interface_mac_orig: - hostapd_config.bss_lookup[bss].bssid = ( - interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:] - ) - self._route_cmd.clear_routes(net_interface=str(bss)) - if interface is self.wlan_2g: - starting_ip_range = self._AP_2G_SUBNET_STR - else: - starting_ip_range = self._AP_5G_SUBNET_STR - a, b, c, d = starting_ip_range.split(".") - self._dhcp_bss[bss] = Subnet( - ipaddress.ip_network(f"{a}.{b}.{int(c) + counter}.{d}") - ) - counter = counter + 1 - last_octet = last_octet + 1 - - apd.start(hostapd_config, additional_parameters=additional_parameters) - - # The DHCP serer requires interfaces to have ips and routes before - # the server will come up. - interface_ip = ipaddress.ip_interface( - f"{subnet.router}/{subnet.network.netmask}" - ) - if setup_bridge is True: - bridge_interface_name = "eth_test" - self.create_bridge(bridge_interface_name, [interface, self.lan]) - self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip) - else: - self._ip_cmd.set_ipv4_address(interface, interface_ip) - if hostapd_config.bss_lookup: - # This loop goes through each interface that was setup for - # hostapd and assigns the DHCP scopes that were defined but - # not used during the hostapd loop above. The k and v - # variables represent the interface name, k, and dhcp info, v. - for k, v in self._dhcp_bss.items(): - bss_interface_ip = ipaddress.ip_interface( - f"{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}" - ) - self._ip_cmd.set_ipv4_address(str(k), bss_interface_ip) - - # Restart the DHCP server with our updated list of subnets. 
- configured_subnets = self.get_configured_subnets() - dhcp_conf = DhcpConfig(subnets=configured_subnets) - self.start_dhcp(dhcp_conf=dhcp_conf) - if is_nat_enabled: - self.start_nat() - self.enable_forwarding() - else: - self.stop_nat() - self.enable_forwarding() - if radvd_config: - radvd_interface = bridge_interface_name if setup_bridge else interface - self._radvd = Radvd(self.ssh, radvd_interface) - self._radvd.start(radvd_config) - else: - self._radvd = None - - bss_interfaces = [bss for bss in hostapd_config.bss_lookup] - bss_interfaces.append(interface) - - return bss_interfaces - - def get_configured_subnets(self) -> List[Subnet]: - """Get the list of configured subnets on the access point. - - This allows consumers of the access point objects create custom DHCP - configs with the correct subnets. - - Returns: a list of Subnet objects - """ - configured_subnets = [x.subnet for x in self._aps.values()] - for k, v in self._dhcp_bss.items(): - configured_subnets.append(v) - return configured_subnets - - def start_dhcp(self, dhcp_conf: DhcpConfig) -> None: - """Start a DHCP server for the specified subnets. - - This allows consumers of the access point objects to control DHCP. - - Args: - dhcp_conf: A DhcpConfig object. - - Raises: - Error: Raised when a dhcp server error is found. - """ - self._dhcp.start(config=dhcp_conf) - - def stop_dhcp(self) -> None: - """Stop DHCP for this AP object. - - This allows consumers of the access point objects to control DHCP. - """ - self._dhcp.stop() - - def get_dhcp_logs(self) -> Optional[str]: - """Get DHCP logs for this AP object. - - This allows consumers of the access point objects to validate DHCP - behavior. - - Returns: - A string of the dhcp server logs, or None is a DHCP server has not - been started. - """ - if self._dhcp: - return self._dhcp.get_logs() - return None - - def get_hostapd_logs(self) -> Dict[str, str]: - """Get hostapd logs for all interfaces on AP object. 
- - This allows consumers of the access point objects to validate hostapd - behavior. - - Returns: A dict with {interface: log} from hostapd instances. - """ - hostapd_logs = dict() - for identifier in self._aps: - hostapd_logs[identifier] = self._aps.get(identifier).hostapd.pull_logs() - return hostapd_logs - - def get_radvd_logs(self) -> Optional[str]: - """Get radvd logs for this AP object. - - This allows consumers of the access point objects to validate radvd - behavior. - - Returns: - A string of the radvd logs, or None is a radvd server has not been - started. - """ - if self._radvd: - return self._radvd.pull_logs() - return None - - def enable_forwarding(self) -> None: - """Enable IPv4 and IPv6 forwarding on the AP. - - When forwarding is enabled, the access point is able to route IP packets - between devices in the same subnet. - """ - self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward") - self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding") - - def start_nat(self) -> None: - """Start NAT on the AP. - - This allows consumers of the access point objects to enable NAT - on the AP. - - Note that this is currently a global setting, since we don't - have per-interface masquerade rules. - """ - # The following three commands are needed to enable NAT between - # the WAN and LAN/WLAN ports. This means anyone connecting to the - # WLAN/LAN ports will be able to access the internet if the WAN port - # is connected to the internet. - self.ssh.run("iptables -t nat -F") - self.ssh.run(f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE") - - def stop_nat(self) -> None: - """Stop NAT on the AP. - - This allows consumers of the access point objects to disable NAT on the - AP. - - Note that this is currently a global setting, since we don't have - per-interface masquerade rules. 
- """ - self.ssh.run("iptables -t nat -F") - - def create_bridge(self, bridge_name: str, interfaces: List[str]) -> None: - """Create the specified bridge and bridge the specified interfaces. - - Args: - bridge_name: The name of the bridge to create. - interfaces: A list of interfaces to add to the bridge. - """ - - # Create the bridge interface - self.ssh.run(f"brctl addbr {bridge_name}") - - for interface in interfaces: - self.ssh.run(f"brctl addif {bridge_name} {interface}") - - self.ssh.run(f"ip link set {bridge_name} up") - - def remove_bridge(self, bridge_name: str) -> None: - """Removes the specified bridge - - Args: - bridge_name: The name of the bridge to remove. - """ - # Check if the bridge exists. - # - # Cases where it may not are if we failed to initialize properly - # - # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn - # down the bridge once, but we got called for each band. - result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True) - - # If the bridge exists, we'll get an exit_status of 0, indicating - # success, so we can continue and remove the bridge. - if result.exit_status == 0: - self.ssh.run(f"ip link set {bridge_name} down") - self.ssh.run(f"brctl delbr {bridge_name}") - - def get_bssid_from_ssid(self, ssid: str, band: str) -> Optional[str]: - """Gets the BSSID from a provided SSID - - Args: - ssid: An SSID string. - band: 2G or 5G Wifi band. - Returns: The BSSID if on the AP or None if SSID could not be found. - """ - if band == hostapd_constants.BAND_2G: - interfaces = [self.wlan_2g, ssid] - else: - interfaces = [self.wlan_5g, ssid] - - # Get the interface name associated with the given ssid. - for interface in interfaces: - iw_output = self.ssh.run( - f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'" - ) - if "command failed: No such device" in iw_output.stderr: - continue - else: - # If the configured ssid is equal to the given ssid, we found - # the right interface. 
- if iw_output.stdout == ssid: - iw_output = self.ssh.run( - f"iw dev {interface} info|grep addr|awk -F' ' '{{print $2}}'" - ) - return iw_output.stdout - return None - - def stop_ap(self, identifier: str) -> None: - """Stops a running ap on this controller. - - Args: - identifier: The identify of the ap that should be taken down. - """ - - if identifier not in list(self._aps.keys()): - raise ValueError(f"Invalid identifier {identifier} given") - - instance = self._aps.get(identifier) - - if self._radvd: - self._radvd.stop() - try: - self.stop_dhcp() - except NoInterfaceError: - pass - self.stop_nat() - instance.hostapd.stop() - self._ip_cmd.clear_ipv4_addresses(identifier) - - del self._aps[identifier] - bridge_interfaces = self.interfaces.get_bridge_interface() - if bridge_interfaces: - for iface in bridge_interfaces: - BRIDGE_DOWN = f"ip link set {iface} down" - BRIDGE_DEL = f"brctl delbr {iface}" - self.ssh.run(BRIDGE_DOWN) - self.ssh.run(BRIDGE_DEL) - - def stop_all_aps(self) -> None: - """Stops all running aps on this device.""" - - for ap in list(self._aps.keys()): - self.stop_ap(ap) - - def close(self) -> None: - """Called to take down the entire access point. - - When called will stop all aps running on this host, shutdown the dhcp - server, and stop the ssh connection. - """ - - if self._aps: - self.stop_all_aps() - self.ssh.close() - - def generate_bridge_configs(self, channel: int) -> Tuple[str, Optional[str], str]: - """Generate a list of configs for a bridge between LAN and WLAN. 
- - Args: - channel: the channel WLAN interface is brought up on - iface_lan: the LAN interface to bridge - Returns: - configs: tuple containing iface_wlan, iface_lan and bridge_ip - """ - - if channel < 15: - iface_wlan = self.wlan_2g - subnet_str = self._AP_2G_SUBNET_STR - else: - iface_wlan = self.wlan_5g - subnet_str = self._AP_5G_SUBNET_STR - - iface_lan = self.lan - - a, b, c, _ = subnet_str.strip("/24").split(".") - bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}" - - return (iface_wlan, iface_lan, bridge_ip) - - def ping( - self, - dest_ip: str, - count: int = 3, - interval: int = 1000, - timeout: int = 1000, - size: int = 56, - additional_ping_params: Optional[Any] = None, - ) -> Dict[str, Any]: - """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)""" - return utils.ping( - self.ssh, - dest_ip, - count=count, - interval=interval, - timeout=timeout, - size=size, - additional_ping_params=additional_ping_params, - ) - - def can_ping( - self, - dest_ip: str, - count: int = 1, - interval: int = 1000, - timeout: int = 1000, - size: int = 56, - additional_ping_params: Optional[Any] = None, - ) -> bool: - """Returns whether ap can ping dest_ip (see utils.can_ping)""" - return utils.can_ping( - self.ssh, - dest_ip, - count=count, - interval=interval, - timeout=timeout, - size=size, - additional_ping_params=additional_ping_params, - ) - - def hard_power_cycle( - self, - pdus: List[PduDevice], - hostapd_configs: Optional[List[HostapdConfig]] = None, - ) -> None: - """Kills, then restores power to AccessPoint, verifying it goes down and - comes back online cleanly. - - Args: - pdus: PDUs in the testbed - hostapd_configs: Hostapd settings. If present, these networks will - be spun up after the AP has rebooted. This list can either - contain HostapdConfig objects, or dictionaries with the start_ap - params - (i.e { 'hostapd_config': <HostapdConfig>, - 'setup_bridge': <bool>, - 'additional_parameters': <dict> } ). 
- Raise: - Error, if no PduDevice is provided in AccessPoint config. - ConnectionError, if AccessPoint fails to go offline or come back. - """ - if not self.device_pdu_config: - raise Error("No PduDevice provided in AccessPoint config.") - - if hostapd_configs is None: - hostapd_configs = [] - - self.log.info(f"Power cycling") - ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus) - - self.log.info(f"Killing power") - ap_pdu.off(str(ap_pdu_port)) - - self.log.info("Verifying AccessPoint is unreachable.") - self.ssh_provider.wait_until_unreachable() - self.log.info("AccessPoint is unreachable as expected.") - - self._aps.clear() - - self.log.info(f"Restoring power") - ap_pdu.on(str(ap_pdu_port)) - - self.log.info("Waiting for AccessPoint to become available via SSH.") - self.ssh_provider.wait_until_reachable() - self.log.info("AccessPoint responded to SSH.") - - # Allow 5 seconds for OS to finish getting set up - time.sleep(5) - self._initial_ap() - self.log.info("Power cycled successfully") - - for settings in hostapd_configs: - if type(settings) == HostapdConfig: - config = settings - setup_bridge = False - additional_parameters = None - - elif type(settings) == dict: - config = settings["hostapd_config"] - setup_bridge = settings.get("setup_bridge", False) - additional_parameters = settings.get("additional_parameters", None) - else: - raise TypeError( - "Items in hostapd_configs list must either be " - "HostapdConfig objects or dictionaries." 
- ) - - self.log.info(f"Restarting network {config.ssid}") - self.start_ap( - config, - setup_bridge=setup_bridge, - additional_parameters=additional_parameters, - ) - - def channel_switch(self, identifier: str, channel_num: int) -> None: - """Switch to a different channel on the given AP.""" - if identifier not in list(self._aps.keys()): - raise ValueError(f"Invalid identifier {identifier} given") - instance = self._aps.get(identifier) - self.log.info(f"channel switch to channel {channel_num}") - instance.hostapd.channel_switch(channel_num) - - def get_current_channel(self, identifier: str) -> int: - """Find the current channel on the given AP.""" - if identifier not in list(self._aps.keys()): - raise ValueError(f"Invalid identifier {identifier} given") - instance = self._aps.get(identifier) - return instance.hostapd.get_current_channel() - - def get_stas(self, identifier: str) -> Set[str]: - """Return MAC addresses of all associated STAs on the given AP.""" - if identifier not in list(self._aps.keys()): - raise ValueError(f"Invalid identifier {identifier} given") - instance = self._aps.get(identifier) - return instance.hostapd.get_stas() - - def get_sta_extended_capabilities( - self, identifier: str, sta_mac: str - ) -> ExtendedCapabilities: - """Get extended capabilities for the given STA, as seen by the AP.""" - if identifier not in list(self._aps.keys()): - raise ValueError(f"Invalid identifier {identifier} given") - instance = self._aps.get(identifier) - return instance.hostapd.get_sta_extended_capabilities(sta_mac) - - def send_bss_transition_management_req( - self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest - ) -> job.Result: - """Send a BSS Transition Management request to an associated STA.""" - if identifier not in list(self._aps.keys()): - raise ValueError("Invalid identifier {identifier} given") - instance = self._aps.get(identifier) - return instance.hostapd.send_bss_transition_management_req(sta_mac, request) - - -def 
setup_ap( - access_point: AccessPoint, - profile_name: str, - channel: int, - ssid: str, - mode: Optional[str] = None, - preamble: Optional[bool] = None, - beacon_interval: Optional[int] = None, - dtim_period: Optional[int] = None, - frag_threshold: Optional[int] = None, - rts_threshold: Optional[int] = None, - force_wmm: Optional[bool] = None, - hidden: Optional[bool] = False, - security: Optional[Security] = None, - pmf_support: Optional[int] = None, - additional_ap_parameters: Optional[Dict[str, Any]] = None, - password: Optional[str] = None, - n_capabilities: Optional[List[Any]] = None, - ac_capabilities: Optional[List[Any]] = None, - vht_bandwidth: Optional[int] = None, - wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(), - setup_bridge: bool = False, - is_ipv6_enabled: bool = False, - is_nat_enabled: bool = True, -): - """Creates a hostapd profile and runs it on an ap. This is a convenience - function that allows us to start an ap with a single function, without first - creating a hostapd config. - - Args: - access_point: An ACTS access_point controller - profile_name: The profile name of one of the hostapd ap presets. - channel: What channel to set the AP to. - preamble: Whether to set short or long preamble - beacon_interval: The beacon interval - dtim_period: Length of dtim period - frag_threshold: Fragmentation threshold - rts_threshold: RTS threshold - force_wmm: Enable WMM or not - hidden: Advertise the SSID or not - security: What security to enable. - pmf_support: Whether pmf is not disabled, enabled, or required - additional_ap_parameters: Additional parameters to send the AP. - password: Password to connect to WLAN if necessary. - check_connectivity: Whether to check for internet connectivity. - wnm_features: WNM features to enable on the AP. - setup_bridge: Whether to bridge the LAN interface WLAN interface. - Only one WLAN interface can be bridged with the LAN interface - and none of the guest networks can be bridged. 
- is_ipv6_enabled: If True, start a IPv6 router advertisement daemon - is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able - to access the internet if the WAN port is connected to the internet. - - Returns: - An identifier for each ssid being started. These identifiers can be - used later by this controller to control the ap. - - Raises: - Error: When the ap can't be brought up. - """ - ap = create_ap_preset( - profile_name=profile_name, - iface_wlan_2g=access_point.wlan_2g, - iface_wlan_5g=access_point.wlan_5g, - channel=channel, - ssid=ssid, - mode=mode, - short_preamble=preamble, - beacon_interval=beacon_interval, - dtim_period=dtim_period, - frag_threshold=frag_threshold, - rts_threshold=rts_threshold, - force_wmm=force_wmm, - hidden=hidden, - bss_settings=[], - security=security, - pmf_support=pmf_support, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_bandwidth=vht_bandwidth, - wnm_features=wnm_features, - ) - return access_point.start_ap( - hostapd_config=ap, - radvd_config=RadvdConfig() if is_ipv6_enabled else None, - setup_bridge=setup_bridge, - is_nat_enabled=is_nat_enabled, - additional_parameters=additional_ap_parameters, - ) - - -def create(configs: Any) -> List[AccessPoint]: - """Creates ap controllers from a json config. - - Creates an ap controller from either a list, or a single - element. The element can either be just the hostname or a dictionary - containing the hostname and username of the ap to connect to over ssh. - - Args: - The json configs that represent this controller. - - Returns: - A new AccessPoint. - """ - return [AccessPoint(c) for c in configs] - - -def destroy(aps: List[AccessPoint]) -> None: - """Destroys a list of access points. - - Args: - aps: The list of access points to destroy. - """ - for ap in aps: - ap.close() - - -def get_info(aps: List[AccessPoint]) -> List[str]: - """Get information on a list of access points. - - Args: - aps: A list of AccessPoints. 
- - Returns: - A list of all aps hostname. - """ - return [ap.ssh_settings.hostname for ap in aps]
diff --git a/src/antlion/controllers/adb.py b/src/antlion/controllers/adb.py deleted file mode 100644 index 5c3848d..0000000 --- a/src/antlion/controllers/adb.py +++ /dev/null
@@ -1,297 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import re -import shlex -import shutil - -from antlion.controllers.adb_lib.error import AdbCommandError -from antlion.controllers.adb_lib.error import AdbError -from antlion.libs.proc import job - -DEFAULT_ADB_TIMEOUT = 60 -DEFAULT_ADB_PULL_TIMEOUT = 180 - -ADB_REGEX = re.compile("adb:") -# Uses a regex to be backwards compatible with previous versions of ADB -# (N and above add the serial to the error msg). -DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found") -DEVICE_OFFLINE_REGEX = re.compile("error: device offline") -# Raised when adb forward commands fail to forward a port. -CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:") -# Expected output is "Android Debug Bridge version 1.0.XX -ADB_VERSION_REGEX = re.compile("Android Debug Bridge version 1.0.(\d+)") -GREP_REGEX = re.compile("grep(\s+)") - -ROOT_USER_ID = "0" -SHELL_USER_ID = "2000" - - -def parsing_parcel_output(output): - """Parsing the adb output in Parcel format. - - Parsing the adb output in format: - Result: Parcel( - 0x00000000: 00000000 00000014 00390038 00340031 '........8.9.1.4.' - 0x00000010: 00300038 00300030 00300030 00340032 '8.0.0.0.0.0.2.4.' - 0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.' - 0x00000030: 00000000 '.... 
') - """ - output = "".join(re.findall(r"'(.*)'", output)) - return re.sub(r"[.\s]", "", output) - - -class AdbProxy(object): - """Proxy class for ADB. - - For syntactic reasons, the '-' in adb commands need to be replaced with - '_'. Can directly execute adb commands on an object: - >> adb = AdbProxy(<serial>) - >> adb.start_server() - >> adb.devices() # will return the console output of "adb devices". - """ - - def __init__(self, serial="", ssh_connection=None): - """Construct an instance of AdbProxy. - - Args: - serial: str serial number of Android device from `adb devices` - ssh_connection: SshConnection instance if the Android device is - connected to a remote host that we can reach via SSH. - """ - self.serial = serial - self._server_local_port = None - adb_path = shutil.which("adb") - adb_cmd = [shlex.quote(adb_path)] - if serial: - adb_cmd.append("-s %s" % serial) - if ssh_connection is not None: - # Kill all existing adb processes on the remote host (if any) - # Note that if there are none, then pkill exits with non-zero status - ssh_connection.run("pkill adb", ignore_status=True) - # Copy over the adb binary to a temp dir - temp_dir = ssh_connection.run("mktemp -d").stdout.strip() - ssh_connection.send_file(adb_path, temp_dir) - # Start up a new adb server running as root from the copied binary. - remote_adb_cmd = "%s/adb %s root" % ( - temp_dir, - "-s %s" % serial if serial else "", - ) - ssh_connection.run(remote_adb_cmd) - # Proxy a local port to the adb server port - local_port = ssh_connection.create_ssh_tunnel(5037) - self._server_local_port = local_port - - if self._server_local_port: - adb_cmd.append("-P %d" % local_port) - self.adb_str = " ".join(adb_cmd) - self._ssh_connection = ssh_connection - - def get_user_id(self): - """Returns the adb user. Either 2000 (shell) or 0 (root).""" - return self.shell("id -u") - - def is_root(self, user_id=None): - """Checks if the user is root. - - Args: - user_id: if supplied, the id to check against. 
- Returns: - True if the user is root. False otherwise. - """ - if not user_id: - user_id = self.get_user_id() - return user_id == ROOT_USER_ID - - def ensure_root(self): - """Ensures the user is root after making this call. - - Note that this will still fail if the device is a user build, as root - is not accessible from a user build. - - Returns: - False if the device is a user build. True otherwise. - """ - self.ensure_user(ROOT_USER_ID) - return self.is_root() - - def ensure_user(self, user_id=SHELL_USER_ID): - """Ensures the user is set to the given user. - - Args: - user_id: The id of the user. - """ - if self.is_root(user_id): - self.root() - else: - self.unroot() - self.wait_for_device() - return self.get_user_id() == user_id - - def _exec_cmd(self, cmd, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT): - """Executes adb commands in a new shell. - - This is specific to executing adb commands. - - Args: - cmd: A string or list that is the adb command to execute. - - Returns: - The stdout of the adb command. - - Raises: - AdbError for errors in ADB operations. - AdbCommandError for errors from commands executed through ADB. 
- """ - if isinstance(cmd, list): - cmd = " ".join(cmd) - result = job.run(cmd, ignore_status=True, timeout=timeout) - ret, out, err = result.exit_status, result.stdout, result.stderr - - if any( - pattern.match(err) - for pattern in [ - ADB_REGEX, - DEVICE_OFFLINE_REGEX, - DEVICE_NOT_FOUND_REGEX, - CANNOT_BIND_LISTENER_REGEX, - ] - ): - raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret) - if "Result: Parcel" in out: - return parsing_parcel_output(out) - if ignore_status or (ret == 1 and GREP_REGEX.search(cmd)): - return out or err - if ret != 0: - raise AdbCommandError(cmd=cmd, stdout=out, stderr=err, ret_code=ret) - return out - - def _exec_adb_cmd(self, name, arg_str, **kwargs): - return self._exec_cmd(" ".join((self.adb_str, name, arg_str)), **kwargs) - - def _exec_cmd_nb(self, cmd, **kwargs): - """Executes adb commands in a new shell, non blocking. - - Args: - cmds: A string that is the adb command to execute. - - """ - return job.run_async(cmd, **kwargs) - - def _exec_adb_cmd_nb(self, name, arg_str, **kwargs): - return self._exec_cmd_nb(" ".join((self.adb_str, name, arg_str)), **kwargs) - - def tcp_forward(self, host_port, device_port): - """Starts tcp forwarding from localhost to this android device. - - Args: - host_port: Port number to use on localhost - device_port: Port number to use on the android device. - - Returns: - Forwarded port on host as int or command output string on error - """ - if self._ssh_connection: - # We have to hop through a remote host first. - # 1) Find some free port on the remote host's localhost - # 2) Setup forwarding between that remote port and the requested - # device port - remote_port = self._ssh_connection.find_free_port() - host_port = self._ssh_connection.create_ssh_tunnel( - remote_port, local_port=host_port - ) - output = self.forward( - "tcp:%d tcp:%d" % (host_port, device_port), ignore_status=True - ) - # If hinted_port is 0, the output will be the selected port. 
- # Otherwise, there will be no output upon successfully - # forwarding the hinted port. - if not output: - return host_port - try: - output_int = int(output) - except ValueError: - return output - return output_int - - def remove_tcp_forward(self, host_port): - """Stop tcp forwarding a port from localhost to this android device. - - Args: - host_port: Port number to use on localhost - """ - if self._ssh_connection: - remote_port = self._ssh_connection.close_ssh_tunnel(host_port) - if remote_port is None: - logging.warning( - "Cannot close unknown forwarded tcp port: %d", host_port - ) - return - # The actual port we need to disable via adb is on the remote host. - host_port = remote_port - self.forward("--remove tcp:%d" % host_port) - - def getprop(self, prop_name): - """Get a property of the device. - - This is a convenience wrapper for "adb shell getprop xxx". - - Args: - prop_name: A string that is the name of the property to get. - - Returns: - A string that is the value of the property, or None if the property - doesn't exist. - """ - return self.shell("getprop %s" % prop_name) - - # TODO: This should be abstracted out into an object like the other shell - # command. - def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT): - return self._exec_adb_cmd( - "shell", shlex.quote(command), ignore_status=ignore_status, timeout=timeout - ) - - def shell_nb(self, command): - return self._exec_adb_cmd_nb("shell", shlex.quote(command)) - - def __getattr__(self, name): - def adb_call(*args, **kwargs): - clean_name = name.replace("_", "-") - if clean_name in ["pull", "push", "remount"] and "timeout" not in kwargs: - kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT - arg_str = " ".join(str(elem) for elem in args) - return self._exec_adb_cmd(clean_name, arg_str, **kwargs) - - return adb_call - - def get_version_number(self): - """Returns the version number of ADB as an int (XX in 1.0.XX). - - Raises: - AdbError if the version number is not found/parsable. 
- """ - version_output = self.version() - match = re.search(ADB_VERSION_REGEX, version_output) - - if not match: - logging.error( - "Unable to capture ADB version from adb version " - "output: %s" % version_output - ) - raise AdbError("adb version", version_output, "", "") - return int(match.group(1))
diff --git a/src/antlion/controllers/adb_lib/__init__.py b/src/antlion/controllers/adb_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/adb_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/adb_lib/error.py b/src/antlion/controllers/adb_lib/error.py deleted file mode 100644 index 9599214..0000000 --- a/src/antlion/controllers/adb_lib/error.py +++ /dev/null
@@ -1,40 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import error - - -class AdbError(error.ActsError): - """Raised when there is an error in adb operations.""" - - def __init__(self, cmd, stdout, stderr, ret_code): - super().__init__() - self.cmd = cmd - self.stdout = stdout - self.stderr = stderr - self.ret_code = ret_code - - def __str__(self): - return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s") % ( - self.cmd, - self.ret_code, - self.stdout, - self.stderr, - ) - - -class AdbCommandError(AdbError): - """Raised when there is an error in the command being run through ADB."""
diff --git a/src/antlion/controllers/android_device.py b/src/antlion/controllers/android_device.py deleted file mode 100755 index 0eb0969..0000000 --- a/src/antlion/controllers/android_device.py +++ /dev/null
@@ -1,1823 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import logging -import math -import os -import re -import shutil -import socket -import time -from datetime import datetime - -from antlion import context -from antlion import logger as acts_logger -from antlion import tracelogger -from antlion import utils -from antlion.controllers import adb -from antlion.controllers.adb_lib.error import AdbError -from antlion.controllers import fastboot -from antlion.controllers.android_lib import errors -from antlion.controllers.android_lib import events as android_events -from antlion.controllers.android_lib import logcat -from antlion.controllers.android_lib import services -from antlion.controllers.sl4a_lib import sl4a_manager -from antlion.controllers.utils_lib.ssh import connection -from antlion.controllers.utils_lib.ssh import settings -from antlion.event import event_bus -from antlion.libs.proc import job - -MOBLY_CONTROLLER_CONFIG_NAME = "AndroidDevice" -ACTS_CONTROLLER_REFERENCE_NAME = "android_devices" - -ANDROID_DEVICE_PICK_ALL_TOKEN = "*" -# Key name for SL4A extra params in config file -ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY = "sl4a_client_port" -ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY = "sl4a_forwarded_port" -ANDROID_DEVICE_SL4A_SERVER_PORT_KEY = "sl4a_server_port" -# Key name for adb logcat extra params in config file. 
-ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param" -ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!" -ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!" -CRASH_REPORT_PATHS = ( - "/data/tombstones/", - "/data/vendor/ramdump/", - "/data/ramdump/", - "/data/vendor/ssrdump", - "/data/vendor/ramdump/bluetooth", - "/data/vendor/log/cbd", -) -CRASH_REPORT_SKIPS = ( - "RAMDUMP_RESERVED", - "RAMDUMP_STATUS", - "RAMDUMP_OUTPUT", - "bluetooth", -) -ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on" -DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs" -DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/" -DEFAULT_SCREENSHOT_PATH = "/sdcard/Pictures/screencap" -BUG_REPORT_TIMEOUT = 1800 -PULL_TIMEOUT = 300 -PORT_RETRY_COUNT = 3 -ADB_ROOT_RETRY_COUNT = 2 -ADB_ROOT_RETRY_INTERVAL = 10 -IPERF_TIMEOUT = 60 -SL4A_APK_NAME = "com.googlecode.android_scripting" -WAIT_FOR_DEVICE_TIMEOUT = 180 -ENCRYPTION_WINDOW = "CryptKeeper" -DEFAULT_DEVICE_PASSWORD = "1111" -RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")] - - -def create(configs): - """Creates AndroidDevice controller objects. - - Args: - configs: A list of dicts, each representing a configuration for an - Android device. - - Returns: - A list of AndroidDevice objects. - """ - if not configs: - raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG) - elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN: - ads = get_all_instances() - elif not isinstance(configs, list): - raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG) - elif isinstance(configs[0], str): - # Configs is a list of serials. - ads = get_instances(configs) - else: - # Configs is a list of dicts. - ads = get_instances_with_configs(configs) - - ads[0].log.info('The primary device under test is "%s".' 
% ads[0].serial) - - for ad in ads: - if not ad.is_connected(): - raise errors.AndroidDeviceError( - ("Android device %s is specified in config" " but is not attached.") - % ad.serial, - serial=ad.serial, - ) - _start_services_on_ads(ads) - for ad in ads: - if ad.droid: - utils.set_location_service(ad, False) - utils.sync_device_time(ad) - return ads - - -def destroy(ads): - """Cleans up AndroidDevice objects. - - Args: - ads: A list of AndroidDevice objects. - """ - for ad in ads: - try: - ad.clean_up() - except: - ad.log.exception("Failed to clean up properly.") - - -def get_info(ads): - """Get information on a list of AndroidDevice objects. - - Args: - ads: A list of AndroidDevice objects. - - Returns: - A list of dict, each representing info for an AndroidDevice objects. - """ - device_info = [] - for ad in ads: - info = {"serial": ad.serial, "model": ad.model} - info.update(ad.build_info) - device_info.append(info) - return device_info - - -def _start_services_on_ads(ads): - """Starts long running services on multiple AndroidDevice objects. - - If any one AndroidDevice object fails to start services, cleans up all - existing AndroidDevice objects and their services. - - Args: - ads: A list of AndroidDevice objects whose services to start. - """ - running_ads = [] - for ad in ads: - running_ads.append(ad) - try: - ad.start_services() - except: - ad.log.exception("Failed to start some services, abort!") - destroy(running_ads) - raise - - -def _parse_device_list(device_list_str, key): - """Parses a byte string representing a list of devices. The string is - generated by calling either adb or fastboot. - - Args: - device_list_str: Output of adb or fastboot. - key: The token that signifies a device in device_list_str. - - Returns: - A list of android device serial numbers. - """ - return re.findall(r"(\S+)\t%s" % key, device_list_str) - - -def list_adb_devices(): - """List all android devices connected to the computer that are detected by - adb. 
- - Returns: - A list of android device serials. Empty if there's none. - """ - out = adb.AdbProxy().devices() - return _parse_device_list(out, "device") - - -def list_fastboot_devices(): - """List all android devices connected to the computer that are in in - fastboot mode. These are detected by fastboot. - - Returns: - A list of android device serials. Empty if there's none. - """ - out = fastboot.FastbootProxy().devices() - return _parse_device_list(out, "fastboot") - - -def get_instances(serials): - """Create AndroidDevice instances from a list of serials. - - Args: - serials: A list of android device serials. - - Returns: - A list of AndroidDevice objects. - """ - results = [] - for s in serials: - results.append(AndroidDevice(s)) - return results - - -def get_instances_with_configs(configs): - """Create AndroidDevice instances from a list of json configs. - - Each config should have the required key-value pair "serial". - - Args: - configs: A list of dicts each representing the configuration of one - android device. - - Returns: - A list of AndroidDevice objects. - """ - results = [] - for c in configs: - try: - serial = c.pop("serial") - except KeyError: - raise errors.AndroidDeviceConfigError( - "Required value 'serial' is missing in AndroidDevice config %s." 
% c - ) - client_port = 0 - if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c: - try: - client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY)) - except ValueError: - raise errors.AndroidDeviceConfigError( - "'%s' is not a valid number for config %s" - % (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c) - ) - server_port = None - if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c: - try: - server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY)) - except ValueError: - raise errors.AndroidDeviceConfigError( - "'%s' is not a valid number for config %s" - % (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c) - ) - forwarded_port = 0 - if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c: - try: - forwarded_port = int(c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY)) - except ValueError: - raise errors.AndroidDeviceConfigError( - "'%s' is not a valid number for config %s" - % (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c) - ) - ssh_config = c.pop("ssh_config", None) - ssh_connection = None - if ssh_config is not None: - ssh_settings = settings.from_config(ssh_config) - ssh_connection = connection.SshConnection(ssh_settings) - ad = AndroidDevice( - serial, - ssh_connection=ssh_connection, - client_port=client_port, - forwarded_port=forwarded_port, - server_port=server_port, - ) - ad.load_config(c) - results.append(ad) - return results - - -def get_all_instances(include_fastboot=False): - """Create AndroidDevice instances for all attached android devices. - - Args: - include_fastboot: Whether to include devices in bootloader mode or not. - - Returns: - A list of AndroidDevice objects each representing an android device - attached to the computer. - """ - if include_fastboot: - serial_list = list_adb_devices() + list_fastboot_devices() - return get_instances(serial_list) - return get_instances(list_adb_devices()) - - -def filter_devices(ads, func): - """Finds the AndroidDevice instances from a list that match certain - conditions. - - Args: - ads: A list of AndroidDevice instances. 
- func: A function that takes an AndroidDevice object and returns True - if the device satisfies the filter condition. - - Returns: - A list of AndroidDevice instances that satisfy the filter condition. - """ - results = [] - for ad in ads: - if func(ad): - results.append(ad) - return results - - -def get_device(ads, **kwargs): - """Finds a unique AndroidDevice instance from a list that has specific - attributes of certain values. - - Example: - get_device(android_devices, label="foo", phone_number="1234567890") - get_device(android_devices, model="angler") - - Args: - ads: A list of AndroidDevice instances. - kwargs: keyword arguments used to filter AndroidDevice instances. - - Returns: - The target AndroidDevice instance. - - Raises: - AndroidDeviceError is raised if none or more than one device is - matched. - """ - - def _get_device_filter(ad): - for k, v in kwargs.items(): - if not hasattr(ad, k): - return False - elif getattr(ad, k) != v: - return False - return True - - filtered = filter_devices(ads, _get_device_filter) - if not filtered: - raise ValueError( - "Could not find a target device that matches condition: %s." % kwargs - ) - elif len(filtered) == 1: - return filtered[0] - else: - serials = [ad.serial for ad in filtered] - raise ValueError("More than one device matched: %s" % serials) - - -def take_bug_reports(ads, test_name, begin_time): - """Takes bug reports on a list of android devices. - - If you want to take a bug report, call this function with a list of - android_device objects in on_fail. But reports will be taken on all the - devices in the list concurrently. Bug report takes a relative long - time to take, so use this cautiously. - - Args: - ads: A list of AndroidDevice instances. - test_name: Name of the test case that triggered this bug report. - begin_time: Logline format timestamp taken when the test started. 
- """ - - def take_br(test_name, begin_time, ad): - ad.take_bug_report(test_name, begin_time) - - args = [(test_name, begin_time, ad) for ad in ads] - utils.concurrent_exec(take_br, args) - - -class AndroidDevice: - """Class representing an android device. - - Each object of this class represents one Android device in ACTS, including - handles to adb, fastboot, and sl4a clients. In addition to direct adb - commands, this object also uses adb port forwarding to talk to the Android - device. - - Attributes: - serial: A string that's the serial number of the Android device. - log_path: A string that is the path where all logs collected on this - android device should be stored. - log: A logger adapted from root logger with added token specific to an - AndroidDevice instance. - adb_logcat_process: A process that collects the adb logcat. - adb: An AdbProxy object used for interacting with the device via adb. - fastboot: A FastbootProxy object used for interacting with the device - via fastboot. - client_port: Preferred client port number on the PC host side for SL4A - forwarded_port: Preferred server port number forwarded from Android - to the host PC via adb for SL4A connections - server_port: Preferred server port used by SL4A on Android device - - """ - - def __init__( - self, - serial="", - ssh_connection=None, - client_port=0, - forwarded_port=0, - server_port=None, - ): - self.serial = serial - # logging.log_path only exists when this is used in an ACTS test run. 
- log_path_base = getattr(logging, "log_path", "/tmp/logs") - self.log_dir = "AndroidDevice%s" % serial - self.log_path = os.path.join(log_path_base, self.log_dir) - self.client_port = client_port - self.forwarded_port = forwarded_port - self.server_port = server_port - self.log = tracelogger.TraceLogger( - AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial}) - ) - self._event_dispatchers = {} - self._services = [] - self.register_service(services.AdbLogcatService(self)) - self.register_service(services.Sl4aService(self)) - self.adb_logcat_process = None - self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection) - self.fastboot = fastboot.FastbootProxy(serial, ssh_connection=ssh_connection) - if not self.is_bootloader: - self.root_adb() - self._ssh_connection = ssh_connection - self.skip_sl4a = False - self.crash_report = None - self.data_accounting = collections.defaultdict(int) - self._sl4a_manager = sl4a_manager.create_sl4a_manager(self.adb) - self.last_logcat_timestamp = None - # Device info cache. - self._user_added_device_info = {} - self._sdk_api_level = None - - def clean_up(self): - """Cleans up the AndroidDevice object and releases any resources it - claimed. - """ - self.stop_services() - for service in self._services: - service.unregister() - self._services.clear() - if self._ssh_connection: - self._ssh_connection.close() - - def recreate_services(self, serial): - """Clean up the AndroidDevice object and re-create adb/sl4a services. 
- - Unregister the existing services and re-create adb and sl4a services, - call this method when the connection break after certain API call - (e.g., enable USB tethering by #startTethering) - - Args: - serial: the serial number of the AndroidDevice - """ - # Clean the old services - for service in self._services: - service.unregister() - self._services.clear() - if self._ssh_connection: - self._ssh_connection.close() - self._sl4a_manager.stop_service() - - # Wait for old services to stop - time.sleep(5) - - # Re-create the new adb and sl4a services - self.register_service(services.AdbLogcatService(self)) - self.register_service(services.Sl4aService(self)) - self.adb.wait_for_device() - self.terminate_all_sessions() - self.start_services() - - def register_service(self, service): - """Registers the service on the device.""" - service.register() - self._services.append(service) - - # TODO(angli): This function shall be refactored to accommodate all services - # and not have hard coded switch for SL4A when b/29157104 is done. - def start_services(self, skip_setup_wizard=True): - """Starts long running services on the android device. - - 1. Start adb logcat capture. - 2. Start SL4A if not skipped. - - Args: - skip_setup_wizard: Whether or not to skip the setup wizard. - """ - if skip_setup_wizard: - self.exit_setup_wizard() - - event_bus.post(android_events.AndroidStartServicesEvent(self)) - - def stop_services(self): - """Stops long running services on the android device. - - Stop adb logcat and terminate sl4a sessions if exist. - """ - event_bus.post( - android_events.AndroidStopServicesEvent(self), ignore_errors=True - ) - - def is_connected(self): - out = self.adb.devices() - devices = _parse_device_list(out, "device") - return self.serial in devices - - @property - def build_info(self): - """Get the build info of this Android device, including build id and - build type. - - This is not available if the device is in bootloader mode. 
- - Returns: - A dict with the build info of this Android device, or None if the - device is in bootloader mode. - """ - if self.is_bootloader: - self.log.error("Device is in fastboot mode, could not get build " "info.") - return - - build_id = self.adb.getprop("ro.build.id") - incremental_build_id = self.adb.getprop("ro.build.version.incremental") - valid_build_id = False - for regex in RELEASE_ID_REGEXES: - if re.match(regex, build_id): - valid_build_id = True - break - if not valid_build_id: - build_id = incremental_build_id - - info = { - "build_id": build_id, - "incremental_build_id": incremental_build_id, - "build_type": self.adb.getprop("ro.build.type"), - } - return info - - @property - def device_info(self): - """Information to be pulled into controller info. - - The latest serial, model, and build_info are included. Additional info - can be added via `add_device_info`. - """ - info = { - "serial": self.serial, - "model": self.model, - "build_info": self.build_info, - "user_added_info": self._user_added_device_info, - "flavor": self.flavor, - } - return info - - def add_device_info(self, name, info): - """Add custom device info to the user_added_info section. - - Adding the same info name the second time will override existing info. - - Args: - name: string, name of this info. - info: serializable, content of the info. - """ - self._user_added_device_info.update({name: info}) - - def sdk_api_level(self): - if self._sdk_api_level is not None: - return self._sdk_api_level - if self.is_bootloader: - self.log.error("Device is in fastboot mode. 
Cannot get build info.") - return - self._sdk_api_level = int(self.adb.shell("getprop ro.build.version.sdk")) - return self._sdk_api_level - - @property - def is_bootloader(self): - """True if the device is in bootloader mode.""" - return self.serial in list_fastboot_devices() - - @property - def is_adb_root(self): - """True if adb is running as root for this device.""" - try: - return "0" == self.adb.shell("id -u") - except AdbError: - # Wait a bit and retry to work around adb flakiness for this cmd. - time.sleep(0.2) - return "0" == self.adb.shell("id -u") - - @property - def model(self): - """The Android code name for the device.""" - # If device is in bootloader mode, get mode name from fastboot. - if self.is_bootloader: - out = self.fastboot.getvar("product").strip() - # "out" is never empty because of the "total time" message fastboot - # writes to stderr. - lines = out.split("\n", 1) - if lines: - tokens = lines[0].split(" ") - if len(tokens) > 1: - return tokens[1].lower() - return None - model = self.adb.getprop("ro.build.product").lower() - if model == "sprout": - return model - else: - return self.adb.getprop("ro.product.name").lower() - - @property - def flavor(self): - """Returns the specific flavor of Android build the device is using.""" - return self.adb.getprop("ro.build.flavor").lower() - - @property - def droid(self): - """Returns the RPC Service of the first Sl4aSession created.""" - if len(self._sl4a_manager.sessions) > 0: - session_id = sorted(self._sl4a_manager.sessions.keys())[0] - return self._sl4a_manager.sessions[session_id].rpc_client - else: - return None - - @property - def ed(self): - """Returns the event dispatcher of the first Sl4aSession created.""" - if len(self._sl4a_manager.sessions) > 0: - session_id = sorted(self._sl4a_manager.sessions.keys())[0] - return self._sl4a_manager.sessions[session_id].get_event_dispatcher() - else: - return None - - @property - def sl4a_sessions(self): - """Returns a dictionary of session ids to 
sessions.""" - return list(self._sl4a_manager.sessions) - - @property - def is_adb_logcat_on(self): - """Whether there is an ongoing adb logcat collection.""" - if self.adb_logcat_process: - if self.adb_logcat_process.is_running(): - return True - else: - # if skip_sl4a is true, there is no sl4a session - # if logcat died due to device reboot and sl4a session has - # not restarted there is no droid. - if self.droid: - self.droid.logI("Logcat died") - self.log.info("Logcat to %s died", self.log_path) - return False - return False - - @property - def device_log_path(self): - """Returns the directory for all Android device logs for the current - test context and serial. - """ - return context.get_current_context().get_full_output_path(self.serial) - - def update_sdk_api_level(self): - self._sdk_api_level = None - self.sdk_api_level() - - def load_config(self, config): - """Add attributes to the AndroidDevice object based on json config. - - Args: - config: A dictionary representing the configs. - - Raises: - AndroidDeviceError is raised if the config is trying to overwrite - an existing attribute. - """ - for k, v in config.items(): - # skip_sl4a value can be reset from config file - if hasattr(self, k) and k != "skip_sl4a": - raise errors.AndroidDeviceError( - "Attempting to set existing attribute %s on %s" % (k, self.serial), - serial=self.serial, - ) - setattr(self, k, v) - - def root_adb(self): - """Change adb to root mode for this device if allowed. - - If executed on a production build, adb will not be switched to root - mode per security restrictions. - """ - if self.is_adb_root: - return - - for attempt in range(ADB_ROOT_RETRY_COUNT): - try: - self.log.debug("Enabling ADB root mode: attempt %d." % attempt) - self.adb.root() - except AdbError: - if attempt == ADB_ROOT_RETRY_COUNT: - raise - time.sleep(ADB_ROOT_RETRY_INTERVAL) - self.adb.wait_for_device() - - def get_droid(self, handle_event=True): - """Create an sl4a connection to the device. 
- - Return the connection handler 'droid'. By default, another connection - on the same session is made for EventDispatcher, and the dispatcher is - returned to the caller as well. - If sl4a server is not started on the device, try to start it. - - Args: - handle_event: True if this droid session will need to handle - events. - - Returns: - droid: Android object used to communicate with sl4a on the android - device. - ed: An optional EventDispatcher to organize events for this droid. - - Examples: - Don't need event handling: - >>> ad = AndroidDevice() - >>> droid = ad.get_droid(False) - - Need event handling: - >>> ad = AndroidDevice() - >>> droid, ed = ad.get_droid() - """ - self.log.debug( - "Creating RPC client_port={}, forwarded_port={}, server_port={}".format( - self.client_port, self.forwarded_port, self.server_port - ) - ) - session = self._sl4a_manager.create_session( - client_port=self.client_port, - forwarded_port=self.forwarded_port, - server_port=self.server_port, - ) - droid = session.rpc_client - if handle_event: - ed = session.get_event_dispatcher() - return droid, ed - return droid - - def get_package_pid(self, package_name): - """Gets the pid for a given package. Returns None if not running. - Args: - package_name: The name of the package. - Returns: - The first pid found under a given package name. None if no process - was found running the package. - Raises: - AndroidDeviceError if the output of the phone's process list was - in an unexpected format. - """ - for cmd in ("ps -A", "ps"): - try: - out = self.adb.shell( - '%s | grep "S %s"' % (cmd, package_name), ignore_status=True - ) - if package_name not in out: - continue - try: - pid = int(out.split()[1]) - self.log.info("apk %s has pid %s.", package_name, pid) - return pid - except (IndexError, ValueError) as e: - # Possible ValueError from string to int cast. - # Possible IndexError from split. 
- self.log.warning( - 'Command "%s" returned output line: ' '"%s".\nError: %s', - cmd, - out, - e, - ) - except Exception as e: - self.log.warning( - 'Device fails to check if %s running with "%s"\n' "Exception %s", - package_name, - cmd, - e, - ) - self.log.debug("apk %s is not running", package_name) - return None - - def get_dispatcher(self, droid): - """Return an EventDispatcher for an sl4a session - - Args: - droid: Session to create EventDispatcher for. - - Returns: - ed: An EventDispatcher for specified session. - """ - return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher() - - def _is_timestamp_in_range(self, target, log_begin_time, log_end_time): - low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0 - high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0 - return low and high - - def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"): - """Takes an excerpt of the adb logcat log from a certain time point to - current time. - - Args: - tag: An identifier of the time period, usually the name of a test. - begin_time: Epoch time of the beginning of the time period. - end_time: Epoch time of the ending of the time period, default None - dest_path: Destination path of the excerpt file. - """ - log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time) - if end_time is None: - log_end_time = acts_logger.get_log_line_timestamp() - else: - log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time) - self.log.debug("Extracting adb log from logcat.") - logcat_path = os.path.join( - self.device_log_path, "adblog_%s_debug.txt" % self.serial - ) - if not os.path.exists(logcat_path): - self.log.warning("Logcat file %s does not exist." 
% logcat_path) - return - adb_excerpt_dir = os.path.join(self.log_path, dest_path) - os.makedirs(adb_excerpt_dir, exist_ok=True) - out_name = "%s,%s.txt" % ( - acts_logger.normalize_log_line_timestamp(log_begin_time), - self.serial, - ) - tag_len = utils.MAX_FILENAME_LEN - len(out_name) - out_name = "%s,%s" % (tag[:tag_len], out_name) - adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name) - with open(adb_excerpt_path, "w", encoding="utf-8") as out: - in_file = logcat_path - with open(in_file, "r", encoding="utf-8", errors="replace") as f: - while True: - line = None - try: - line = f.readline() - if not line: - break - except: - continue - line_time = line[: acts_logger.log_line_timestamp_len] - if not acts_logger.is_valid_logline_timestamp(line_time): - continue - if self._is_timestamp_in_range( - line_time, log_begin_time, log_end_time - ): - if not line.endswith("\n"): - line += "\n" - out.write(line) - return adb_excerpt_path - - def search_logcat( - self, matching_string, begin_time=None, end_time=None, logcat_path=None - ): - """Search logcat message with given string. - - Args: - matching_string: matching_string to search. - begin_time: only the lines with time stamps later than begin_time - will be searched. - end_time: only the lines with time stamps earlier than end_time - will be searched. - logcat_path: the path of a specific file in which the search should - be performed. If None the path will be the default device log - path. - - Returns: - A list of dictionaries with full log message, time stamp string, - time object and message ID. 
For example: - [{"log_message": "05-03 17:39:29.898 968 1001 D" - "ActivityManager: Sending BOOT_COMPLETE user #0", - "time_stamp": "2017-05-03 17:39:29.898", - "datetime_obj": datetime object, - "message_id": None}] - - [{"log_message": "08-12 14:26:42.611043 2360 2510 D RILJ : " - "[0853]< DEACTIVATE_DATA_CALL [PHONE0]", - "time_stamp": "2020-08-12 14:26:42.611043", - "datetime_obj": datetime object}, - "message_id": "0853"}] - """ - if not logcat_path: - logcat_path = os.path.join( - self.device_log_path, "adblog_%s_debug.txt" % self.serial - ) - if not os.path.exists(logcat_path): - self.log.warning("Logcat file %s does not exist." % logcat_path) - return - output = job.run( - "grep '%s' %s" % (matching_string, logcat_path), ignore_status=True - ) - if not output.stdout or output.exit_status != 0: - return [] - if begin_time: - if not isinstance(begin_time, datetime): - log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time) - begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f") - if end_time: - if not isinstance(end_time, datetime): - log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time) - end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f") - result = [] - logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout) - for log in logs: - time_stamp = log[0] - time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f") - - if begin_time and time_obj < begin_time: - continue - - if end_time and time_obj > end_time: - continue - - res = re.findall(r".*\[(\d+)\]", log[1]) - try: - message_id = res[0] - except: - message_id = None - - result.append( - { - "log_message": "".join(log), - "time_stamp": time_stamp, - "datetime_obj": time_obj, - "message_id": message_id, - } - ) - return result - - def start_adb_logcat(self): - """Starts a standing adb logcat collection in separate subprocesses and - save the logcat in a file. 
- """ - if self.is_adb_logcat_on: - self.log.warning( - "Android device %s already has a running adb logcat thread. " - % self.serial - ) - return - # Disable adb log spam filter. Have to stop and clear settings first - # because 'start' doesn't support --clear option before Android N. - self.adb.shell("logpersist.stop --clear", ignore_status=True) - self.adb.shell("logpersist.start", ignore_status=True) - if hasattr(self, "adb_logcat_param"): - extra_params = self.adb_logcat_param - else: - extra_params = "-b all" - - self.adb_logcat_process = logcat.create_logcat_keepalive_process( - self.serial, self.log_dir, extra_params - ) - self.adb_logcat_process.start() - - def stop_adb_logcat(self): - """Stops the adb logcat collection subprocess.""" - if not self.is_adb_logcat_on: - self.log.warning( - "Android device %s does not have an ongoing adb logcat " % self.serial - ) - return - # Set the last timestamp to the current timestamp. This may cause - # a race condition that allows the same line to be logged twice, - # but it does not pose a problem for our logging purposes. - self.adb_logcat_process.stop() - self.adb_logcat_process = None - - def get_apk_uid(self, apk_name): - """Get the uid of the given apk. - - Args: - apk_name: Name of the package, e.g., com.android.phone. - - Returns: - Linux UID for the apk. - """ - output = self.adb.shell( - "dumpsys package %s | grep userId=" % apk_name, ignore_status=True - ) - result = re.search(r"userId=(\d+)", output) - if result: - return result.group(1) - else: - None - - def get_apk_version(self, package_name): - """Get the version of the given apk. - - Args: - package_name: Name of the package, e.g., com.android.phone. - - Returns: - Version of the given apk. 
- """ - try: - output = self.adb.shell( - "dumpsys package %s | grep versionName" % package_name - ) - pattern = re.compile(r"versionName=(.+)", re.I) - result = pattern.findall(output) - if result: - return result[0] - except Exception as e: - self.log.warning( - "Fail to get the version of package %s: %s", package_name, e - ) - self.log.debug("apk %s is not found", package_name) - return None - - def is_apk_installed(self, package_name): - """Check if the given apk is already installed. - - Args: - package_name: Name of the package, e.g., com.android.phone. - - Returns: - True if package is installed. False otherwise. - """ - - try: - return bool( - self.adb.shell( - '(pm list packages | grep -w "package:%s") || true' % package_name - ) - ) - - except Exception as err: - self.log.error( - "Could not determine if %s is installed. " "Received error:\n%s", - package_name, - err, - ) - return False - - def is_sl4a_installed(self): - return self.is_apk_installed(SL4A_APK_NAME) - - def is_apk_running(self, package_name): - """Check if the given apk is running. - - Args: - package_name: Name of the package, e.g., com.android.phone. - - Returns: - True if package is installed. False otherwise. - """ - for cmd in ("ps -A", "ps"): - try: - out = self.adb.shell( - '%s | grep "S %s"' % (cmd, package_name), ignore_status=True - ) - if package_name in out: - self.log.info("apk %s is running", package_name) - return True - except Exception as e: - self.log.warning( - "Device fails to check is %s running by %s " "Exception %s", - package_name, - cmd, - e, - ) - continue - self.log.debug("apk %s is not running", package_name) - return False - - def is_sl4a_running(self): - return self.is_apk_running(SL4A_APK_NAME) - - def force_stop_apk(self, package_name): - """Force stop the given apk. - - Args: - package_name: Name of the package, e.g., com.android.phone. - - Returns: - True if package is installed. False otherwise. 
- """ - try: - self.adb.shell("am force-stop %s" % package_name, ignore_status=True) - except Exception as e: - self.log.warning("Fail to stop package %s: %s", package_name, e) - - def take_bug_report(self, test_name=None, begin_time=None): - """Takes a bug report on the device and stores it in a file. - - Args: - test_name: Name of the test case that triggered this bug report. - begin_time: Epoch time when the test started. If none is specified, - the current time will be used. - """ - self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT) - new_br = True - try: - stdout = self.adb.shell("bugreportz -v") - # This check is necessary for builds before N, where adb shell's ret - # code and stderr are not propagated properly. - if "not found" in stdout: - new_br = False - except AdbError: - new_br = False - br_path = self.device_log_path - os.makedirs(br_path, exist_ok=True) - epoch = begin_time if begin_time else utils.get_current_epoch_time() - time_stamp = acts_logger.normalize_log_line_timestamp( - acts_logger.epoch_to_log_line_timestamp(epoch) - ) - out_name = "AndroidDevice%s_%s" % (self.serial, time_stamp) - out_name = "%s.zip" % out_name if new_br else "%s.txt" % out_name - full_out_path = os.path.join(br_path, out_name) - # in case device restarted, wait for adb interface to return - self.wait_for_boot_completion() - if test_name: - self.log.info("Taking bugreport for %s.", test_name) - else: - self.log.info("Taking bugreport.") - if new_br: - out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT) - if not out.startswith("OK"): - raise errors.AndroidDeviceError( - "Failed to take bugreport on %s: %s" % (self.serial, out), - serial=self.serial, - ) - br_out_path = out.split(":")[1].strip().split()[0] - self.adb.pull("%s %s" % (br_out_path, full_out_path)) - else: - self.adb.bugreport( - " > {}".format(full_out_path), timeout=BUG_REPORT_TIMEOUT - ) - if test_name: - self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path) - else: - 
self.log.info("Bugreport taken at %s.", test_name, full_out_path) - self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT) - - def get_file_names( - self, directory, begin_time=None, skip_files=[], match_string=None - ): - """Get files names with provided directory.""" - cmd = "find %s -type f" % directory - if begin_time: - current_time = utils.get_current_epoch_time() - seconds = int(math.ceil((current_time - begin_time) / 1000.0)) - cmd = "%s -mtime -%ss" % (cmd, seconds) - if match_string: - cmd = "%s -iname %s" % (cmd, match_string) - for skip_file in skip_files: - cmd = "%s ! -iname %s" % (cmd, skip_file) - out = self.adb.shell(cmd, ignore_status=True) - if ( - not out - or "No such" in out - or "Permission denied" in out - or "Not a directory" in out - ): - return [] - files = out.split("\n") - self.log.debug("Find files in directory %s: %s", directory, files) - return files - - @property - def external_storage_path(self): - """ - The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard' - """ - return self.adb.shell("echo $EXTERNAL_STORAGE") - - def file_exists(self, file_path): - """Returns whether a file exists on a device. - - Args: - file_path: The path of the file to check for. - """ - cmd = "(test -f %s && echo yes) || echo no" % file_path - result = self.adb.shell(cmd) - if result == "yes": - return True - elif result == "no": - return False - raise ValueError( - "Couldn't determine if %s exists. " - "Expected yes/no, got %s" % (file_path, result[cmd]) - ) - - def pull_files(self, device_paths, host_path=None): - """Pull files from devices. - - Args: - device_paths: List of paths on the device to pull from. 
- host_path: Destination path - """ - if isinstance(device_paths, str): - device_paths = [device_paths] - if not host_path: - host_path = self.log_path - for device_path in device_paths: - self.log.info("Pull from device: %s -> %s" % (device_path, host_path)) - self.adb.pull("%s %s" % (device_path, host_path), timeout=PULL_TIMEOUT) - - def check_crash_report( - self, test_name=None, begin_time=None, log_crash_report=False - ): - """check crash report on the device.""" - crash_reports = [] - for crash_path in CRASH_REPORT_PATHS: - try: - cmd = "cd %s" % crash_path - self.adb.shell(cmd) - except Exception as e: - self.log.debug("received exception %s", e) - continue - crashes = self.get_file_names( - crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time - ) - if crash_path == "/data/tombstones/" and crashes: - tombstones = crashes[:] - for tombstone in tombstones: - if self.adb.shell( - 'cat %s | grep "crash_dump failed to dump process"' % tombstone - ): - crashes.remove(tombstone) - if crashes: - crash_reports.extend(crashes) - if crash_reports and log_crash_report: - crash_log_path = os.path.join( - self.device_log_path, "Crashes_%s" % self.serial - ) - os.makedirs(crash_log_path, exist_ok=True) - self.pull_files(crash_reports, crash_log_path) - return crash_reports - - def get_qxdm_logs(self, test_name="", begin_time=None): - """Get qxdm logs.""" - # Sleep 10 seconds for the buffered log to be written in qxdm log file - time.sleep(10) - log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH) - qxdm_logs = self.get_file_names( - log_path, begin_time=begin_time, match_string="*.qmdl" - ) - if qxdm_logs: - qxdm_log_path = os.path.join(self.device_log_path, "QXDM_%s" % self.serial) - os.makedirs(qxdm_log_path, exist_ok=True) - - self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path) - self.pull_files(qxdm_logs, qxdm_log_path) - - self.adb.pull( - "/firmware/image/qdsp6m.qdb %s" % qxdm_log_path, - timeout=PULL_TIMEOUT, - 
ignore_status=True, - ) - # Zip Folder - utils.zip_directory("%s.zip" % qxdm_log_path, qxdm_log_path) - shutil.rmtree(qxdm_log_path) - else: - self.log.error("Didn't find QXDM logs in %s." % log_path) - if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"): - omadm_log_path = os.path.join( - self.device_log_path, "OMADM_%s" % self.serial - ) - os.makedirs(omadm_log_path, exist_ok=True) - self.log.info("Pull OMADM Log") - self.adb.pull( - "/data/data/com.android.omadm.service/files/dm/log/ %s" - % omadm_log_path, - timeout=PULL_TIMEOUT, - ignore_status=True, - ) - - def get_sdm_logs(self, test_name="", begin_time=None): - """Get sdm logs.""" - # Sleep 10 seconds for the buffered log to be written in sdm log file - time.sleep(10) - log_paths = [ - ALWAYS_ON_LOG_PATH, - getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH), - ] - sdm_logs = [] - for path in log_paths: - sdm_logs += self.get_file_names( - path, begin_time=begin_time, match_string="*.sdm*" - ) - if sdm_logs: - sdm_log_path = os.path.join(self.device_log_path, "SDM_%s" % self.serial) - os.makedirs(sdm_log_path, exist_ok=True) - self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path) - self.pull_files(sdm_logs, sdm_log_path) - else: - self.log.error("Didn't find SDM logs in %s." % log_paths) - if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"): - omadm_log_path = os.path.join( - self.device_log_path, "OMADM_%s" % self.serial - ) - os.makedirs(omadm_log_path, exist_ok=True) - self.log.info("Pull OMADM Log") - self.adb.pull( - "/data/data/com.android.omadm.service/files/dm/log/ %s" - % omadm_log_path, - timeout=PULL_TIMEOUT, - ignore_status=True, - ) - - def start_new_session(self, max_connections=None, server_port=None): - """Start a new session in sl4a. - - Also caches the droid in a dict with its uid being the key. - - Returns: - An Android object used to communicate with sl4a on the android - device. 
- - Raises: - Sl4aException: Something is wrong with sl4a and it returned an - existing uid to a new session. - """ - session = self._sl4a_manager.create_session( - max_connections=max_connections, server_port=server_port - ) - - self._sl4a_manager.sessions[session.uid] = session - return session.rpc_client - - def terminate_all_sessions(self): - """Terminate all sl4a sessions on the AndroidDevice instance. - - Terminate all sessions and clear caches. - """ - self._sl4a_manager.terminate_all_sessions() - - def run_iperf_client_nb( - self, server_host, extra_args="", timeout=IPERF_TIMEOUT, log_file_path=None - ): - """Start iperf client on the device asynchronously. - - Return status as true if iperf client start successfully. - And data flow information as results. - - Args: - server_host: Address of the iperf server. - extra_args: A string representing extra arguments for iperf client, - e.g. "-i 1 -t 30". - log_file_path: The complete file path to log the results. - - """ - cmd = "iperf3 -c {} {}".format(server_host, extra_args) - if log_file_path: - cmd += " --logfile {} &".format(log_file_path) - self.adb.shell_nb(cmd) - - def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT): - """Start iperf client on the device. - - Return status as true if iperf client start successfully. - And data flow information as results. - - Args: - server_host: Address of the iperf server. - extra_args: A string representing extra arguments for iperf client, - e.g. "-i 1 -t 30". - - Returns: - status: true if iperf client start successfully. - results: results have data flow information - """ - out = self.adb.shell( - "iperf3 -c {} {}".format(server_host, extra_args), timeout=timeout - ) - clean_out = out.split("\n") - if "error" in clean_out[0].lower(): - return False, clean_out - return True, clean_out - - def run_iperf_server(self, extra_args=""): - """Start iperf server on the device - - Return status as true if iperf server started successfully. 
- - Args: - extra_args: A string representing extra arguments for iperf server. - - Returns: - status: true if iperf server started successfully. - results: results have output of command - """ - out = self.adb.shell("iperf3 -s {}".format(extra_args)) - clean_out = out.split("\n") - if "error" in clean_out[0].lower(): - return False, clean_out - return True, clean_out - - def wait_for_boot_completion(self, timeout=900.0): - """Waits for Android framework to broadcast ACTION_BOOT_COMPLETED. - - Args: - timeout: Seconds to wait for the device to boot. Default value is - 15 minutes. - """ - timeout_start = time.time() - - self.log.debug("ADB waiting for device") - self.adb.wait_for_device(timeout=timeout) - self.log.debug("Waiting for sys.boot_completed") - while time.time() < timeout_start + timeout: - try: - completed = self.adb.getprop("sys.boot_completed") - if completed == "1": - self.log.debug("Device has rebooted") - return - except AdbError: - # adb shell calls may fail during certain period of booting - # process, which is normal. Ignoring these errors. - pass - time.sleep(5) - raise errors.AndroidDeviceError( - "Device %s booting process timed out." % self.serial, serial=self.serial - ) - - def reboot( - self, stop_at_lock_screen=False, timeout=180, wait_after_reboot_complete=1 - ): - """Reboots the device. - - Terminate all sl4a sessions, reboot the device, wait for device to - complete booting, and restart an sl4a session if restart_sl4a is True. - - Args: - stop_at_lock_screen: whether to unlock after reboot. Set to False - if want to bring the device to reboot up to password locking - phase. Sl4a checking need the device unlocked after rebooting. - timeout: time in seconds to wait for the device to complete - rebooting. - wait_after_reboot_complete: time in seconds to wait after the boot - completion. 
- """ - if self.is_bootloader: - self.fastboot.reboot() - return - self.stop_services() - self.log.info("Rebooting") - self.adb.reboot() - - timeout_start = time.time() - # b/111791239: Newer versions of android sometimes return early after - # `adb reboot` is called. This means subsequent calls may make it to - # the device before the reboot goes through, return false positives for - # getprops such as sys.boot_completed. - while time.time() < timeout_start + timeout: - try: - self.adb.get_state() - time.sleep(0.1) - except AdbError: - # get_state will raise an error if the device is not found. We - # want the device to be missing to prove the device has kicked - # off the reboot. - break - self.wait_for_boot_completion(timeout=(timeout - time.time() + timeout_start)) - - self.log.debug("Wait for a while after boot completion.") - time.sleep(wait_after_reboot_complete) - self.root_adb() - skip_sl4a = self.skip_sl4a - self.skip_sl4a = self.skip_sl4a or stop_at_lock_screen - self.start_services() - self.skip_sl4a = skip_sl4a - - def restart_runtime(self): - """Restarts android runtime. - - Terminate all sl4a sessions, restarts runtime, wait for framework - complete restart, and restart an sl4a session if restart_sl4a is True. - """ - self.stop_services() - self.log.info("Restarting android runtime") - self.adb.shell("stop") - # Reset the boot completed flag before we restart the framework - # to correctly detect when the framework has fully come up. - self.adb.shell("setprop sys.boot_completed 0") - self.adb.shell("start") - self.wait_for_boot_completion() - self.root_adb() - - self.start_services() - - def get_ipv4_address(self, interface="wlan0", timeout=5): - for timer in range(0, timeout): - try: - ip_string = self.adb.shell("ifconfig %s|grep inet" % interface) - break - except adb.AdbError as e: - if timer + 1 == timeout: - self.log.warning("Unable to find IP address for %s." 
% interface) - return None - else: - time.sleep(1) - result = re.search("addr:(.*) Bcast", ip_string) - if result != None: - ip_address = result.group(1) - try: - socket.inet_aton(ip_address) - return ip_address - except socket.error: - return None - else: - return None - - def get_ipv4_gateway(self, timeout=5): - for timer in range(0, timeout): - try: - gateway_string = self.adb.shell("dumpsys wifi | grep mDhcpResults") - break - except adb.AdbError as e: - if timer + 1 == timeout: - self.log.warning("Unable to find gateway") - return None - else: - time.sleep(1) - result = re.search("Gateway (.*) DNS servers", gateway_string) - if result != None: - ipv4_gateway = result.group(1) - try: - socket.inet_aton(ipv4_gateway) - return ipv4_gateway - except socket.error: - return None - else: - return None - - def send_keycode(self, keycode): - self.adb.shell("input keyevent KEYCODE_%s" % keycode) - - def get_my_current_focus_window(self): - """Get the current focus window on screen""" - output = self.adb.shell( - "dumpsys window displays | grep -E mCurrentFocus | grep -v null", - ignore_status=True, - ) - if not output or "not found" in output or "Can't find" in output: - result = "" - else: - result = output.split(" ")[-1].strip("}") - self.log.debug("Current focus window is %s", result) - return result - - def get_my_current_focus_app(self): - """Get the current focus application""" - dumpsys_cmd = [ - "dumpsys window | grep -E mFocusedApp", - "dumpsys window displays | grep -E mFocusedApp", - ] - for cmd in dumpsys_cmd: - output = self.adb.shell(cmd, ignore_status=True) - if ( - not output - or "not found" in output - or "Can't find" in output - or ("mFocusedApp=null" in output) - ): - result = "" - else: - result = output.split(" ")[-2] - break - self.log.debug("Current focus app is %s", result) - return result - - def is_window_ready(self, window_name=None): - current_window = self.get_my_current_focus_window() - if window_name: - return window_name in 
current_window - return current_window and ENCRYPTION_WINDOW not in current_window - - def wait_for_window_ready( - self, window_name=None, check_interval=5, check_duration=60 - ): - elapsed_time = 0 - while elapsed_time < check_duration: - if self.is_window_ready(window_name=window_name): - return True - time.sleep(check_interval) - elapsed_time += check_interval - self.log.info("Current focus window is %s", self.get_my_current_focus_window()) - return False - - def is_user_setup_complete(self): - return "1" in self.adb.shell("settings get secure user_setup_complete") - - def is_screen_awake(self): - """Check if device screen is in sleep mode""" - return "Awake" in self.adb.shell("dumpsys power | grep mWakefulness=") - - def is_screen_emergency_dialer(self): - """Check if device screen is in emergency dialer mode""" - return "EmergencyDialer" in self.get_my_current_focus_window() - - def is_screen_in_call_activity(self): - """Check if device screen is in in-call activity notification""" - return "InCallActivity" in self.get_my_current_focus_window() - - def is_setupwizard_on(self): - """Check if device screen is in emergency dialer mode""" - return "setupwizard" in self.get_my_current_focus_app() - - def is_screen_lock_enabled(self): - """Check if screen lock is enabled""" - cmd = "dumpsys window policy | grep showing=" - out = self.adb.shell(cmd, ignore_status=True) - return "true" in out - - def is_waiting_for_unlock_pin(self): - """Check if device is waiting for unlock pin to boot up""" - current_window = self.get_my_current_focus_window() - current_app = self.get_my_current_focus_app() - if ENCRYPTION_WINDOW in current_window: - self.log.info("Device is in CrpytKeeper window") - return True - if "StatusBar" in current_window and ( - (not current_app) or "FallbackHome" in current_app - ): - self.log.info("Device is locked") - return True - return False - - def ensure_screen_on(self): - """Ensure device screen is powered on""" - if self.is_screen_lock_enabled(): 
- for _ in range(2): - self.unlock_screen() - time.sleep(1) - if self.is_waiting_for_unlock_pin(): - self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD) - time.sleep(1) - if ( - not self.is_waiting_for_unlock_pin() - and self.wait_for_window_ready() - ): - return True - return False - else: - self.wakeup_screen() - return True - - def wakeup_screen(self): - if not self.is_screen_awake(): - self.log.info("Screen is not awake, wake it up") - self.send_keycode("WAKEUP") - - def go_to_sleep(self): - if self.is_screen_awake(): - self.send_keycode("SLEEP") - - def send_keycode_number_pad(self, number): - self.send_keycode("NUMPAD_%s" % number) - - def unlock_screen(self, password=None): - self.log.info("Unlocking with %s", password or "swipe up") - # Bring device to SLEEP so that unlock process can start fresh - self.send_keycode("SLEEP") - time.sleep(1) - self.send_keycode("WAKEUP") - if ENCRYPTION_WINDOW not in self.get_my_current_focus_app(): - self.send_keycode("MENU") - if password: - self.send_keycode("DEL") - for number in password: - self.send_keycode_number_pad(number) - self.send_keycode("ENTER") - self.send_keycode("BACK") - - def screenshot(self, name=""): - """Take a screenshot on the device. - - Args: - name: additional information of screenshot on the file name. - """ - if name: - file_name = "%s_%s" % (DEFAULT_SCREENSHOT_PATH, name) - file_name = "%s_%s.png" % (file_name, utils.get_current_epoch_time()) - self.ensure_screen_on() - self.log.info("Log screenshot to %s", file_name) - try: - self.adb.shell("screencap -p %s" % file_name) - except: - self.log.error("Fail to log screenshot to %s", file_name) - - def exit_setup_wizard(self): - # Handling Android TV's setupwizard is ignored for now. 
- if "feature:android.hardware.type.television" in self.adb.shell( - "pm list features" - ): - return - if not self.is_user_setup_complete() or self.is_setupwizard_on(): - # b/116709539 need this to prevent reboot after skip setup wizard - self.adb.shell( - "am start -a com.android.setupwizard.EXIT", ignore_status=True - ) - self.adb.shell( - "pm disable %s" % self.get_setupwizard_package_name(), - ignore_status=True, - ) - # Wait up to 5 seconds for user_setup_complete to be updated - end_time = time.time() + 5 - while time.time() < end_time: - if self.is_user_setup_complete() or not self.is_setupwizard_on(): - return - - # If fail to exit setup wizard, set local.prop and reboot - if not self.is_user_setup_complete() and self.is_setupwizard_on(): - self.adb.shell("echo ro.test_harness=1 > /data/local.prop") - self.adb.shell("chmod 644 /data/local.prop") - self.reboot(stop_at_lock_screen=True) - - def get_setupwizard_package_name(self): - """Finds setupwizard package/.activity - - Bypass setupwizard or setupwraith depending on device. - - Returns: - packageName/.ActivityName - """ - packages_to_skip = "'setupwizard|setupwraith'" - android_package_name = "com.google.android" - package = self.adb.shell( - "pm list packages -f | grep -E {} | grep {}".format( - packages_to_skip, android_package_name - ) - ) - wizard_package = package.split("=")[1] - activity = package.split("=")[0].split("/")[-2] - self.log.info("%s/.%sActivity" % (wizard_package, activity)) - return "%s/.%sActivity" % (wizard_package, activity) - - def push_system_file(self, src_file_path, dst_file_path, push_timeout=300): - """Pushes a file onto the read-only file system. - - For speed, the device is left in root mode after this call, and leaves - verity disabled. To re-enable verity, call ensure_verity_enabled(). - - Args: - src_file_path: The path to the system app to install. - dst_file_path: The destination of the file. - push_timeout: How long to wait for the push to finish. 
- Returns: - Whether or not the install was successful. - """ - self.adb.ensure_root() - try: - self.ensure_verity_disabled() - self.adb.remount() - out = self.adb.push( - "%s %s" % (src_file_path, dst_file_path), timeout=push_timeout - ) - if "error" in out: - self.log.error( - "Unable to push system file %s to %s due to %s", - src_file_path, - dst_file_path, - out, - ) - return False - return True - except Exception as e: - self.log.error( - "Unable to push system file %s to %s due to %s", - src_file_path, - dst_file_path, - e, - ) - return False - - def ensure_verity_enabled(self): - """Ensures that verity is enabled. - - If verity is not enabled, this call will reboot the phone. Note that - this only works on debuggable builds. - """ - user = self.adb.get_user_id() - # The below properties will only exist if verity has been enabled. - system_verity = self.adb.getprop("partition.system.verified") - vendor_verity = self.adb.getprop("partition.vendor.verified") - if not system_verity or not vendor_verity: - self.adb.ensure_root() - self.adb.enable_verity() - self.reboot() - self.adb.ensure_user(user) - - def ensure_verity_disabled(self): - """Ensures that verity is disabled. - - If verity is enabled, this call will reboot the phone. - """ - user = self.adb.get_user_id() - # The below properties will only exist if verity has been enabled. - system_verity = self.adb.getprop("partition.system.verified") - vendor_verity = self.adb.getprop("partition.vendor.verified") - if system_verity or vendor_verity: - self.adb.ensure_root() - self.adb.disable_verity() - self.reboot() - self.adb.ensure_user(user) - - -class AndroidDeviceLoggerAdapter(logging.LoggerAdapter): - def process(self, msg, kwargs): - msg = "[AndroidDevice|%s] %s" % (self.extra["serial"], msg) - return (msg, kwargs)
diff --git a/src/antlion/controllers/android_lib/__init__.py b/src/antlion/controllers/android_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/android_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/android_lib/errors.py b/src/antlion/controllers/android_lib/errors.py deleted file mode 100644 index 79e3949..0000000 --- a/src/antlion/controllers/android_lib/errors.py +++ /dev/null
@@ -1,25 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import error - - -class AndroidDeviceConfigError(Exception): - """Raised when AndroidDevice configs are malformatted.""" - - -class AndroidDeviceError(error.ActsError): - """Raised when there is an error in AndroidDevice."""
diff --git a/src/antlion/controllers/android_lib/events.py b/src/antlion/controllers/android_lib/events.py deleted file mode 100644 index 92ae40e..0000000 --- a/src/antlion/controllers/android_lib/events.py +++ /dev/null
@@ -1,56 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.event.event import Event - - -class AndroidEvent(Event): - """The base class for AndroidDevice-related events.""" - - def __init__(self, android_device): - self.android_device = android_device - - @property - def ad(self): - return self.android_device - - -class AndroidStartServicesEvent(AndroidEvent): - """The event posted when an AndroidDevice begins its services.""" - - -class AndroidStopServicesEvent(AndroidEvent): - """The event posted when an AndroidDevice ends its services.""" - - -class AndroidRebootEvent(AndroidEvent): - """The event posted when an AndroidDevice has rebooted.""" - - -class AndroidDisconnectEvent(AndroidEvent): - """The event posted when an AndroidDevice has disconnected.""" - - -class AndroidReconnectEvent(AndroidEvent): - """The event posted when an AndroidDevice has disconnected.""" - - -class AndroidBugReportEvent(AndroidEvent): - """The event posted when an AndroidDevice captures a bugreport.""" - - def __init__(self, android_device, bugreport_dir): - super().__init__(android_device) - self.bugreport_dir = bugreport_dir
diff --git a/src/antlion/controllers/android_lib/logcat.py b/src/antlion/controllers/android_lib/logcat.py deleted file mode 100644 index 0a5e8f7..0000000 --- a/src/antlion/controllers/android_lib/logcat.py +++ /dev/null
@@ -1,108 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import re - -from antlion.libs.proc.process import Process -from antlion.libs.logging import log_stream -from antlion.libs.logging.log_stream import LogStyles - -TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)" - - -class TimestampTracker(object): - """Stores the last timestamp outputted by the Logcat process.""" - - def __init__(self): - self._last_timestamp = None - - @property - def last_timestamp(self): - return self._last_timestamp - - def read_output(self, message): - """Reads the message and parses all timestamps from it.""" - all_timestamps = re.findall(TIMESTAMP_REGEX, message) - if len(all_timestamps) > 0: - self._last_timestamp = all_timestamps[0] - - -def _get_log_level(message): - """Returns the log level for the given message.""" - if message.startswith("-") or len(message) < 37: - return logging.ERROR - else: - log_level = message[36] - if log_level in ("V", "D"): - return logging.DEBUG - elif log_level == "I": - return logging.INFO - elif log_level == "W": - return logging.WARNING - elif log_level == "E": - return logging.ERROR - return logging.NOTSET - - -def _log_line_func(log, timestamp_tracker): - """Returns a lambda that logs a message to the given logger.""" - - def log_line(message): - timestamp_tracker.read_output(message) - log.log(_get_log_level(message), message) - - return log_line - - 
-def _on_retry(serial, extra_params, timestamp_tracker): - def on_retry(_): - begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1) - additional_params = extra_params or "" - - return "adb -s %s logcat -T %s -v year %s" % ( - serial, - begin_at, - additional_params, - ) - - return on_retry - - -def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""): - """Creates a Logcat Process that automatically attempts to reconnect. - - Args: - serial: The serial of the device to read the logcat of. - logcat_dir: The directory used for logcat file output. - extra_params: Any additional params to be added to the logcat cmdline. - - Returns: - A acts.libs.proc.process.Process object. - """ - logger = log_stream.create_logger( - "adblog_%s" % serial, - log_name=serial, - subcontext=logcat_dir, - log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG), - ) - process = Process("adb -s %s logcat -T 1 -v year %s" % (serial, extra_params)) - timestamp_tracker = TimestampTracker() - process.set_on_output_callback(_log_line_func(logger, timestamp_tracker)) - process.set_on_terminate_callback( - _on_retry(serial, extra_params, timestamp_tracker) - ) - return process
diff --git a/src/antlion/controllers/android_lib/services.py b/src/antlion/controllers/android_lib/services.py deleted file mode 100644 index 098f524..0000000 --- a/src/antlion/controllers/android_lib/services.py +++ /dev/null
@@ -1,118 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.controllers.android_lib import errors -from antlion.controllers.android_lib import events as android_events -from antlion.event import event_bus - - -class AndroidService(object): - """The base class for Android long-running services. - - The _start method is registered to an AndroidStartServicesEvent, and - the _stop method is registered to an AndroidStopServicesEvent. - - Attributes: - ad: The AndroidDevice instance associated with the service. - serial: The serial of the device. - _registration_ids: List of registration IDs for the event subscriptions. - """ - - def __init__(self, ad): - self.ad = ad - self._registration_ids = [] - - @property - def serial(self): - return self.ad.serial - - def register(self): - """Registers the _start and _stop methods to their corresponding - events. 
- """ - - def check_serial(event): - return self.serial == event.ad.serial - - self._registration_ids = [ - event_bus.register( - android_events.AndroidStartServicesEvent, - self._start, - filter_fn=check_serial, - ), - event_bus.register( - android_events.AndroidStopServicesEvent, - self._stop, - filter_fn=check_serial, - ), - ] - - def unregister(self): - """Unregisters all subscriptions in this service.""" - event_bus.unregister_all(from_list=self._registration_ids) - self._registration_ids.clear() - - def _start(self, start_event): - """Start the service. Called upon an AndroidStartServicesEvent. - - Args: - start_event: The AndroidStartServicesEvent instance. - """ - raise NotImplementedError - - def _stop(self, stop_event): - """Stop the service. Called upon an AndroidStopServicesEvent. - - Args: - stop_event: The AndroidStopServicesEvent instance. - """ - raise NotImplementedError - - -class AdbLogcatService(AndroidService): - """Service for adb logcat.""" - - def _start(self, _): - self.ad.start_adb_logcat() - - def _stop(self, _): - self.ad.stop_adb_logcat() - - -class Sl4aService(AndroidService): - """Service for SL4A.""" - - def _start(self, start_event): - if self.ad.skip_sl4a: - return - - if not self.ad.is_sl4a_installed(): - self.ad.log.error("sl4a.apk is not installed") - raise errors.AndroidDeviceError( - "The required sl4a.apk is not installed", serial=self.serial - ) - if not self.ad.ensure_screen_on(): - self.ad.log.error("User window cannot come up") - raise errors.AndroidDeviceError( - "User window cannot come up", serial=self.serial - ) - - droid, ed = self.ad.get_droid() - ed.start() - - def _stop(self, _): - self.ad.terminate_all_sessions() - self.ad._sl4a_manager.stop_service()
diff --git a/src/antlion/controllers/android_lib/tel/__init__.py b/src/antlion/controllers/android_lib/tel/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/android_lib/tel/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/android_lib/tel/tel_utils.py b/src/antlion/controllers/android_lib/tel/tel_utils.py deleted file mode 100644 index c18741c..0000000 --- a/src/antlion/controllers/android_lib/tel/tel_utils.py +++ /dev/null
@@ -1,690 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Generic telephony utility functions. Cloned from test_utils.tel.""" - -import re -import struct -import time -from queue import Empty - -from antlion.logger import epoch_to_log_line_timestamp -from antlion.controllers.adb_lib.error import AdbCommandError - -INCALL_UI_DISPLAY_FOREGROUND = "foreground" -INCALL_UI_DISPLAY_BACKGROUND = "background" -INCALL_UI_DISPLAY_DEFAULT = "default" - -# Max time to wait after caller make a call and before -# callee start ringing -MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT = 30 - -# Max time to wait after toggle airplane mode and before -# get expected event -MAX_WAIT_TIME_AIRPLANEMODE_EVENT = 90 - -# Wait time between state check retry -WAIT_TIME_BETWEEN_STATE_CHECK = 5 - -# Constant for Data Roaming State -DATA_ROAMING_ENABLE = 1 -DATA_ROAMING_DISABLE = 0 - -# Constant for Telephony Manager Call State -TELEPHONY_STATE_RINGING = "RINGING" -TELEPHONY_STATE_IDLE = "IDLE" -TELEPHONY_STATE_OFFHOOK = "OFFHOOK" -TELEPHONY_STATE_UNKNOWN = "UNKNOWN" - -# Constant for Service State -SERVICE_STATE_EMERGENCY_ONLY = "EMERGENCY_ONLY" -SERVICE_STATE_IN_SERVICE = "IN_SERVICE" -SERVICE_STATE_OUT_OF_SERVICE = "OUT_OF_SERVICE" -SERVICE_STATE_POWER_OFF = "POWER_OFF" -SERVICE_STATE_UNKNOWN = "UNKNOWN" - -# Constant for Network Mode -NETWORK_MODE_GSM_ONLY = "NETWORK_MODE_GSM_ONLY" -NETWORK_MODE_WCDMA_ONLY = 
"NETWORK_MODE_WCDMA_ONLY" -NETWORK_MODE_LTE_ONLY = "NETWORK_MODE_LTE_ONLY" - -# Constant for Events -EVENT_CALL_STATE_CHANGED = "CallStateChanged" -EVENT_SERVICE_STATE_CHANGED = "ServiceStateChanged" - - -class CallStateContainer: - INCOMING_NUMBER = "incomingNumber" - SUBSCRIPTION_ID = "subscriptionId" - CALL_STATE = "callState" - - -class ServiceStateContainer: - VOICE_REG_STATE = "voiceRegState" - VOICE_NETWORK_TYPE = "voiceNetworkType" - DATA_REG_STATE = "dataRegState" - DATA_NETWORK_TYPE = "dataNetworkType" - OPERATOR_NAME = "operatorName" - OPERATOR_ID = "operatorId" - IS_MANUAL_NW_SELECTION = "isManualNwSelection" - ROAMING = "roaming" - IS_EMERGENCY_ONLY = "isEmergencyOnly" - NETWORK_ID = "networkId" - SYSTEM_ID = "systemId" - SUBSCRIPTION_ID = "subscriptionId" - SERVICE_STATE = "serviceState" - - -def dumpsys_last_call_info(ad): - """Get call information by dumpsys telecom.""" - num = dumpsys_last_call_number(ad) - output = ad.adb.shell("dumpsys telecom") - result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL) - call_info = {"TC": num} - if result: - result = result.group(1) - for attr in ( - "startTime", - "endTime", - "direction", - "isInterrupted", - "callTechnologies", - "callTerminationsReason", - "isVideoCall", - "callProperties", - ): - match = re.search(r"%s: (.*)" % attr, result) - if match: - if attr in ("startTime", "endTime"): - call_info[attr] = epoch_to_log_line_timestamp(int(match.group(1))) - else: - call_info[attr] = match.group(1) - ad.log.debug("call_info = %s", call_info) - return call_info - - -def dumpsys_last_call_number(ad): - output = ad.adb.shell("dumpsys telecom") - call_nums = re.findall("Call TC@(\d+):", output) - if not call_nums: - return 0 - else: - return int(call_nums[-1]) - - -def get_device_epoch_time(ad): - return int(1000 * float(ad.adb.shell("date +%s.%N"))) - - -def get_outgoing_voice_sub_id(ad): - """Get outgoing voice subscription id""" - if hasattr(ad, "outgoing_voice_sub_id"): - return 
ad.outgoing_voice_sub_id - else: - return ad.droid.subscriptionGetDefaultVoiceSubId() - - -def get_rx_tx_power_levels(log, ad): - """Obtains Rx and Tx power levels from the MDS application. - - The method requires the MDS app to be installed in the DUT. - - Args: - log: logger object - ad: an android device - - Return: - A tuple where the first element is an array array with the RSRP value - in Rx chain, and the second element is the transmitted power in dBm. - Values for invalid Rx / Tx chains are set to None. - """ - cmd = ( - 'am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e ' - 'response wait "com.google.mdstest/com.google.mdstest.instrument.' - 'ModemCommandInstrumentation"' - ) - try: - output = ad.adb.shell(cmd) - except AdbCommandError as e: - log.error(e) - output = None - - if not output or "result=SUCCESS" not in output: - raise RuntimeError( - "Could not obtain Tx/Rx power levels from MDS. Is " "the MDS app installed?" - ) - - response = re.search(r"(?<=response=).+", output) - - if not response: - raise RuntimeError("Invalid response from the MDS app:\n" + output) - - # Obtain a list of bytes in hex format from the response string - response_hex = response.group(0).split(" ") - - def get_bool(pos): - """Obtain a boolean variable from the byte array.""" - return response_hex[pos] == "01" - - def get_int32(pos): - """Obtain an int from the byte array. Bytes are printed in - little endian format.""" - return struct.unpack( - "<i", bytearray.fromhex("".join(response_hex[pos : pos + 4])) - )[0] - - rx_power = [] - RX_CHAINS = 4 - - for i in range(RX_CHAINS): - # Calculate starting position for the Rx chain data structure - start = 12 + i * 22 - - # The first byte in the data structure indicates if the rx chain is - # valid. 
- if get_bool(start): - rx_power.append(get_int32(start + 2) / 10) - else: - rx_power.append(None) - - # Calculate the position for the tx chain data structure - tx_pos = 12 + RX_CHAINS * 22 - - tx_valid = get_bool(tx_pos) - if tx_valid: - tx_power = get_int32(tx_pos + 2) / -10 - else: - tx_power = None - - return rx_power, tx_power - - -def get_telephony_signal_strength(ad): - # {'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0, - # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160, - # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0, - # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0, - # 'lteDbm': -112, 'gsmSignalStrength': 99} - try: - signal_strength = ad.droid.telephonyGetSignalStrength() - if not signal_strength: - signal_strength = {} - except Exception as e: - ad.log.error(e) - signal_strength = {} - return signal_strength - - -def initiate_call( - log, - ad, - callee_number, - emergency=False, - incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND, - video=False, -): - """Make phone call from caller to callee. - - Args: - log: log object. - ad: Caller android device object. - callee_number: Callee phone number. - emergency : specify the call is emergency. - Optional. Default value is False. - incall_ui_display: show the dialer UI foreground or background - video: whether to initiate as video call - - Returns: - result: if phone call is placed successfully. 
- """ - ad.ed.clear_events(EVENT_CALL_STATE_CHANGED) - sub_id = get_outgoing_voice_sub_id(ad) - begin_time = get_device_epoch_time(ad) - ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id) - try: - # Make a Call - ad.log.info("Make a phone call to %s", callee_number) - if emergency: - ad.droid.telecomCallEmergencyNumber(callee_number) - else: - ad.droid.telecomCallNumber(callee_number, video) - - # Verify OFFHOOK state - if not wait_for_call_offhook_for_subscription( - log, ad, sub_id, event_tracking_started=True - ): - ad.log.info("sub_id %s not in call offhook state", sub_id) - last_call_drop_reason(ad, begin_time=begin_time) - return False - else: - return True - finally: - if hasattr(ad, "sdm_log") and getattr(ad, "sdm_log"): - ad.adb.shell("i2cset -fy 3 64 6 1 b", ignore_status=True) - ad.adb.shell("i2cset -fy 3 65 6 1 b", ignore_status=True) - ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id) - if incall_ui_display == INCALL_UI_DISPLAY_FOREGROUND: - ad.droid.telecomShowInCallScreen() - elif incall_ui_display == INCALL_UI_DISPLAY_BACKGROUND: - ad.droid.showHomeScreen() - - -def is_event_match(event, field, value): - """Return if <field> in "event" match <value> or not. - - Args: - event: event to test. This event need to have <field>. - field: field to match. - value: value to match. - - Returns: - True if <field> in "event" match <value>. - False otherwise. - """ - return is_event_match_for_list(event, field, [value]) - - -def is_event_match_for_list(event, field, value_list): - """Return if <field> in "event" match any one of the value - in "value_list" or not. - - Args: - event: event to test. This event need to have <field>. - field: field to match. - value_list: a list of value to match. - - Returns: - True if <field> in "event" match one of the value in "value_list". - False otherwise. 
- """ - try: - value_in_event = event["data"][field] - except KeyError: - return False - for value in value_list: - if value_in_event == value: - return True - return False - - -def is_phone_in_call(log, ad): - """Return True if phone in call. - - Args: - log: log object. - ad: android device. - """ - try: - return ad.droid.telecomIsInCall() - except: - return "mCallState=2" in ad.adb.shell( - "dumpsys telephony.registry | grep mCallState" - ) - - -def last_call_drop_reason(ad, begin_time=None): - reasons = ad.search_logcat( - "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time - ) - reason_string = "" - if reasons: - log_msg = "Logcat call drop reasons:" - for reason in reasons: - log_msg = "%s\n\t%s" % (log_msg, reason["log_message"]) - if "ril reason str" in reason["log_message"]: - reason_string = reason["log_message"].split(":")[-1].strip() - ad.log.info(log_msg) - reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION", begin_time) - if reasons: - ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen") - ad.log.info("last call dumpsys: %s", sorted(dumpsys_last_call_info(ad).items())) - return reason_string - - -def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True): - """Toggle the state of airplane mode. - - Args: - log: log handler. - ad: android_device object. - new_state: Airplane mode state to set to. - If None, opposite of the current state. - strict_checking: Whether to turn on strict checking that checks all features. - - Returns: - result: True if operation succeed. False if error happens. - """ - if ad.skip_sl4a: - return toggle_airplane_mode_by_adb(log, ad, new_state) - else: - return toggle_airplane_mode_msim( - log, ad, new_state, strict_checking=strict_checking - ) - - -def toggle_airplane_mode_by_adb(log, ad, new_state=None): - """Toggle the state of airplane mode. - - Args: - log: log handler. - ad: android_device object. - new_state: Airplane mode state to set to. 
- If None, opposite of the current state. - - Returns: - result: True if operation succeed. False if error happens. - """ - cur_state = bool(int(ad.adb.shell("settings get global airplane_mode_on"))) - if new_state == cur_state: - ad.log.info("Airplane mode already in %s", new_state) - return True - elif new_state is None: - new_state = not cur_state - ad.log.info("Change airplane mode from %s to %s", cur_state, new_state) - try: - ad.adb.shell("settings put global airplane_mode_on %s" % int(new_state)) - ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE") - except Exception as e: - ad.log.error(e) - return False - changed_state = bool(int(ad.adb.shell("settings get global airplane_mode_on"))) - return changed_state == new_state - - -def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True): - """Toggle the state of airplane mode. - - Args: - log: log handler. - ad: android_device object. - new_state: Airplane mode state to set to. - If None, opposite of the current state. - strict_checking: Whether to turn on strict checking that checks all features. - - Returns: - result: True if operation succeed. False if error happens. - """ - - cur_state = ad.droid.connectivityCheckAirplaneMode() - if cur_state == new_state: - ad.log.info("Airplane mode already in %s", new_state) - return True - elif new_state is None: - new_state = not cur_state - ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state, new_state) - sub_id_list = [] - active_sub_info = ad.droid.subscriptionGetAllSubInfoList() - if active_sub_info: - for info in active_sub_info: - sub_id_list.append(info["subscriptionId"]) - - ad.ed.clear_all_events() - time.sleep(0.1) - service_state_list = [] - if new_state: - service_state_list.append(SERVICE_STATE_POWER_OFF) - ad.log.info("Turn on airplane mode") - - else: - # If either one of these 3 events show up, it should be OK. 
- # Normal SIM, phone in service - service_state_list.append(SERVICE_STATE_IN_SERVICE) - # NO SIM, or Dead SIM, or no Roaming coverage. - service_state_list.append(SERVICE_STATE_OUT_OF_SERVICE) - service_state_list.append(SERVICE_STATE_EMERGENCY_ONLY) - ad.log.info("Turn off airplane mode") - - for sub_id in sub_id_list: - ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(sub_id) - - timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT - ad.droid.connectivityToggleAirplaneMode(new_state) - - try: - try: - event = ad.ed.wait_for_event( - EVENT_SERVICE_STATE_CHANGED, - is_event_match_for_list, - timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT, - field=ServiceStateContainer.SERVICE_STATE, - value_list=service_state_list, - ) - ad.log.info("Got event %s", event) - except Empty: - ad.log.warning( - "Did not get expected service state change to %s", service_state_list - ) - finally: - for sub_id in sub_id_list: - ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(sub_id) - except Exception as e: - ad.log.error(e) - - # APM on (new_state=True) will turn off bluetooth but may not turn it on - try: - if new_state and not _wait_for_bluetooth_in_state( - log, ad, False, timeout_time - time.time() - ): - ad.log.error("Failed waiting for bluetooth during airplane mode toggle") - if strict_checking: - return False - except Exception as e: - ad.log.error("Failed to check bluetooth state due to %s", e) - if strict_checking: - raise - - # APM on (new_state=True) will turn off wifi but may not turn it on - if new_state and not _wait_for_wifi_in_state( - log, ad, False, timeout_time - time.time() - ): - ad.log.error("Failed waiting for wifi during airplane mode toggle on") - if strict_checking: - return False - - if ad.droid.connectivityCheckAirplaneMode() != new_state: - ad.log.error("Set airplane mode to %s failed", new_state) - return False - return True - - -def toggle_cell_data_roaming(ad, state): - """Enable cell data roaming for default data 
subscription. - - Wait for the data roaming status to be DATA_STATE_CONNECTED - or DATA_STATE_DISCONNECTED. - - Args: - ad: Android Device Object. - state: True or False for enable or disable cell data roaming. - - Returns: - True if success. - False if failed. - """ - state_int = {True: DATA_ROAMING_ENABLE, False: DATA_ROAMING_DISABLE}[state] - action_str = {True: "Enable", False: "Disable"}[state] - if ad.droid.connectivityCheckDataRoamingMode() == state: - ad.log.info("Data roaming is already in state %s", state) - return True - if not ad.droid.connectivitySetDataRoaming(state_int): - ad.error.info("Fail to config data roaming into state %s", state) - return False - if ad.droid.connectivityCheckDataRoamingMode() == state: - ad.log.info("Data roaming is configured into state %s", state) - return True - else: - ad.log.error("Data roaming is not configured into state %s", state) - return False - - -def wait_for_call_offhook_event( - log, - ad, - sub_id, - event_tracking_started=False, - timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT, -): - """Wait for an incoming call on specified subscription. - - Args: - log: log object. - ad: android device object. - event_tracking_started: True if event tracking already state outside - timeout: time to wait for event - - Returns: - True: if call offhook event is received. - False: if call offhook event is not received. 
- """ - if not event_tracking_started: - ad.ed.clear_events(EVENT_CALL_STATE_CHANGED) - ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id) - try: - ad.ed.wait_for_event( - EVENT_CALL_STATE_CHANGED, - is_event_match, - timeout=timeout, - field=CallStateContainer.CALL_STATE, - value=TELEPHONY_STATE_OFFHOOK, - ) - ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK) - except Empty: - ad.log.info("No event for call state change to OFFHOOK") - return False - finally: - if not event_tracking_started: - ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id) - return True - - -def wait_for_call_offhook_for_subscription( - log, - ad, - sub_id, - event_tracking_started=False, - timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT, - interval=WAIT_TIME_BETWEEN_STATE_CHECK, -): - """Wait for an incoming call on specified subscription. - - Args: - log: log object. - ad: android device object. - sub_id: subscription ID - timeout: time to wait for ring - interval: checking interval - - Returns: - True: if incoming call is received and answered successfully. 
- False: for errors - """ - if not event_tracking_started: - ad.ed.clear_events(EVENT_CALL_STATE_CHANGED) - ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id) - offhook_event_received = False - end_time = time.time() + timeout - try: - while time.time() < end_time: - if not offhook_event_received: - if wait_for_call_offhook_event(log, ad, sub_id, True, interval): - offhook_event_received = True - telephony_state = ad.droid.telephonyGetCallStateForSubscription(sub_id) - telecom_state = ad.droid.telecomGetCallState() - if telephony_state == TELEPHONY_STATE_OFFHOOK and ( - telecom_state == TELEPHONY_STATE_OFFHOOK - ): - ad.log.info("telephony and telecom are in OFFHOOK state") - return True - else: - ad.log.info( - "telephony in %s, telecom in %s, expecting OFFHOOK state", - telephony_state, - telecom_state, - ) - if offhook_event_received: - time.sleep(interval) - finally: - if not event_tracking_started: - ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id) - - -def _wait_for_bluetooth_in_state(log, ad, state, max_wait): - # FIXME: These event names should be defined in a common location - _BLUETOOTH_STATE_ON_EVENT = "BluetoothStateChangedOn" - _BLUETOOTH_STATE_OFF_EVENT = "BluetoothStateChangedOff" - ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT) - ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT) - - ad.droid.bluetoothStartListeningForAdapterStateChange() - try: - bt_state = ad.droid.bluetoothCheckState() - if bt_state == state: - return True - if max_wait <= 0: - ad.log.error( - "Time out: bluetooth state still %s, expecting %s", bt_state, state - ) - return False - - event = {False: _BLUETOOTH_STATE_OFF_EVENT, True: _BLUETOOTH_STATE_ON_EVENT}[ - state - ] - event = ad.ed.pop_event(event, max_wait) - ad.log.info("Got event %s", event["name"]) - return True - except Empty: - ad.log.error( - "Time out: bluetooth state still in %s, expecting %s", bt_state, state - ) - return False - finally: - 
ad.droid.bluetoothStopListeningForAdapterStateChange() - - -def wait_for_droid_in_call(log, ad, max_time): - """Wait for android to be in call state. - - Args: - log: log object. - ad: android device. - max_time: maximal wait time. - - Returns: - If phone become in call state within max_time, return True. - Return False if timeout. - """ - return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call) - - -def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args, **kwargs): - while max_time >= 0: - if state_check_func(log, ad, *args, **kwargs): - return True - - time.sleep(WAIT_TIME_BETWEEN_STATE_CHECK) - max_time -= WAIT_TIME_BETWEEN_STATE_CHECK - - return False - - -# TODO: replace this with an event-based function -def _wait_for_wifi_in_state(log, ad, state, max_wait): - return _wait_for_droid_in_state( - log, - ad, - max_wait, - lambda log, ad, state: (True if ad.droid.wifiCheckState() == state else False), - state, - )
diff --git a/src/antlion/controllers/ap_lib/__init__.py b/src/antlion/controllers/ap_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/ap_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/ap_lib/ap_get_interface.py b/src/antlion/controllers/ap_lib/ap_get_interface.py deleted file mode 100644 index 74a6d2c..0000000 --- a/src/antlion/controllers/ap_lib/ap_get_interface.py +++ /dev/null
@@ -1,189 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging - -from typing import List, Optional, Tuple, TYPE_CHECKING - -from antlion.libs.proc import job - -if TYPE_CHECKING: - from antlion.controllers.access_point import AccessPoint - -GET_ALL_INTERFACE = "ls /sys/class/net" -GET_VIRTUAL_INTERFACE = "ls /sys/devices/virtual/net" -BRCTL_SHOW = "brctl show" - - -class ApInterfacesError(Exception): - """Error related to AP interfaces.""" - - -class ApInterfaces(object): - """Class to get network interface information for the device.""" - - def __init__( - self, ap: "AccessPoint", wan_interface_override: Optional[str] = None - ) -> None: - """Initialize the ApInterface class. - - Args: - ap: the ap object within ACTS - wan_interface_override: wan interface to use if specified by config - """ - self.ssh = ap.ssh - self.wan_interface_override = wan_interface_override - - def get_all_interface(self) -> List[str]: - """Get all network interfaces on the device. - - Returns: - interfaces_all: list of all the network interfaces on device - """ - output = self.ssh.run(GET_ALL_INTERFACE) - interfaces_all = output.stdout.split("\n") - - return interfaces_all - - def get_virtual_interface(self) -> List[str]: - """Get all virtual interfaces on the device. 
- - Returns: - interfaces_virtual: list of all the virtual interfaces on device - """ - output = self.ssh.run(GET_VIRTUAL_INTERFACE) - interfaces_virtual = output.stdout.split("\n") - - return interfaces_virtual - - def get_physical_interface(self) -> List[str]: - """Get all the physical interfaces of the device. - - Get all physical interfaces such as eth ports and wlan ports - Returns: - interfaces_phy: list of all the physical interfaces - """ - interfaces_all = self.get_all_interface() - interfaces_virtual = self.get_virtual_interface() - interfaces_phy = list(set(interfaces_all) - set(interfaces_virtual)) - - return interfaces_phy - - def get_bridge_interface(self) -> Optional[List[str]]: - """Get all the bridge interfaces of the device. - - Returns: - interfaces_bridge: the list of bridge interfaces, return None if - bridge utility is not available on the device - """ - interfaces_bridge = [] - try: - output = self.ssh.run(BRCTL_SHOW) - lines = output.stdout.split("\n") - for line in lines: - interfaces_bridge.append(line.split("\t")[0]) - interfaces_bridge.pop(0) - return [x for x in interfaces_bridge if x != ""] - except job.Error: - logging.info("No brctl utility is available") - return None - - def get_wlan_interface(self) -> Tuple[str, str]: - """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces. 
- - Returns: - interfaces_wlan: all wlan interfaces - Raises: - ApInterfacesError: Missing at least one WLAN interface - """ - wlan_2g = None - wlan_5g = None - interfaces_phy = self.get_physical_interface() - for iface in interfaces_phy: - output = self.ssh.run(f"iwlist {iface} freq") - if "Channel 06" in output.stdout and "Channel 36" not in output.stdout: - wlan_2g = iface - elif "Channel 36" in output.stdout and "Channel 06" not in output.stdout: - wlan_5g = iface - - if wlan_2g is None or wlan_5g is None: - raise ApInterfacesError("Missing at least one WLAN interface") - - return (wlan_2g, wlan_5g) - - def get_wan_interface(self) -> str: - """Get the WAN interface which has internet connectivity. If a wan - interface is already specified return that instead. - - Returns: - wan: the only one WAN interface - Raises: - ApInterfacesError: no running WAN can be found - """ - if self.wan_interface_override: - return self.wan_interface_override - - wan = None - interfaces_phy = self.get_physical_interface() - interfaces_wlan = self.get_wlan_interface() - interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan)) - for iface in interfaces_eth: - network_status = self.check_ping(iface) - if network_status == 1: - wan = iface - break - if wan: - return wan - - output = self.ssh.run("ifconfig") - interfaces_all = output.stdout.split("\n") - logging.info(f"IFCONFIG output = {interfaces_all}") - - raise ApInterfacesError("No WAN interface available") - - def get_lan_interface(self) -> Optional[str]: - """Get the LAN interface connecting to local devices. - - Returns: - lan: the only one running LAN interface of the devices - None, if nothing was found. 
- """ - lan = None - interfaces_phy = self.get_physical_interface() - interfaces_wlan = self.get_wlan_interface() - interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan)) - interface_wan = self.get_wan_interface() - interfaces_eth.remove(interface_wan) - for iface in interfaces_eth: - output = self.ssh.run(f"ifconfig {iface}") - if "RUNNING" in output.stdout: - lan = iface - break - return lan - - def check_ping(self, iface: str) -> int: - """Check the ping status on specific interface to determine the WAN. - - Args: - iface: the specific interface to check - Returns: - network_status: the connectivity status of the interface - """ - try: - self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8") - return 1 - except job.Error: - return 0
diff --git a/src/antlion/controllers/ap_lib/ap_iwconfig.py b/src/antlion/controllers/ap_lib/ap_iwconfig.py deleted file mode 100644 index 225a397..0000000 --- a/src/antlion/controllers/ap_lib/ap_iwconfig.py +++ /dev/null
@@ -1,50 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Optional, TYPE_CHECKING - -from antlion.libs.proc.job import Result - -if TYPE_CHECKING: - from antlion.controllers.access_point import AccessPoint - - -class ApIwconfigError(Exception): - """Error related to configuring the wireless interface via iwconfig.""" - - -class ApIwconfig(object): - """Class to configure wireless interface via iwconfig""" - - PROGRAM_FILE = "/usr/local/sbin/iwconfig" - - def __init__(self, ap: "AccessPoint") -> None: - """Initialize the ApIwconfig class. - - Args: - ap: the ap object within ACTS - """ - self.ssh = ap.ssh - - def ap_iwconfig( - self, interface: str, arguments: Optional[str] = None - ) -> Optional[Result]: - """Configure the wireless interface using iwconfig. - - Returns: - output: the output of the command, if any - """ - return self.ssh.run(f"{self.PROGRAM_FILE} {interface} {arguments}")
diff --git a/src/antlion/controllers/ap_lib/bridge_interface.py b/src/antlion/controllers/ap_lib/bridge_interface.py deleted file mode 100644 index ee4733e..0000000 --- a/src/antlion/controllers/ap_lib/bridge_interface.py +++ /dev/null
@@ -1,121 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import time -from antlion.libs.proc import job - -_BRCTL = "brctl" -BRIDGE_NAME = "br-lan" -CREATE_BRIDGE = "%s addbr %s" % (_BRCTL, BRIDGE_NAME) -DELETE_BRIDGE = "%s delbr %s" % (_BRCTL, BRIDGE_NAME) -BRING_DOWN_BRIDGE = "ifconfig %s down" % BRIDGE_NAME - - -class BridgeInterfaceConfigs(object): - """Configs needed for creating bridge interface between LAN and WLAN.""" - - def __init__(self, iface_wlan, iface_lan, bridge_ip): - """Set bridge interface configs based on the channel info. - - Args: - iface_wlan: the wlan interface as part of the bridge - iface_lan: the ethernet LAN interface as part of the bridge - bridge_ip: the ip address assigned to the bridge interface - """ - self.iface_wlan = iface_wlan - self.iface_lan = iface_lan - self.bridge_ip = bridge_ip - - -class BridgeInterface(object): - """Class object for bridge interface betwen WLAN and LAN""" - - def __init__(self, ap): - """Initialize the BridgeInterface class. - - Bridge interface will be added between ethernet LAN port and WLAN port. - Args: - ap: AP object within ACTS - """ - self.ssh = ap.ssh - - def startup(self, brconfigs): - """Start up the bridge interface. 
- - Args: - brconfigs: the bridge interface config, type BridgeInterfaceConfigs - """ - - logging.info("Create bridge interface between LAN and WLAN") - # Create the bridge - try: - self.ssh.run(CREATE_BRIDGE) - except job.Error: - logging.warning( - "Bridge interface {} already exists, no action needed".format( - BRIDGE_NAME - ) - ) - - # Enable 4addr mode on for the wlan interface - ENABLE_4ADDR = "iw dev %s set 4addr on" % (brconfigs.iface_wlan) - try: - self.ssh.run(ENABLE_4ADDR) - except job.Error: - logging.warning( - "4addr is already enabled on {}".format(brconfigs.iface_wlan) - ) - - # Add both LAN and WLAN interfaces to the bridge interface - for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]: - ADD_INTERFACE = "%s addif %s %s" % (_BRCTL, BRIDGE_NAME, interface) - try: - self.ssh.run(ADD_INTERFACE) - except job.Error: - logging.warning( - "{} has already been added to {}".format(interface, BRIDGE_NAME) - ) - time.sleep(5) - - # Set IP address on the bridge interface to bring it up - SET_BRIDGE_IP = "ifconfig %s %s" % (BRIDGE_NAME, brconfigs.bridge_ip) - self.ssh.run(SET_BRIDGE_IP) - time.sleep(2) - - # Bridge interface is up - logging.info("Bridge interface is up and running") - - def teardown(self, brconfigs): - """Tear down the bridge interface. - - Args: - brconfigs: the bridge interface config, type BridgeInterfaceConfigs - """ - logging.info("Bringing down the bridge interface") - # Delete the bridge interface - self.ssh.run(BRING_DOWN_BRIDGE) - time.sleep(1) - self.ssh.run(DELETE_BRIDGE) - - # Bring down wlan interface and disable 4addr mode - BRING_DOWN_WLAN = "ifconfig %s down" % brconfigs.iface_wlan - self.ssh.run(BRING_DOWN_WLAN) - time.sleep(2) - DISABLE_4ADDR = "iw dev %s set 4addr off" % (brconfigs.iface_wlan) - self.ssh.run(DISABLE_4ADDR) - time.sleep(1) - logging.info("Bridge interface is down")
diff --git a/src/antlion/controllers/ap_lib/dhcp_config.py b/src/antlion/controllers/ap_lib/dhcp_config.py deleted file mode 100644 index a50b6d0..0000000 --- a/src/antlion/controllers/ap_lib/dhcp_config.py +++ /dev/null
@@ -1,205 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import copy - -_ROUTER_DNS = "8.8.8.8, 4.4.4.4" - - -class Subnet(object): - """Configs for a subnet on the dhcp server. - - Attributes: - network: ipaddress.IPv4Network, the network that this subnet is in. - start: ipaddress.IPv4Address, the start ip address. - end: ipaddress.IPv4Address, the end ip address. - router: The router to give to all hosts in this subnet. - lease_time: The lease time of all hosts in this subnet. - additional_parameters: A dictionary corresponding to DHCP parameters. - additional_options: A dictionary corresponding to DHCP options. - """ - - def __init__( - self, - subnet, - start=None, - end=None, - router=None, - lease_time=None, - additional_parameters={}, - additional_options={}, - ): - """ - Args: - subnet: ipaddress.IPv4Network, The address space of the subnetwork - served by the DHCP server. - start: ipaddress.IPv4Address, The start of the address range to - give hosts in this subnet. If not given, the second ip in - the network is used, under the assumption that the first - address is the router. - end: ipaddress.IPv4Address, The end of the address range to give - hosts. If not given then the address prior to the broadcast - address (i.e. the second to last ip in the network) is used. - router: ipaddress.IPv4Address, The router hosts should use in this - subnet. If not given the first ip in the network is used. 
- lease_time: int, The amount of lease time in seconds - hosts in this subnet have. - additional_parameters: A dictionary corresponding to DHCP parameters. - additional_options: A dictionary corresponding to DHCP options. - """ - self.network = subnet - - if start: - self.start = start - else: - self.start = self.network[2] - - if not self.start in self.network: - raise ValueError("The start range is not in the subnet.") - if self.start.is_reserved: - raise ValueError("The start of the range cannot be reserved.") - - if end: - self.end = end - else: - self.end = self.network[-2] - - if not self.end in self.network: - raise ValueError("The end range is not in the subnet.") - if self.end.is_reserved: - raise ValueError("The end of the range cannot be reserved.") - if self.end < self.start: - raise ValueError("The end must be an address larger than the start.") - - if router: - if router >= self.start and router <= self.end: - raise ValueError("Router must not be in pool range.") - if not router in self.network: - raise ValueError("Router must be in the given subnet.") - - self.router = router - else: - # TODO: Use some more clever logic so that we don't have to search - # every host potentially. - # This is especially important if we support IPv6 networks in this - # configuration. The improved logic that we can use is: - # a) erroring out if start and end encompass the whole network, and - # b) picking any address before self.start or after self.end. - self.router = None - for host in self.network.hosts(): - if host < self.start or host > self.end: - self.router = host - break - - if not self.router: - raise ValueError("No useable host found.") - - self.lease_time = lease_time - self.additional_parameters = additional_parameters - self.additional_options = additional_options - if "domain-name-servers" not in self.additional_options: - self.additional_options["domain-name-servers"] = _ROUTER_DNS - - -class StaticMapping(object): - """Represents a static dhcp host. 
- - Attributes: - identifier: How id of the host (usually the mac addres - e.g. 00:11:22:33:44:55). - address: ipaddress.IPv4Address, The ipv4 address to give the host. - lease_time: How long to give a lease to this host. - """ - - def __init__(self, identifier, address, lease_time=None): - self.identifier = identifier - self.ipv4_address = address - self.lease_time = lease_time - - -class DhcpConfig(object): - """The configs for a dhcp server. - - Attributes: - subnets: A list of all subnets for the dhcp server to create. - static_mappings: A list of static host addresses. - default_lease_time: The default time for a lease. - max_lease_time: The max time to allow a lease. - """ - - def __init__( - self, - subnets=None, - static_mappings=None, - default_lease_time=600, - max_lease_time=7200, - ): - self.subnets = copy.deepcopy(subnets) if subnets else [] - self.static_mappings = copy.deepcopy(static_mappings) if static_mappings else [] - self.default_lease_time = default_lease_time - self.max_lease_time = max_lease_time - - def render_config_file(self): - """Renders the config parameters into a format compatible with - the ISC DHCP server (dhcpd). 
- """ - lines = [] - - if self.default_lease_time: - lines.append("default-lease-time %d;" % self.default_lease_time) - if self.max_lease_time: - lines.append("max-lease-time %s;" % self.max_lease_time) - - for subnet in self.subnets: - address = subnet.network.network_address - mask = subnet.network.netmask - router = subnet.router - start = subnet.start - end = subnet.end - lease_time = subnet.lease_time - additional_parameters = subnet.additional_parameters - additional_options = subnet.additional_options - - lines.append("subnet %s netmask %s {" % (address, mask)) - lines.append("\tpool {") - lines.append("\t\toption subnet-mask %s;" % mask) - lines.append("\t\toption routers %s;" % router) - lines.append("\t\trange %s %s;" % (start, end)) - if lease_time: - lines.append("\t\tdefault-lease-time %d;" % lease_time) - lines.append("\t\tmax-lease-time %d;" % lease_time) - for param, value in additional_parameters.items(): - lines.append("\t\t%s %s;" % (param, value)) - for option, value in additional_options.items(): - lines.append("\t\toption %s %s;" % (option, value)) - lines.append("\t}") - lines.append("}") - - for mapping in self.static_mappings: - identifier = mapping.identifier - fixed_address = mapping.ipv4_address - host_fake_name = "host%s" % identifier.replace(":", "") - lease_time = mapping.lease_time - - lines.append("host %s {" % host_fake_name) - lines.append("\thardware ethernet %s;" % identifier) - lines.append("\tfixed-address %s;" % fixed_address) - if lease_time: - lines.append("\tdefault-lease-time %d;" % lease_time) - lines.append("\tmax-lease-time %d;" % lease_time) - lines.append("}") - - config_str = "\n".join(lines) - - return config_str
diff --git a/src/antlion/controllers/ap_lib/dhcp_server.py b/src/antlion/controllers/ap_lib/dhcp_server.py deleted file mode 100644 index c52983b..0000000 --- a/src/antlion/controllers/ap_lib/dhcp_server.py +++ /dev/null
@@ -1,202 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed - -from antlion.controllers.ap_lib.dhcp_config import DhcpConfig -from antlion.controllers.utils_lib.commands import shell -from antlion import logger - - -class Error(Exception): - """An error caused by the dhcp server.""" - - -class NoInterfaceError(Exception): - """Error thrown when the dhcp server has no interfaces on any subnet.""" - - -class DhcpServer(object): - """Manages the dhcp server program. - - Only one of these can run in an environment at a time. - - Attributes: - config: The dhcp server configuration that is being used. - """ - - PROGRAM_FILE = "dhcpd" - - def __init__(self, runner, interface, working_dir="/tmp"): - """ - Args: - runner: Object that has a run_async and run methods for running - shell commands. - interface: string, The name of the interface to use. - working_dir: The directory to work out of. 
- """ - self._log = logger.create_logger(lambda msg: f"[DHCP Server|{interface}] {msg}") - - self._runner = runner - self._working_dir = working_dir - self._shell = shell.ShellCommand(runner, working_dir) - self._stdio_log_file = f"dhcpd_{interface}.log" - self._config_file = f"dhcpd_{interface}.conf" - self._lease_file = f"dhcpd_{interface}.leases" - self._pid_file = f"dhcpd_{interface}.pid" - self._identifier = f"{self.PROGRAM_FILE}.*{self._config_file}" - - # There is a slight timing issue where if the proc filesystem in Linux - # doesn't get updated in time as when this is called, the NoInterfaceError - # will happening. By adding this retry, the error appears to have gone away - # but will still show a warning if the problem occurs. The error seems to - # happen more with bridge interfaces than standard interfaces. - @retry( - retry=retry_if_exception_type(NoInterfaceError), - stop=stop_after_attempt(3), - wait=wait_fixed(1), - ) - def start(self, config: DhcpConfig, timeout_sec: int = 60) -> None: - """Starts the dhcp server. - - Starts the dhcp server daemon and runs it in the background. - - Args: - config: Configs to start the dhcp server with. - - Raises: - Error: Raised when a dhcp server error is found. 
- """ - if self.is_alive(): - self.stop() - - self._write_configs(config) - self._shell.delete_file(self._stdio_log_file) - self._shell.delete_file(self._pid_file) - self._shell.touch_file(self._lease_file) - - dhcpd_command = ( - f"{self.PROGRAM_FILE} " - f'-cf "{self._config_file}" ' - f"-lf {self._lease_file} " - f'-pf "{self._pid_file}" ' - "-f -d" - ) - - base_command = f'cd "{self._working_dir}"; {dhcpd_command}' - job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1' - self._runner.run_async(job_str) - - try: - self._wait_for_process(timeout=timeout_sec) - self._wait_for_server(timeout=timeout_sec) - except: - self._log.warn("Failed to start DHCP server.") - self._log.info("DHCP configuration:\n" + config.render_config_file() + "\n") - self._log.info("DHCP logs:\n" + self.get_logs() + "\n") - self.stop() - raise - - def stop(self): - """Kills the daemon if it is running.""" - if self.is_alive(): - self._shell.kill(self._identifier) - - def is_alive(self): - """ - Returns: - True if the daemon is running. - """ - return self._shell.is_alive(self._identifier) - - def get_logs(self) -> str: - """Pulls the log files from where dhcp server is running. - - Returns: - A string of the dhcp server logs. - """ - return self._shell.read_file(self._stdio_log_file) - - def _wait_for_process(self, timeout=60): - """Waits for the process to come up. - - Waits until the dhcp server process is found running, or there is - a timeout. If the program never comes up then the log file - will be scanned for errors. - - Raises: See _scan_for_errors - """ - start_time = time.time() - while time.time() - start_time < timeout and not self.is_alive(): - self._scan_for_errors(False) - time.sleep(0.1) - - self._scan_for_errors(True) - - def _wait_for_server(self, timeout=60): - """Waits for dhcp server to report that the server is up. - - Waits until dhcp server says the server has been brought up or an - error occurs. 
- - Raises: see _scan_for_errors - """ - start_time = time.time() - while time.time() - start_time < timeout: - success = self._shell.search_file( - "Wrote [0-9]* leases to leases file", self._stdio_log_file - ) - if success: - return - - self._scan_for_errors(True) - - def _scan_for_errors(self, should_be_up): - """Scans the dhcp server log for any errors. - - Args: - should_be_up: If true then dhcp server is expected to be alive. - If it is found not alive while this is true an error - is thrown. - - Raises: - Error: Raised when a dhcp server error is found. - """ - # If this is checked last we can run into a race condition where while - # scanning the log the process has not died, but after scanning it - # has. If this were checked last in that condition then the wrong - # error will be thrown. To prevent this we gather the alive state first - # so that if it is dead it will definitely give the right error before - # just giving a generic one. - is_dead = not self.is_alive() - - no_interface = self._shell.search_file( - "Not configured to listen on any interfaces", self._stdio_log_file - ) - if no_interface: - raise NoInterfaceError( - "Dhcp does not contain a subnet for any of the networks the" - " current interfaces are on." - ) - - if should_be_up and is_dead: - raise Error("Dhcp server failed to start.", self) - - def _write_configs(self, config): - """Writes the configs to the dhcp server config file.""" - self._shell.delete_file(self._config_file) - config_str = config.render_config_file() - self._shell.write_file(self._config_file, config_str)
diff --git a/src/antlion/controllers/ap_lib/extended_capabilities.py b/src/antlion/controllers/ap_lib/extended_capabilities.py deleted file mode 100644 index 82029cc..0000000 --- a/src/antlion/controllers/ap_lib/extended_capabilities.py +++ /dev/null
@@ -1,194 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from enum import IntEnum, unique -from typing import Tuple - - -@unique -class ExtendedCapability(IntEnum): - """All extended capabilities present in IEEE 802.11-2020 Table 9-153. - - Each name has a value corresponding to that extended capability's bit offset - in the specification's extended capabilities field. - - Note that most extended capabilities are represented by a single bit, which - indicates whether the extended capability is advertised by the STA; but - some are represented by multiple bits. In the enum, each extended capability - has the value of its offset; comments indicate capabilities that use - multiple bits. 
- """ - - TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0 - GLK = 1 - EXTENDED_CHANNEL_SWITCHING = 2 - GLK_GCR = 3 - PSMP_CAPABILITY = 4 - # 5 reserved - S_PSMP_SUPPORT = 6 - EVENT = 7 - DIAGNOSTICS = 8 - MULTICAST_DIAGNOSTICS = 9 - LOCATION_TRACKING = 10 - FMS = 11 - PROXY_ARP_SERVICE = 12 - COLLOCATED_INTERFERENCE_REPORTING = 13 - CIVIC_LOCATION = 14 - GEOSPATIAL_LOCATION = 15 - TFS = 16 - WNM_SLEEP_MODE = 17 - TIM_BROADCAST = 18 - BSS_TRANSITION = 19 - QOS_TRAFFIC_CAPABILITY = 20 - AC_STATION_COUNT = 21 - MULTIPLE_BSSID = 22 - TIMING_MEASUREMENT = 23 - CHANNEL_USAGE = 24 - SSID_LIST = 25 - DMS = 26 - UTC_TSF_OFFSET = 27 - TPU_BUFFER_STA_SUPPORT = 28 - TDLS_PEER_PSM_SUPPORT = 29 - TDLS_CHANNEL_SWITCHING = 30 - INTERWORKING = 31 - QOS_MAP = 32 - EBR = 33 - SSPN_INTERFACE = 34 - # 35 reserved - MSGCF_CAPABILITY = 36 - TDLS_SUPPORT = 37 - TDLS_PROHIBITED = 38 - TDLS_CHANNEL_SWITCHING_PROHIBITED = 39 - REJECT_UNADMITTED_FRAME = 40 - SERVICE_INTERVAL_GRANULARITY = 41 - # Bits 41-43 contain SERVICE_INTERVAL_GRANULARITY value - IDENTIFIER_LOCATION = 44 - U_APSD_COEXISTENCE = 45 - WNM_NOTIFICATION = 46 - QAB_CAPABILITY = 47 - UTF_8_SSID = 48 - QMF_ACTIVATED = 49 - QMF_RECONFIGURATION_ACTIVATED = 50 - ROBUST_AV_STREAMING = 51 - ADVANCED_GCR = 52 - MESH_GCR = 53 - SCS = 54 - QLOAD_REPORT = 55 - ALTERNATE_EDCA = 56 - UNPROTECTED_TXOP_NEGOTIATION = 57 - PROTECTED_TXOP_NEGOTIATION = 58 - # 59 reserved - PROTECTED_QLOAD_REPORT = 60 - TDLS_WIDER_BANDWIDTH = 61 - OPERATING_MODE_NOTIFICATION = 62 - MAX_NUMBER_OF_MSDUS_IN_A_MSDU = 63 - # 63-64 contain MAX_NUMBER_OF_MSDUS_IN_A_MSDU value - CHANNEL_SCHEDULE_MANAGEMENT = 65 - GEODATABASE_INBAND_ENABLING_SIGNAL = 66 - NETWORK_CHANNEL_CONTROL = 67 - WHITE_SPACE_MAP = 68 - CHANNEL_AVAILABILITY_QUERY = 69 - FINE_TIMING_MEASUREMENT_RESPONDER = 70 - FINE_TIMING_MEASUREMENT_INITIATOR = 71 - FILS_CAPABILITY = 72 - EXTENDED_SPECTRUM_MANAGEMENT_CAPABLE = 73 - FUTURE_CHANNEL_GUIDANCE = 74 - PAD = 75 - # 76-79 reserved - 
COMPLETE_LIST_OF_NON_TX_BSSID_PROFILES = 80 - SAE_PASSWORD_IDENTIFIERS_IN_USE = 81 - SAE_PASSWORD_IDENTIFIERS_USED_EXCLUSIVELY = 82 - # 83 reserved - BEACON_PROTECTION_ENABLED = 84 - MIRRORED_SCS = 85 - # 86 reserved - LOCAL_MAC_ADDRESS_POLICY = 87 - # 88-n reserved - - -def _offsets(ext_cap_offset: ExtendedCapability) -> Tuple[int, int]: - """For given capability, return the byte and bit offsets within the field. - - 802.11 divides the extended capability field into bytes, as does the - ExtendedCapabilities class below. This function returns the index of the - byte that contains the given extended capability, as well as the bit offset - inside that byte (all offsets zero-indexed). For example, - MULTICAST_DIAGNOSTICS is bit 9, which is within byte 1 at bit offset 1. - """ - byte_offset = ext_cap_offset // 8 - bit_offset = ext_cap_offset % 8 - return byte_offset, bit_offset - - -class ExtendedCapabilities: - """Extended capability parsing and representation. - - See IEEE 802.11-2020 9.4.2.26. - """ - - def __init__(self, ext_cap: bytearray = bytearray()): - """Represent the given extended capabilities field. - - Args: - ext_cap: IEEE 802.11-2020 9.4.2.26 extended capabilities field. - Default is an empty field, meaning no extended capabilities are - advertised. - """ - self._ext_cap = ext_cap - - def _capability_advertised(self, ext_cap: ExtendedCapability) -> bool: - """Whether an extended capability is advertised. - - Args: - ext_cap: an extended capability. - Returns: - True if the bit is present and its value is 1, otherwise False. - Raises: - NotImplementedError: for extended capabilities that span more than - a single bit. These could be supported, but no callers need them - at this time. 
- """ - if ext_cap in [ - ExtendedCapability.SERVICE_INTERVAL_GRANULARITY, - ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU, - ]: - raise NotImplementedError( - f"{ext_cap.name} not implemented yet by {__class__}" - ) - byte_offset, bit_offset = _offsets(ext_cap) - if len(self._ext_cap) > byte_offset: - # Use bit_offset to derive a mask that will check the correct bit. - if self._ext_cap[byte_offset] & 2**bit_offset > 0: - return True - return False - - @property - def bss_transition(self) -> bool: - return self._capability_advertised(ExtendedCapability.BSS_TRANSITION) - - @property - def proxy_arp_service(self) -> bool: - return self._capability_advertised(ExtendedCapability.PROXY_ARP_SERVICE) - - @property - def utc_tsf_offset(self) -> bool: - return self._capability_advertised(ExtendedCapability.UTC_TSF_OFFSET) - - @property - def wnm_sleep_mode(self) -> bool: - return self._capability_advertised(ExtendedCapability.WNM_SLEEP_MODE) - - # Other extended capability property methods can be added as needed by callers.
diff --git a/src/antlion/controllers/ap_lib/hostapd.py b/src/antlion/controllers/ap_lib/hostapd.py deleted file mode 100644 index b3f780d..0000000 --- a/src/antlion/controllers/ap_lib/hostapd.py +++ /dev/null
@@ -1,383 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import itertools -import logging -import re -import time - -from typing import Any, Dict, Optional, Set - -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities -from antlion.controllers.ap_lib.wireless_network_management import ( - BssTransitionManagementRequest, -) -from antlion.controllers.utils_lib.commands import shell -from antlion.libs.proc.job import Result - -PROGRAM_FILE = "/usr/sbin/hostapd" -CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli" - - -class Error(Exception): - """An error caused by hostapd.""" - - -class Hostapd(object): - """Manages the hostapd program. - - Attributes: - config: The hostapd configuration that is being used. - """ - - def __init__(self, runner: Any, interface: str, working_dir: str = "/tmp") -> None: - """ - Args: - runner: Object that has run_async and run methods for executing - shell commands (e.g. connection.SshConnection) - interface: The name of the interface to use (eg. wlan0). - working_dir: The directory to work out of. 
- """ - self._runner = runner - self._interface = interface - self._working_dir = working_dir - self.config = None - self._shell = shell.ShellCommand(runner, working_dir) - self._log_file = f"hostapd-{self._interface}.log" - self._ctrl_file = f"hostapd-{self._interface}.ctrl" - self._config_file = f"hostapd-{self._interface}.conf" - self._identifier = f"{PROGRAM_FILE}.*{self._config_file}" - - def start( - self, - config: Any, - timeout: int = 60, - additional_parameters: Optional[Dict[str, Any]] = None, - ) -> None: - """Starts hostapd - - Starts the hostapd daemon and runs it in the background. - - Args: - config: Configs to start the hostapd with. - timeout: Time to wait for DHCP server to come up. - additional_parameters: A dictionary of parameters that can sent - directly into the hostapd config file. This - can be used for debugging and or adding one - off parameters into the config. - - Returns: - True if the daemon could be started. Note that the daemon can still - start and not work. Invalid configurations can take a long amount - of time to be produced, and because the daemon runs indefinitely - it's impossible to wait on. If you need to check if configs are ok - then periodic checks to is_running and logs should be used. 
- """ - if self.is_alive(): - self.stop() - - self.config = config - - self._shell.delete_file(self._ctrl_file) - self._shell.delete_file(self._log_file) - self._shell.delete_file(self._config_file) - self._write_configs(additional_parameters=additional_parameters) - - hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"' - base_command = f'cd "{self._working_dir}"; {hostapd_command}' - job_str = f'rfkill unblock all; {base_command} > "{self._log_file}" 2>&1' - self._runner.run_async(job_str) - - try: - self._wait_for_process(timeout=timeout) - self._wait_for_interface(timeout=timeout) - except: - self.stop() - raise - - def stop(self) -> None: - """Kills the daemon if it is running.""" - if self.is_alive(): - self._shell.kill(self._identifier) - - def channel_switch(self, channel_num: int) -> None: - """Switches to the given channel. - - Returns: - acts.libs.proc.job.Result containing the results of the command. - Raises: See _run_hostapd_cli_cmd - """ - try: - channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num] - except KeyError: - raise ValueError(f"Invalid channel number {channel_num}") - csa_beacon_count = 10 - channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}" - self._run_hostapd_cli_cmd(channel_switch_cmd) - - def get_current_channel(self) -> int: - """Returns the current channel number. - - Raises: See _run_hostapd_cli_cmd - """ - status_cmd = "status" - result = self._run_hostapd_cli_cmd(status_cmd) - match = re.search(r"^channel=(\d+)$", result.stdout, re.MULTILINE) - if not match: - raise Error("Current channel could not be determined") - try: - channel = int(match.group(1)) - except ValueError: - raise Error("Internal error: current channel could not be parsed") - return channel - - def _list_sta(self) -> Result: - """List all associated STA MAC addresses. - - Returns: - acts.libs.proc.job.Result containing the results of the command. 
- Raises: See _run_hostapd_cli_cmd - """ - list_sta_cmd = "list_sta" - return self._run_hostapd_cli_cmd(list_sta_cmd) - - def get_stas(self) -> Set[str]: - """Return MAC addresses of all associated STAs.""" - list_sta_result = self._list_sta() - stas = set() - for line in list_sta_result.stdout.splitlines(): - # Each line must be a valid MAC address. Capture it. - m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line) - if m: - stas.add(m.group(1)) - return stas - - def _sta(self, sta_mac: str) -> Result: - """Return hostapd's detailed info about an associated STA. - - Returns: - acts.libs.proc.job.Result containing the results of the command. - Raises: See _run_hostapd_cli_cmd - """ - sta_cmd = "sta {}".format(sta_mac) - return self._run_hostapd_cli_cmd(sta_cmd) - - def get_sta_extended_capabilities(self, sta_mac: str) -> ExtendedCapabilities: - """Get extended capabilities for the given STA, as seen by the AP. - - Args: - sta_mac: MAC address of the STA in question. - Returns: - Extended capabilities of the given STA. - Raises: - Error if extended capabilities for the STA cannot be obtained. - """ - sta_result = self._sta(sta_mac) - # hostapd ext_capab field is a hex encoded string representation of the - # 802.11 extended capabilities structure, each byte represented by two - # chars (each byte having format %02x). - m = re.search(r"ext_capab=([0-9A-Faf]+)", sta_result.stdout, re.MULTILINE) - if not m: - raise Error("Failed to get ext_capab from STA details") - raw_ext_capab = m.group(1) - try: - return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab)) - except ValueError: - raise Error(f"ext_capab contains invalid hex string repr {raw_ext_capab}") - - def _bss_tm_req( - self, client_mac: str, request: BssTransitionManagementRequest - ) -> Result: - """Send a hostapd BSS Transition Management request command to a STA. - - Args: - client_mac: MAC address that will receive the request. - request: BSS Transition Management request that will be sent. 
- Returns: - acts.libs.proc.job.Result containing the results of the command. - Raises: See _run_hostapd_cli_cmd - """ - bss_tm_req_cmd = f"bss_tm_req {client_mac}" - - if request.abridged: - bss_tm_req_cmd += " abridged=1" - if request.bss_termination_included and request.bss_termination_duration: - bss_tm_req_cmd += f" bss_term={request.bss_termination_duration.duration}" - if request.disassociation_imminent: - bss_tm_req_cmd += " disassoc_imminent=1" - if request.disassociation_timer is not None: - bss_tm_req_cmd += f" disassoc_timer={request.disassociation_timer}" - if request.preferred_candidate_list_included: - bss_tm_req_cmd += " pref=1" - if request.session_information_url: - bss_tm_req_cmd += f" url={request.session_information_url}" - if request.validity_interval: - bss_tm_req_cmd += f" valid_int={request.validity_interval}" - - # neighbor= can appear multiple times, so it requires special handling. - for neighbor in request.candidate_list: - bssid = neighbor.bssid - bssid_info = hex(neighbor.bssid_information) - op_class = neighbor.operating_class - chan_num = neighbor.channel_number - phy_type = int(neighbor.phy_type) - bss_tm_req_cmd += ( - f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}" - ) - - return self._run_hostapd_cli_cmd(bss_tm_req_cmd) - - def send_bss_transition_management_req( - self, sta_mac: str, request: BssTransitionManagementRequest - ) -> Result: - """Send a BSS Transition Management request to an associated STA. - - Args: - sta_mac: MAC address of the STA in question. - request: BSS Transition Management request that will be sent. - Returns: - acts.libs.proc.job.Result containing the results of the command. - Raises: See _run_hostapd_cli_cmd - """ - return self._bss_tm_req(sta_mac, request) - - def is_alive(self) -> bool: - """ - Returns: - True if the daemon is running. - """ - return self._shell.is_alive(self._identifier) - - def pull_logs(self) -> str: - """Pulls the log files from where hostapd is running. 
- - Returns: - A string of the hostapd logs. - """ - # TODO: Auto pulling of logs when stop is called. - return self._shell.read_file(self._log_file) - - def _run_hostapd_cli_cmd(self, cmd: str) -> Result: - """Run the given hostapd_cli command. - - Runs the command, waits for the output (up to default timeout), and - returns the result. - - Returns: - acts.libs.proc.job.Result containing the results of the ssh command. - - Raises: - acts.lib.proc.job.TimeoutError: When the remote command took too - long to execute. - antlion.controllers.utils_lib.ssh.connection.Error: When the ssh - connection failed to be created. - antlion.controllers.utils_lib.ssh.connection.CommandError: Ssh worked, - but the command had an error executing. - """ - hostapd_cli_job = ( - f"cd {self._working_dir}; " f"{CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}" - ) - return self._runner.run(hostapd_cli_job) - - def _wait_for_process(self, timeout: int = 60) -> None: - """Waits for the process to come up. - - Waits until the hostapd process is found running, or there is - a timeout. If the program never comes up then the log file - will be scanned for errors. - - Raises: See _scan_for_errors - """ - start_time = time.time() - while time.time() - start_time < timeout and not self.is_alive(): - self._scan_for_errors(False) - time.sleep(0.1) - - def _wait_for_interface(self, timeout: int = 60) -> None: - """Waits for hostapd to report that the interface is up. - - Waits until hostapd says the interface has been brought up or an - error occurs. - - Raises: see _scan_for_errors - """ - start_time = time.time() - while time.time() - start_time < timeout: - time.sleep(0.1) - success = self._shell.search_file("Setup of interface done", self._log_file) - if success: - return - self._scan_for_errors(False) - - self._scan_for_errors(True) - - def _scan_for_errors(self, should_be_up: bool) -> None: - """Scans the hostapd log for any errors. 
- - Args: - should_be_up: If true then hostapd program is expected to be alive. - If it is found not alive while this is true an error - is thrown. - - Raises: - Error: Raised when a hostapd error is found. - """ - # Store this so that all other errors have priority. - is_dead = not self.is_alive() - - bad_config = self._shell.search_file( - "Interface initialization failed", self._log_file - ) - if bad_config: - raise Error("Interface failed to start", self) - - bad_config = self._shell.search_file( - f"Interface {self._interface} wasn't started", self._log_file - ) - if bad_config: - raise Error("Interface failed to start", self) - - if should_be_up and is_dead: - raise Error("Hostapd failed to start", self) - - def _write_configs( - self, additional_parameters: Optional[Dict[str, Any]] = None - ) -> None: - """Writes the configs to the hostapd config file.""" - self._shell.delete_file(self._config_file) - - interface_configs = collections.OrderedDict() - interface_configs["interface"] = self._interface - interface_configs["ctrl_interface"] = self._ctrl_file - pairs = (f"{k}={v}" for k, v in interface_configs.items()) - - packaged_configs = self.config.package_configs() - if additional_parameters: - packaged_configs.append(additional_parameters) - for packaged_config in packaged_configs: - config_pairs = ( - f"{k}={v}" for k, v in packaged_config.items() if v is not None - ) - pairs = itertools.chain(pairs, config_pairs) - - hostapd_conf = "\n".join(pairs) - - logging.info(f"Writing {self._config_file}") - logging.debug("******************Start*******************") - logging.debug(f"\n{hostapd_conf}") - logging.debug("*******************End********************") - - self._shell.write_file(self._config_file, hostapd_conf)
diff --git a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py b/src/antlion/controllers/ap_lib/hostapd_ap_preset.py deleted file mode 100644 index 3b694c0..0000000 --- a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py +++ /dev/null
@@ -1,543 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Any, FrozenSet, List, Optional - -from antlion import utils -from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.third_party_ap_profiles import ( - actiontec, - asus, - belkin, - linksys, - netgear, - securifi, - tplink, -) - - -def _get_or_default(var: Optional[Any], default_value: Any) -> Any: - """Check variable and return non-null value. - - Args: - var: Any variable. - default_value: Value to return if the var is None. - - Returns: - Variable value if not None, default value otherwise. 
- """ - return var if var is not None else default_value - - -def create_ap_preset( - profile_name: str = "whirlwind", - iface_wlan_2g: Optional[str] = None, - iface_wlan_5g: Optional[str] = None, - channel: Optional[int] = None, - mode: Optional[str] = None, - frequency: Optional[int] = None, - security: Optional[Security] = None, - pmf_support: Optional[int] = None, - ssid: Optional[str] = None, - hidden: Optional[bool] = None, - dtim_period: Optional[int] = None, - frag_threshold: Optional[int] = None, - rts_threshold: Optional[int] = None, - force_wmm: Optional[bool] = None, - beacon_interval: Optional[int] = None, - short_preamble: Optional[bool] = None, - n_capabilities: Optional[List[Any]] = None, - ac_capabilities: Optional[List[Any]] = None, - vht_bandwidth: Optional[int] = None, - wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(), - bss_settings: List[Any] = [], -): - """AP preset config generator. This a wrapper for hostapd_config but - but supplies the default settings for the preset that is selected. - - You may specify channel or frequency, but not both. Both options - are checked for validity (i.e. you can't specify an invalid channel - or a frequency that will not be accepted). - - Args: - profile_name: The name of the device want the preset for. - Options: whirlwind - channel: int, channel number. - dtim: int, DTIM value of the AP, default is 2. - frequency: int, frequency of channel. - security: The security settings to use. - ssid: string, The name of the ssid to broadcast. - pmf_support: int, whether pmf is disabled, enabled, or required - vht_bandwidth: VHT bandwidth for 11ac operation. - bss_settings: The settings for all bss. - iface_wlan_2g: the wlan 2g interface name of the AP. - iface_wlan_5g: the wlan 5g interface name of the AP. - mode: The hostapd 802.11 mode of operation. - ssid: The ssid for the wireless network. - hidden: Whether to include the ssid in the beacons. 
- dtim_period: The dtim period for the BSS - frag_threshold: Max size of packet before fragmenting the packet. - rts_threshold: Max size of packet before requiring protection for - rts/cts or cts to self. - n_capabilities: 802.11n capabilities for for BSS to advertise. - ac_capabilities: 802.11ac capabilities for for BSS to advertise. - wnm_features: WNM features to enable on the AP. - - Returns: A hostapd_config object that can be used by the hostapd object. - """ - - # Verify interfaces - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - - if channel: - frequency = hostapd_config.get_frequency_for_channel(channel) - elif frequency: - channel = hostapd_config.get_channel_for_frequency(frequency) - else: - raise ValueError("Specify either frequency or channel.") - - if profile_name == "whirlwind": - # profile indicates phy mode is 11bgn for 2.4Ghz or 11acn for 5Ghz - hidden = _get_or_default(hidden, False) - force_wmm = _get_or_default(force_wmm, True) - beacon_interval = _get_or_default(beacon_interval, 100) - short_preamble = _get_or_default(short_preamble, True) - dtim_period = _get_or_default(dtim_period, 2) - frag_threshold = _get_or_default(frag_threshold, 2346) - rts_threshold = _get_or_default(rts_threshold, 2347) - if frequency < 5000: - interface = iface_wlan_2g - mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED) - n_capabilities = _get_or_default( - n_capabilities, - [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - ], - ) - config = hostapd_config.HostapdConfig( - ssid=ssid, - hidden=hidden, - security=security, - pmf_support=pmf_support, - interface=interface, - mode=mode, - force_wmm=force_wmm, - 
beacon_interval=beacon_interval, - dtim_period=dtim_period, - short_preamble=short_preamble, - frequency=frequency, - n_capabilities=n_capabilities, - frag_threshold=frag_threshold, - rts_threshold=rts_threshold, - wnm_features=wnm_features, - bss_settings=bss_settings, - ) - else: - interface = iface_wlan_5g - vht_bandwidth = _get_or_default(vht_bandwidth, 80) - mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED) - if hostapd_config.ht40_plus_allowed(channel): - extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS - elif hostapd_config.ht40_minus_allowed(channel): - extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS - # Channel 165 operates in 20MHz with n or ac modes. - if channel == 165: - mode = hostapd_constants.MODE_11N_MIXED - extended_channel = hostapd_constants.N_CAPABILITY_HT20 - # Define the n capability vector for 20 MHz and higher bandwidth - if not vht_bandwidth: - pass - elif vht_bandwidth >= 40: - n_capabilities = _get_or_default( - n_capabilities, - [ - hostapd_constants.N_CAPABILITY_LDPC, - extended_channel, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - ], - ) - else: - n_capabilities = _get_or_default( - n_capabilities, - [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_HT20, - ], - ) - ac_capabilities = _get_or_default( - ac_capabilities, - [ - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, - 
hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, - ], - ) - config = hostapd_config.HostapdConfig( - ssid=ssid, - hidden=hidden, - security=security, - pmf_support=pmf_support, - interface=interface, - mode=mode, - force_wmm=force_wmm, - vht_channel_width=vht_bandwidth, - beacon_interval=beacon_interval, - dtim_period=dtim_period, - short_preamble=short_preamble, - frequency=frequency, - frag_threshold=frag_threshold, - rts_threshold=rts_threshold, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - bss_settings=bss_settings, - ) - elif profile_name == "whirlwind_11ab_legacy": - if frequency < 5000: - mode = hostapd_constants.MODE_11B - else: - mode = hostapd_constants.MODE_11A - - config = create_ap_preset( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - ssid=ssid, - channel=channel, - mode=mode, - security=security, - pmf_support=pmf_support, - hidden=hidden, - force_wmm=force_wmm, - beacon_interval=beacon_interval, - short_preamble=short_preamble, - dtim_period=dtim_period, - rts_threshold=rts_threshold, - frag_threshold=frag_threshold, - n_capabilities=[], - ac_capabilities=[], - vht_bandwidth=None, - wnm_features=wnm_features, - ) - elif profile_name == "whirlwind_11ag_legacy": - if frequency < 5000: - mode = hostapd_constants.MODE_11G - else: - mode = hostapd_constants.MODE_11A - - config = create_ap_preset( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - ssid=ssid, - channel=channel, - mode=mode, - security=security, - pmf_support=pmf_support, - hidden=hidden, - force_wmm=force_wmm, - beacon_interval=beacon_interval, - short_preamble=short_preamble, - dtim_period=dtim_period, - rts_threshold=rts_threshold, - frag_threshold=frag_threshold, - n_capabilities=[], - ac_capabilities=[], - vht_bandwidth=None, - wnm_features=wnm_features, - ) - elif profile_name == "mistral": - hidden = _get_or_default(hidden, False) - force_wmm = _get_or_default(force_wmm, True) - beacon_interval = 
_get_or_default(beacon_interval, 100) - short_preamble = _get_or_default(short_preamble, True) - dtim_period = _get_or_default(dtim_period, 2) - frag_threshold = None - rts_threshold = None - - # Google IE - # Country Code IE ('us' lowercase) - vendor_elements = { - "vendor_elements": "dd0cf4f5e80505ff0000ffffffff" "070a75732024041e95051e00" - } - default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"} - - if frequency < 5000: - interface = iface_wlan_2g - mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED) - n_capabilities = _get_or_default( - n_capabilities, - [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - ], - ) - - additional_params = utils.merge_dicts( - vendor_elements, - hostapd_constants.ENABLE_RRM_BEACON_REPORT, - hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, - default_configs, - ) - config = hostapd_config.HostapdConfig( - ssid=ssid, - hidden=hidden, - security=security, - pmf_support=pmf_support, - interface=interface, - mode=mode, - force_wmm=force_wmm, - beacon_interval=beacon_interval, - dtim_period=dtim_period, - short_preamble=short_preamble, - frequency=frequency, - n_capabilities=n_capabilities, - frag_threshold=frag_threshold, - rts_threshold=rts_threshold, - wnm_features=wnm_features, - bss_settings=bss_settings, - additional_parameters=additional_params, - set_ap_defaults_profile=profile_name, - ) - else: - interface = iface_wlan_5g - vht_bandwidth = _get_or_default(vht_bandwidth, 80) - mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED) - if hostapd_config.ht40_plus_allowed(channel): - extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS - elif hostapd_config.ht40_minus_allowed(channel): - extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS - # Channel 165 operates in 20MHz with n or ac 
modes. - if channel == 165: - mode = hostapd_constants.MODE_11N_MIXED - extended_channel = hostapd_constants.N_CAPABILITY_HT20 - if vht_bandwidth >= 40: - n_capabilities = _get_or_default( - n_capabilities, - [ - hostapd_constants.N_CAPABILITY_LDPC, - extended_channel, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - ], - ) - else: - n_capabilities = _get_or_default( - n_capabilities, - [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_HT20, - ], - ) - ac_capabilities = _get_or_default( - ac_capabilities, - [ - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, - hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, - hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER, - hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE, - hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER, - hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4, - hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4, - ], - ) - - additional_params = utils.merge_dicts( - vendor_elements, - hostapd_constants.ENABLE_RRM_BEACON_REPORT, - hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, - default_configs, - ) - config = hostapd_config.HostapdConfig( - ssid=ssid, - hidden=hidden, - security=security, - pmf_support=pmf_support, - interface=interface, - mode=mode, - force_wmm=force_wmm, - vht_channel_width=vht_bandwidth, - beacon_interval=beacon_interval, - dtim_period=dtim_period, - short_preamble=short_preamble, - frequency=frequency, - 
frag_threshold=frag_threshold, - rts_threshold=rts_threshold, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - wnm_features=wnm_features, - bss_settings=bss_settings, - additional_parameters=additional_params, - set_ap_defaults_profile=profile_name, - ) - elif profile_name == "actiontec_pk5000": - config = actiontec.actiontec_pk5000( - iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security - ) - elif profile_name == "actiontec_mi424wr": - config = actiontec.actiontec_mi424wr( - iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security - ) - elif profile_name == "asus_rtac66u": - config = asus.asus_rtac66u( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "asus_rtac86u": - config = asus.asus_rtac86u( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "asus_rtac5300": - config = asus.asus_rtac5300( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "asus_rtn56u": - config = asus.asus_rtn56u( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "asus_rtn66u": - config = asus.asus_rtn66u( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "belkin_f9k1001v5": - config = belkin.belkin_f9k1001v5( - iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security - ) - elif profile_name == "linksys_ea4500": - config = linksys.linksys_ea4500( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "linksys_ea9500": - config = linksys.linksys_ea9500( - 
iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "linksys_wrt1900acv2": - config = linksys.linksys_wrt1900acv2( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "netgear_r7000": - config = netgear.netgear_r7000( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "netgear_wndr3400": - config = netgear.netgear_wndr3400( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "securifi_almond": - config = securifi.securifi_almond( - iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security - ) - elif profile_name == "tplink_archerc5": - config = tplink.tplink_archerc5( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "tplink_archerc7": - config = tplink.tplink_archerc7( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "tplink_c1200": - config = tplink.tplink_c1200( - iface_wlan_2g=iface_wlan_2g, - iface_wlan_5g=iface_wlan_5g, - channel=channel, - ssid=ssid, - security=security, - ) - elif profile_name == "tplink_tlwr940n": - config = tplink.tplink_tlwr940n( - iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security - ) - else: - raise ValueError(f"Invalid ap model specified ({profile_name})") - - return config
diff --git a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py b/src/antlion/controllers/ap_lib/hostapd_bss_settings.py deleted file mode 100644 index 56a5422..0000000 --- a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py +++ /dev/null
@@ -1,52 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections - - -class BssSettings(object): - """Settings for a bss. - - Settings for a bss to allow multiple network on a single device. - - Attributes: - name: string, The name that this bss will go by. - ssid: string, The name of the ssid to brodcast. - hidden: bool, If true then the ssid will be hidden. - security: Security, The security settings to use. - """ - - def __init__(self, name, ssid, hidden=False, security=None, bssid=None): - self.name = name - self.ssid = ssid - self.hidden = hidden - self.security = security - self.bssid = bssid - - def generate_dict(self): - """Returns: A dictionary of bss settings.""" - settings = collections.OrderedDict() - settings["bss"] = self.name - if self.bssid: - settings["bssid"] = self.bssid - if self.ssid: - settings["ssid"] = self.ssid - settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0 - - if self.security: - security_settings = self.security.generate_dict() - for k, v in security_settings.items(): - settings[k] = v - - return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_config.py b/src/antlion/controllers/ap_lib/hostapd_config.py deleted file mode 100644 index a886e04..0000000 --- a/src/antlion/controllers/ap_lib/hostapd_config.py +++ /dev/null
@@ -1,731 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import logging -from typing import FrozenSet - -from antlion.controllers.ap_lib import hostapd_constants - - -def ht40_plus_allowed(channel): - """Returns: True iff HT40+ is enabled for this configuration.""" - channel_supported = ( - channel - in hostapd_constants.HT40_ALLOW_MAP[ - hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS - ] - ) - return channel_supported - - -def ht40_minus_allowed(channel): - """Returns: True iff HT40- is enabled for this configuration.""" - channel_supported = ( - channel - in hostapd_constants.HT40_ALLOW_MAP[ - hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS - ] - ) - return channel_supported - - -def get_frequency_for_channel(channel): - """The frequency associated with a given channel number. - - Args: - value: int channel number. - - Returns: - int, frequency in MHz associated with the channel. - - """ - for frequency, channel_iter in hostapd_constants.CHANNEL_MAP.items(): - if channel == channel_iter: - return frequency - else: - raise ValueError("Unknown channel value: %r." % channel) - - -def get_channel_for_frequency(frequency): - """The channel number associated with a given frequency. - - Args: - value: int frequency in MHz. - - Returns: - int, frequency associated with the channel. 
- - """ - return hostapd_constants.CHANNEL_MAP[frequency] - - -class HostapdConfig(object): - """The root settings for the router. - - All the settings for a router that are not part of an ssid. - """ - - def _get_11ac_center_channel_from_channel(self, channel): - """Returns the center channel of the selected channel band based - on the channel and channel bandwidth provided. - """ - channel = int(channel) - center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[ - self._vht_oper_chwidth - ]["delta"] - - for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[self._vht_oper_chwidth][ - "channels" - ]: - lower_channel_bound, upper_channel_bound = channel_map - if lower_channel_bound <= channel <= upper_channel_bound: - return lower_channel_bound + center_channel_delta - raise ValueError( - "Invalid channel for {channel_width}.".format( - channel_width=self._vht_oper_chwidth - ) - ) - - @property - def _get_default_config(self): - """Returns: dict of default options for hostapd.""" - if self.set_ap_defaults_profile == "mistral": - return collections.OrderedDict( - [ - ("logger_syslog", "-1"), - ("logger_syslog_level", "0"), - # default RTS and frag threshold to ``off'' - ("rts_threshold", None), - ("fragm_threshold", None), - ("driver", hostapd_constants.DRIVER_NAME), - ] - ) - else: - return collections.OrderedDict( - [ - ("logger_syslog", "-1"), - ("logger_syslog_level", "0"), - # default RTS and frag threshold to ``off'' - ("rts_threshold", "2347"), - ("fragm_threshold", "2346"), - ("driver", hostapd_constants.DRIVER_NAME), - ] - ) - - @property - def _hostapd_ht_capabilities(self): - """Returns: string suitable for the ht_capab= line in a hostapd config.""" - ret = [] - for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys(): - if cap in self._n_capabilities: - ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap]) - return "".join(ret) - - @property - def _hostapd_vht_capabilities(self): - """Returns: string suitable for the vht_capab= line in a 
hostapd config.""" - ret = [] - for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys(): - if cap in self._ac_capabilities: - ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap]) - return "".join(ret) - - @property - def _require_ht(self): - """Returns: True iff clients should be required to support HT.""" - return self._mode == hostapd_constants.MODE_11N_PURE - - @property - def _require_vht(self): - """Returns: True if clients should be required to support VHT.""" - return self._mode == hostapd_constants.MODE_11AC_PURE - - @property - def hw_mode(self): - """Returns: string hardware mode understood by hostapd.""" - if self._mode == hostapd_constants.MODE_11A: - return hostapd_constants.MODE_11A - if self._mode == hostapd_constants.MODE_11B: - return hostapd_constants.MODE_11B - if self._mode == hostapd_constants.MODE_11G: - return hostapd_constants.MODE_11G - if self.is_11n or self.is_11ac: - # For their own historical reasons, hostapd wants it this way. - if self._frequency > 5000: - return hostapd_constants.MODE_11A - return hostapd_constants.MODE_11G - raise ValueError("Invalid mode.") - - @property - def is_11n(self): - """Returns: True if we're trying to host an 802.11n network.""" - return self._mode in ( - hostapd_constants.MODE_11N_MIXED, - hostapd_constants.MODE_11N_PURE, - ) - - @property - def is_11ac(self): - """Returns: True if we're trying to host an 802.11ac network.""" - return self._mode in ( - hostapd_constants.MODE_11AC_MIXED, - hostapd_constants.MODE_11AC_PURE, - ) - - @property - def channel(self): - """Returns: int channel number for self.frequency.""" - return get_channel_for_frequency(self.frequency) - - @channel.setter - def channel(self, value): - """Sets the channel number to configure hostapd to listen on. - - Args: - value: int, channel number. 
- - """ - self.frequency = get_frequency_for_channel(value) - - @property - def bssid(self): - return self._bssid - - @bssid.setter - def bssid(self, value): - self._bssid = value - - @property - def frequency(self): - """Returns: int, frequency for hostapd to listen on.""" - return self._frequency - - @frequency.setter - def frequency(self, value): - """Sets the frequency for hostapd to listen on. - - Args: - value: int, frequency in MHz. - - """ - if value not in hostapd_constants.CHANNEL_MAP: - raise ValueError("Tried to set an invalid frequency: %r." % value) - - self._frequency = value - - @property - def bss_lookup(self): - return self._bss_lookup - - @property - def ssid(self): - """Returns: SsidSettings, The root Ssid settings being used.""" - return self._ssid - - @ssid.setter - def ssid(self, value): - """Sets the ssid for the hostapd. - - Args: - value: SsidSettings, new ssid settings to use. - - """ - self._ssid = value - - @property - def hidden(self): - """Returns: bool, True if the ssid is hidden, false otherwise.""" - return self._hidden - - @hidden.setter - def hidden(self, value): - """Sets if this ssid is hidden. - - Args: - value: bool, If true the ssid will be hidden. - """ - self.hidden = value - - @property - def security(self): - """Returns: The security type being used.""" - return self._security - - @security.setter - def security(self, value): - """Sets the security options to use. - - Args: - value: Security, The type of security to use. - """ - self._security = value - - @property - def ht_packet_capture_mode(self): - """Get an appropriate packet capture HT parameter. - - When we go to configure a raw monitor we need to configure - the phy to listen on the correct channel. Part of doing - so is to specify the channel width for HT channels. In the - case that the AP is configured to be either HT40+ or HT40-, - we could return the wrong parameter because we don't know which - configuration will be chosen by hostap. 
- - Returns: - string, HT parameter for frequency configuration. - - """ - if not self.is_11n: - return None - - if ht40_plus_allowed(self.channel): - return "HT40+" - - if ht40_minus_allowed(self.channel): - return "HT40-" - - return "HT20" - - @property - def beacon_footer(self): - """Returns: bool _beacon_footer value.""" - return self._beacon_footer - - def beacon_footer(self, value): - """Changes the beacon footer. - - Args: - value: bool, The beacon footer vlaue. - """ - self._beacon_footer = value - - @property - def scenario_name(self): - """Returns: string _scenario_name value, or None.""" - return self._scenario_name - - @property - def min_streams(self): - """Returns: int, _min_streams value, or None.""" - return self._min_streams - - @property - def wnm_features(self) -> FrozenSet[hostapd_constants.WnmFeature]: - return self._wnm_features - - @wnm_features.setter - def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]): - self._wnm_features = value - - def __init__( - self, - interface=None, - mode=None, - channel=None, - frequency=None, - n_capabilities=[], - beacon_interval=None, - dtim_period=None, - frag_threshold=None, - rts_threshold=None, - short_preamble=None, - ssid=None, - hidden=False, - security=None, - bssid=None, - force_wmm=None, - pmf_support=None, - obss_interval=None, - vht_channel_width=None, - vht_center_channel=None, - ac_capabilities=[], - beacon_footer="", - spectrum_mgmt_required=None, - scenario_name=None, - min_streams=None, - wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(), - bss_settings=[], - additional_parameters={}, - set_ap_defaults_profile="whirlwind", - ): - """Construct a HostapdConfig. - - You may specify channel or frequency, but not both. Both options - are checked for validity (i.e. you can't specify an invalid channel - or a frequency that will not be accepted). - - Args: - interface: string, The name of the interface to use. - mode: string, MODE_11x defined above. 
- channel: int, channel number. - frequency: int, frequency of channel. - n_capabilities: list of N_CAPABILITY_x defined above. - beacon_interval: int, beacon interval of AP. - dtim_period: int, include a DTIM every |dtim_period| beacons. - frag_threshold: int, maximum outgoing data frame size. - rts_threshold: int, maximum packet size without requiring explicit - protection via rts/cts or cts to self. - short_preamble: Whether to use a short preamble. - ssid: string, The name of the ssid to brodcast. - hidden: bool, Should the ssid be hidden. - security: Security, the secuirty settings to use. - bssid: string, a MAC address like string for the BSSID. - force_wmm: True if we should force WMM on, False if we should - force it off, None if we shouldn't force anything. - pmf_support: one of PMF_SUPPORT_* above. Controls whether the - client supports/must support 802.11w. If None, defaults to - required with wpa3, else defaults to disabled. - obss_interval: int, interval in seconds that client should be - required to do background scans for overlapping BSSes. - vht_channel_width: object channel width - vht_center_channel: int, center channel of segment 0. - ac_capabilities: list of AC_CAPABILITY_x defined above. - beacon_footer: string, containing (unvalidated) IE data to be - placed at the end of the beacon. - spectrum_mgmt_required: True if we require the DUT to support - spectrum management. - scenario_name: string to be included in file names, instead - of the interface name. - min_streams: int, number of spatial streams required. - wnm_features: WNM features to enable on the AP. - control_interface: The file name to use as the control interface. - bss_settings: The settings for all bss. - additional_parameters: A dictionary of additional parameters to add - to the hostapd config. 
- set_ap_defaults_profile: profile name to load defaults from - """ - self.set_ap_defaults_profile = set_ap_defaults_profile - self._interface = interface - if channel is not None and frequency is not None: - raise ValueError("Specify either frequency or channel " "but not both.") - - self._wmm_enabled = False - unknown_caps = [ - cap - for cap in n_capabilities - if cap not in hostapd_constants.N_CAPABILITIES_MAPPING - ] - if unknown_caps: - raise ValueError("Unknown capabilities: %r" % unknown_caps) - - self._frequency = None - if channel: - self.channel = channel - elif frequency: - self.frequency = frequency - else: - raise ValueError("Specify either frequency or channel.") - """ - if set_ap_defaults_model: - ap_default_config = hostapd_ap_default_configs.APDefaultConfig( - profile_name=set_ap_defaults_model, frequency=self.frequency) - force_wmm = ap_default_config.force_wmm - beacon_interval = ap_default_config.beacon_interval - dtim_period = ap_default_config.dtim_period - short_preamble = ap_default_config.short_preamble - self._interface = ap_default_config.interface - mode = ap_default_config.mode - if ap_default_config.n_capabilities: - n_capabilities = ap_default_config.n_capabilities - if ap_default_config.ac_capabilities: - ap_default_config = ap_default_config.ac_capabilities - """ - - self._n_capabilities = set(n_capabilities) - if self._n_capabilities: - self._wmm_enabled = True - if self._n_capabilities and mode is None: - mode = hostapd_constants.MODE_11N_PURE - self._mode = mode - - if not self.supports_frequency(self.frequency): - raise ValueError( - "Configured a mode %s that does not support " - "frequency %d" % (self._mode, self.frequency) - ) - - self._beacon_interval = beacon_interval - self._dtim_period = dtim_period - self._frag_threshold = frag_threshold - self._rts_threshold = rts_threshold - self._short_preamble = short_preamble - self._ssid = ssid - self._hidden = hidden - self._security = security - self._bssid = bssid - if 
force_wmm is not None: - if force_wmm: - self._wmm_enabled = 1 - else: - self._wmm_enabled = 0 - # Default PMF Values - if pmf_support is None: - if ( - self.security - and self.security.security_mode_string == hostapd_constants.WPA3_STRING - ): - # Set PMF required for WP3 - self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED - elif ( - self.security - and self.security.security_mode_string - in hostapd_constants.WPA3_MODE_STRINGS - ): - # Default PMF to enabled for WPA3 mixed modes (can be - # overwritten by explicitly provided value) - self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED - else: - # Default PMD to disabled for all other modes (can be - # overwritten by explicitly provided value) - self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED - elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES: - raise ValueError("Invalid value for pmf_support: %r" % pmf_support) - elif ( - pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED - and self.security - and self.security.security_mode_string == hostapd_constants.WPA3_STRING - ): - raise ValueError("PMF support must be required with wpa3.") - else: - self._pmf_support = pmf_support - self._obss_interval = obss_interval - if self.is_11ac: - if str(vht_channel_width) == "40" or str(vht_channel_width) == "20": - self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40 - elif str(vht_channel_width) == "80": - self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80 - elif str(vht_channel_width) == "160": - self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160 - elif str(vht_channel_width) == "80+80": - self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80 - elif vht_channel_width is not None: - raise ValueError("Invalid channel width") - else: - logging.warning( - "No channel bandwidth specified. Using 80MHz for 11ac." 
- ) - self._vht_oper_chwidth = 1 - if vht_center_channel is not None: - self._vht_oper_centr_freq_seg0_idx = vht_center_channel - elif vht_channel_width == 20: - self._vht_oper_centr_freq_seg0_idx = channel - else: - self._vht_oper_centr_freq_seg0_idx = ( - self._get_11ac_center_channel_from_channel(self.channel) - ) - self._ac_capabilities = set(ac_capabilities) - self._beacon_footer = beacon_footer - self._spectrum_mgmt_required = spectrum_mgmt_required - self._scenario_name = scenario_name - self._min_streams = min_streams - self._wnm_features = wnm_features - self._additional_parameters = additional_parameters - - self._bss_lookup = collections.OrderedDict() - for bss in bss_settings: - if bss.name in self._bss_lookup: - raise ValueError( - "Cannot have multiple bss settings with the" " same name." - ) - self._bss_lookup[bss.name] = bss - - def __repr__(self): - return ( - "%s(mode=%r, channel=%r, frequency=%r, " - "n_capabilities=%r, beacon_interval=%r, " - "dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, " - "wmm_enabled=%r, security_config=%r, " - "spectrum_mgmt_required=%r)" - % ( - self.__class__.__name__, - self._mode, - self.channel, - self.frequency, - self._n_capabilities, - self._beacon_interval, - self._dtim_period, - self._frag_threshold, - self._ssid, - self._bssid, - self._wmm_enabled, - self._security, - self._spectrum_mgmt_required, - ) - ) - - def supports_channel(self, value): - """Check whether channel is supported by the current hardware mode. - - @param value: int channel to check. - @return True iff the current mode supports the band of the channel. - - """ - for freq, channel in hostapd_constants.CHANNEL_MAP.iteritems(): - if channel == value: - return self.supports_frequency(freq) - - return False - - def supports_frequency(self, frequency): - """Check whether frequency is supported by the current hardware mode. - - @param frequency: int frequency to check. - @return True iff the current mode supports the band of the frequency. 
- - """ - if self._mode == hostapd_constants.MODE_11A and frequency < 5000: - return False - - if ( - self._mode in (hostapd_constants.MODE_11B, hostapd_constants.MODE_11G) - and frequency > 5000 - ): - return False - - if frequency not in hostapd_constants.CHANNEL_MAP: - return False - - channel = hostapd_constants.CHANNEL_MAP[frequency] - supports_plus = ( - channel - in hostapd_constants.HT40_ALLOW_MAP[ - hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS - ] - ) - supports_minus = ( - channel - in hostapd_constants.HT40_ALLOW_MAP[ - hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS - ] - ) - if ( - hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities - and not supports_plus - ): - return False - - if ( - hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities - and not supports_minus - ): - return False - - return True - - def add_bss(self, bss): - """Adds a new bss setting. - - Args: - bss: The bss settings to add. - """ - if bss.name in self._bss_lookup: - raise ValueError("A bss with the same name already exists.") - - self._bss_lookup[bss.name] = bss - - def remove_bss(self, bss_name): - """Removes a bss setting from the config.""" - del self._bss_lookup[bss_name] - - def package_configs(self): - """Package the configs. - - Returns: - A list of dictionaries, one dictionary for each section of the - config. - """ - # Start with the default config parameters. 
- conf = self._get_default_config - - if self._interface: - conf["interface"] = self._interface - if self._bssid: - conf["bssid"] = self._bssid - if self._ssid: - conf["ssid"] = self._ssid - conf["ignore_broadcast_ssid"] = 1 if self._hidden else 0 - conf["channel"] = self.channel - conf["hw_mode"] = self.hw_mode - if self.is_11n or self.is_11ac: - conf["ieee80211n"] = 1 - conf["ht_capab"] = self._hostapd_ht_capabilities - if self.is_11ac: - conf["ieee80211ac"] = 1 - conf["vht_oper_chwidth"] = self._vht_oper_chwidth - conf["vht_oper_centr_freq_seg0_idx"] = self._vht_oper_centr_freq_seg0_idx - conf["vht_capab"] = self._hostapd_vht_capabilities - if self._wmm_enabled is not None: - conf["wmm_enabled"] = self._wmm_enabled - if self._require_ht: - conf["require_ht"] = 1 - if self._require_vht: - conf["require_vht"] = 1 - if self._beacon_interval: - conf["beacon_int"] = self._beacon_interval - if self._dtim_period: - conf["dtim_period"] = self._dtim_period - if self._frag_threshold: - conf["fragm_threshold"] = self._frag_threshold - if self._rts_threshold: - conf["rts_threshold"] = self._rts_threshold - if self._pmf_support: - conf["ieee80211w"] = self._pmf_support - if self._obss_interval: - conf["obss_interval"] = self._obss_interval - if self._short_preamble: - conf["preamble"] = 1 - if self._spectrum_mgmt_required: - # To set spectrum_mgmt_required, we must first set - # local_pwr_constraint. And to set local_pwr_constraint, - # we must first set ieee80211d. And to set ieee80211d, ... - # Point being: order matters here. 
- conf["country_code"] = "US" # Required for local_pwr_constraint - conf["ieee80211d"] = 1 # Required for local_pwr_constraint - conf["local_pwr_constraint"] = 0 # No local constraint - conf["spectrum_mgmt_required"] = 1 # Requires local_pwr_constraint - - if self._security: - for k, v in self._security.generate_dict().items(): - conf[k] = v - - all_conf = [conf] - - for bss in self._bss_lookup.values(): - bss_conf = collections.OrderedDict() - for k, v in (bss.generate_dict()).items(): - bss_conf[k] = v - all_conf.append(bss_conf) - - for wnm_feature in self._wnm_features: - if wnm_feature == hostapd_constants.WnmFeature.TIME_ADVERTISEMENT: - conf.update(hostapd_constants.ENABLE_WNM_TIME_ADVERTISEMENT) - elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE: - conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE) - elif wnm_feature == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT: - conf.update(hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT) - elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP: - conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP) - elif ( - wnm_feature - == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST - ): - conf.update( - hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST - ) - - if self._additional_parameters: - all_conf.append(self._additional_parameters) - - return all_conf
diff --git a/src/antlion/controllers/ap_lib/hostapd_constants.py b/src/antlion/controllers/ap_lib/hostapd_constants.py deleted file mode 100755 index ae7ef85..0000000 --- a/src/antlion/controllers/ap_lib/hostapd_constants.py +++ /dev/null
@@ -1,899 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import itertools - -from enum import Enum, auto, unique - -BAND_2G = "2g" -BAND_5G = "5g" -CHANNEL_BANDWIDTH_20MHZ = 20 -CHANNEL_BANDWIDTH_40MHZ = 40 -CHANNEL_BANDWIDTH_80MHZ = 80 -CHANNEL_BANDWIDTH_160MHZ = 160 -WEP = 0 -WPA1 = 1 -WPA2 = 2 -WPA3 = 2 # same as wpa2 and wpa2/wpa3, distinguished by wpa_key_mgmt -MIXED = 3 # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinquished by wpa_key_mgmt -ENT = 4 # get the correct constant -MAX_WPA_PSK_LENGTH = 64 -MIN_WPA_PSK_LENGTH = 8 -MAX_WPA_PASSWORD_LENGTH = 63 -WPA_STRICT_REKEY = 1 -WPA_DEFAULT_CIPHER = "TKIP" -WPA2_DEFAULT_CIPER = "CCMP" -WPA_GROUP_KEY_ROTATION_TIME = 600 -WPA_STRICT_REKEY_DEFAULT = True -WEP_STRING = "wep" -WPA_STRING = "wpa" -WPA2_STRING = "wpa2" -WPA_MIXED_STRING = "wpa/wpa2" -WPA3_STRING = "wpa3" -WPA2_WPA3_MIXED_STRING = "wpa2/wpa3" -WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3" -ENT_STRING = "ent" -ENT_KEY_MGMT = "WPA-EAP" -WPA_PSK_KEY_MGMT = "WPA-PSK" -SAE_KEY_MGMT = "SAE" -DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE" -SECURITY_STRING_TO_SECURITY_MODE_INT = { - WPA_STRING: WPA1, - WPA2_STRING: WPA2, - WPA_MIXED_STRING: MIXED, - WPA3_STRING: WPA3, - WPA2_WPA3_MIXED_STRING: WPA3, - WPA_WPA2_WPA3_MIXED_STRING: MIXED, - WEP_STRING: WEP, - ENT_STRING: ENT, -} -SECURITY_STRING_TO_WPA_KEY_MGMT = { - WPA_STRING: WPA_PSK_KEY_MGMT, - WPA2_STRING: WPA_PSK_KEY_MGMT, - WPA_MIXED_STRING: 
WPA_PSK_KEY_MGMT, - WPA3_STRING: SAE_KEY_MGMT, - WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT, - WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT, -} -WPA3_MODE_STRINGS = {WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING} - -SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = { - WEP_STRING: WEP_STRING, - WPA_STRING: WPA_STRING, - WPA2_STRING: WPA2_STRING, - WPA_MIXED_STRING: WPA2_STRING, - WPA3_STRING: WPA3_STRING, - WPA2_WPA3_MIXED_STRING: WPA3_STRING, - WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING, -} - -IEEE8021X = 1 -WLAN0_STRING = "wlan0" -WLAN1_STRING = "wlan1" -WLAN2_STRING = "wlan2" -WLAN3_STRING = "wlan3" -WLAN0_GALE = "wlan-2400mhz" -WLAN1_GALE = "wlan-5000mhz" -WEP_DEFAULT_KEY = 0 -WEP_HEX_LENGTH = [10, 26, 32, 58] -WEP_STR_LENGTH = [5, 13, 16] -WEP_DEFAULT_STR_LENGTH = 13 -AP_DEFAULT_CHANNEL_2G = 6 -AP_DEFAULT_CHANNEL_5G = 36 -AP_DEFAULT_MAX_SSIDS_2G = 8 -AP_DEFAULT_MAX_SSIDS_5G = 8 -AP_SSID_LENGTH_2G = 8 -AP_SSID_MIN_LENGTH_2G = 1 -AP_SSID_MAX_LENGTH_2G = 32 -AP_PASSPHRASE_LENGTH_2G = 10 -AP_SSID_LENGTH_5G = 8 -AP_SSID_MIN_LENGTH_5G = 1 -AP_SSID_MAX_LENGTH_5G = 32 -AP_PASSPHRASE_LENGTH_5G = 10 -INTERFACE_2G_LIST = [WLAN0_STRING, WLAN0_GALE] -INTERFACE_5G_LIST = [WLAN1_STRING, WLAN1_GALE] -HIGH_BEACON_INTERVAL = 300 -LOW_BEACON_INTERVAL = 100 -HIGH_DTIM = 3 -LOW_DTIM = 1 - -# A mapping of frequency to channel number. This includes some -# frequencies used outside the US. -CHANNEL_MAP = { - 2412: 1, - 2417: 2, - 2422: 3, - 2427: 4, - 2432: 5, - 2437: 6, - 2442: 7, - 2447: 8, - 2452: 9, - 2457: 10, - 2462: 11, - # 12, 13 are only legitimate outside the US. - 2467: 12, - 2472: 13, - # 14 is for Japan, DSSS and CCK only. - 2484: 14, - # 34 valid in Japan. - 5170: 34, - # 36-116 valid in the US, except 38, 42, and 46, which have - # mixed international support. - 5180: 36, - 5190: 38, - 5200: 40, - 5210: 42, - 5220: 44, - 5230: 46, - 5240: 48, - # DFS channels. 
- 5260: 52, - 5280: 56, - 5300: 60, - 5320: 64, - 5500: 100, - 5520: 104, - 5540: 108, - 5560: 112, - 5580: 116, - # 120, 124, 128 valid in Europe/Japan. - 5600: 120, - 5620: 124, - 5640: 128, - # 132+ valid in US. - 5660: 132, - 5680: 136, - 5700: 140, - # 144 is supported by a subset of WiFi chips - # (e.g. bcm4354, but not ath9k). - 5720: 144, - # End DFS channels. - 5745: 149, - 5755: 151, - 5765: 153, - 5775: 155, - 5795: 159, - 5785: 157, - 5805: 161, - 5825: 165, -} -FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()} - -US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] -US_CHANNELS_5G = [ - 36, - 40, - 44, - 48, - 52, - 56, - 60, - 64, - 100, - 104, - 108, - 112, - 116, - 120, - 124, - 128, - 132, - 136, - 140, - 144, - 149, - 153, - 157, - 161, - 165, -] - -LOWEST_5G_CHANNEL = 36 - -MODE_11A = "a" -MODE_11B = "b" -MODE_11G = "g" -MODE_11N_MIXED = "n-mixed" -MODE_11N_PURE = "n-only" -MODE_11AC_MIXED = "ac-mixed" -MODE_11AC_PURE = "ac-only" - -N_CAPABILITY_LDPC = object() -N_CAPABILITY_HT20 = object() -N_CAPABILITY_HT40_PLUS = object() -N_CAPABILITY_HT40_MINUS = object() -N_CAPABILITY_GREENFIELD = object() -N_CAPABILITY_SGI20 = object() -N_CAPABILITY_SGI40 = object() -N_CAPABILITY_TX_STBC = object() -N_CAPABILITY_RX_STBC1 = object() -N_CAPABILITY_RX_STBC12 = object() -N_CAPABILITY_RX_STBC123 = object() -N_CAPABILITY_DSSS_CCK_40 = object() -N_CAPABILITY_LSIG_TXOP_PROT = object() -N_CAPABILITY_40_INTOLERANT = object() -N_CAPABILITY_MAX_AMSDU_7935 = object() -N_CAPABILITY_DELAY_BLOCK_ACK = object() -N_CAPABILITY_SMPS_STATIC = object() -N_CAPABILITY_SMPS_DYNAMIC = object() -N_CAPABILITIES_MAPPING = { - N_CAPABILITY_LDPC: "[LDPC]", - N_CAPABILITY_HT20: "[HT20]", - N_CAPABILITY_HT40_PLUS: "[HT40+]", - N_CAPABILITY_HT40_MINUS: "[HT40-]", - N_CAPABILITY_GREENFIELD: "[GF]", - N_CAPABILITY_SGI20: "[SHORT-GI-20]", - N_CAPABILITY_SGI40: "[SHORT-GI-40]", - N_CAPABILITY_TX_STBC: "[TX-STBC]", - N_CAPABILITY_RX_STBC1: "[RX-STBC1]", - N_CAPABILITY_RX_STBC12: 
"[RX-STBC12]", - N_CAPABILITY_RX_STBC123: "[RX-STBC123]", - N_CAPABILITY_DSSS_CCK_40: "[DSSS_CCK-40]", - N_CAPABILITY_LSIG_TXOP_PROT: "[LSIG-TXOP-PROT]", - N_CAPABILITY_40_INTOLERANT: "[40-INTOLERANT]", - N_CAPABILITY_MAX_AMSDU_7935: "[MAX-AMSDU-7935]", - N_CAPABILITY_DELAY_BLOCK_ACK: "[DELAYED-BA]", - N_CAPABILITY_SMPS_STATIC: "[SMPS-STATIC]", - N_CAPABILITY_SMPS_DYNAMIC: "[SMPS-DYNAMIC]", -} -N_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in N_CAPABILITIES_MAPPING.items()} -N_CAPABILITY_HT40_MINUS_CHANNELS = object() -N_CAPABILITY_HT40_PLUS_CHANNELS = object() -AC_CAPABILITY_VHT160 = object() -AC_CAPABILITY_VHT160_80PLUS80 = object() -AC_CAPABILITY_RXLDPC = object() -AC_CAPABILITY_SHORT_GI_80 = object() -AC_CAPABILITY_SHORT_GI_160 = object() -AC_CAPABILITY_TX_STBC_2BY1 = object() -AC_CAPABILITY_RX_STBC_1 = object() -AC_CAPABILITY_RX_STBC_12 = object() -AC_CAPABILITY_RX_STBC_123 = object() -AC_CAPABILITY_RX_STBC_1234 = object() -AC_CAPABILITY_SU_BEAMFORMER = object() -AC_CAPABILITY_SU_BEAMFORMEE = object() -AC_CAPABILITY_BF_ANTENNA_2 = object() -AC_CAPABILITY_BF_ANTENNA_3 = object() -AC_CAPABILITY_BF_ANTENNA_4 = object() -AC_CAPABILITY_SOUNDING_DIMENSION_2 = object() -AC_CAPABILITY_SOUNDING_DIMENSION_3 = object() -AC_CAPABILITY_SOUNDING_DIMENSION_4 = object() -AC_CAPABILITY_MU_BEAMFORMER = object() -AC_CAPABILITY_MU_BEAMFORMEE = object() -AC_CAPABILITY_VHT_TXOP_PS = object() -AC_CAPABILITY_HTC_VHT = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6 = object() -AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7 = object() -AC_CAPABILITY_VHT_LINK_ADAPT2 = object() -AC_CAPABILITY_VHT_LINK_ADAPT3 = object() -AC_CAPABILITY_RX_ANTENNA_PATTERN = object() -AC_CAPABILITY_TX_ANTENNA_PATTERN = object() 
-AC_CAPABILITY_MAX_MPDU_7991 = object() -AC_CAPABILITY_MAX_MPDU_11454 = object() -AC_CAPABILITIES_MAPPING = { - AC_CAPABILITY_VHT160: "[VHT160]", - AC_CAPABILITY_VHT160_80PLUS80: "[VHT160-80PLUS80]", - AC_CAPABILITY_RXLDPC: "[RXLDPC]", - AC_CAPABILITY_SHORT_GI_80: "[SHORT-GI-80]", - AC_CAPABILITY_SHORT_GI_160: "[SHORT-GI-160]", - AC_CAPABILITY_TX_STBC_2BY1: "[TX-STBC-2BY1]", - AC_CAPABILITY_RX_STBC_1: "[RX-STBC-1]", - AC_CAPABILITY_RX_STBC_12: "[RX-STBC-12]", - AC_CAPABILITY_RX_STBC_123: "[RX-STBC-123]", - AC_CAPABILITY_RX_STBC_1234: "[RX-STBC-1234]", - AC_CAPABILITY_SU_BEAMFORMER: "[SU-BEAMFORMER]", - AC_CAPABILITY_SU_BEAMFORMEE: "[SU-BEAMFORMEE]", - AC_CAPABILITY_BF_ANTENNA_2: "[BF-ANTENNA-2]", - AC_CAPABILITY_BF_ANTENNA_3: "[BF-ANTENNA-3]", - AC_CAPABILITY_BF_ANTENNA_4: "[BF-ANTENNA-4]", - AC_CAPABILITY_SOUNDING_DIMENSION_2: "[SOUNDING-DIMENSION-2]", - AC_CAPABILITY_SOUNDING_DIMENSION_3: "[SOUNDING-DIMENSION-3]", - AC_CAPABILITY_SOUNDING_DIMENSION_4: "[SOUNDING-DIMENSION-4]", - AC_CAPABILITY_MU_BEAMFORMER: "[MU-BEAMFORMER]", - AC_CAPABILITY_MU_BEAMFORMEE: "[MU-BEAMFORMEE]", - AC_CAPABILITY_VHT_TXOP_PS: "[VHT-TXOP-PS]", - AC_CAPABILITY_HTC_VHT: "[HTC-VHT]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: "[MAX-A-MPDU-LEN-EXP0]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: "[MAX-A-MPDU-LEN-EXP1]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: "[MAX-A-MPDU-LEN-EXP2]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: "[MAX-A-MPDU-LEN-EXP3]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: "[MAX-A-MPDU-LEN-EXP4]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: "[MAX-A-MPDU-LEN-EXP5]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: "[MAX-A-MPDU-LEN-EXP6]", - AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: "[MAX-A-MPDU-LEN-EXP7]", - AC_CAPABILITY_VHT_LINK_ADAPT2: "[VHT-LINK-ADAPT2]", - AC_CAPABILITY_VHT_LINK_ADAPT3: "[VHT-LINK-ADAPT3]", - AC_CAPABILITY_RX_ANTENNA_PATTERN: "[RX-ANTENNA-PATTERN]", - AC_CAPABILITY_TX_ANTENNA_PATTERN: "[TX-ANTENNA-PATTERN]", - AC_CAPABILITY_MAX_MPDU_11454: "[MAX-MPDU-11454]", - AC_CAPABILITY_MAX_MPDU_7991: 
"[MAX-MPDU-7991]", -} -AC_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in AC_CAPABILITIES_MAPPING.items()} -VHT_CHANNEL_WIDTH_40 = 0 -VHT_CHANNEL_WIDTH_80 = 1 -VHT_CHANNEL_WIDTH_160 = 2 -VHT_CHANNEL_WIDTH_80_80 = 3 - -VHT_CHANNEL = { - 40: VHT_CHANNEL_WIDTH_40, - 80: VHT_CHANNEL_WIDTH_80, - 160: VHT_CHANNEL_WIDTH_160, -} - -# This is a loose merging of the rules for US and EU regulatory -# domains as taken from IEEE Std 802.11-2012 Appendix E. For instance, -# we tolerate HT40 in channels 149-161 (not allowed in EU), but also -# tolerate HT40+ on channel 7 (not allowed in the US). We take the loose -# definition so that we don't prohibit testing in either domain. -HT40_ALLOW_MAP = { - N_CAPABILITY_HT40_MINUS_CHANNELS: tuple( - itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8), [153, 161]) - ), - N_CAPABILITY_HT40_PLUS_CHANNELS: tuple( - itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8), [149, 157]) - ), -} - -PMF_SUPPORT_DISABLED = 0 -PMF_SUPPORT_ENABLED = 1 -PMF_SUPPORT_REQUIRED = 2 -PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED, PMF_SUPPORT_REQUIRED) - -DRIVER_NAME = "nl80211" - -CENTER_CHANNEL_MAP = { - VHT_CHANNEL_WIDTH_40: { - "delta": 2, - "channels": ( - (36, 40), - (44, 48), - (52, 56), - (60, 64), - (100, 104), - (108, 112), - (116, 120), - (124, 128), - (132, 136), - (140, 144), - (149, 153), - (157, 161), - ), - }, - VHT_CHANNEL_WIDTH_80: { - "delta": 6, - "channels": ( - (36, 48), - (52, 64), - (100, 112), - (116, 128), - (132, 144), - (149, 161), - ), - }, - VHT_CHANNEL_WIDTH_160: {"delta": 14, "channels": ((36, 64), (100, 128))}, -} - -OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"} - -CCK_DATA_RATES = {"supported_rates": "10 20 55 110"} - -CCK_AND_OFDM_DATA_RATES = { - "supported_rates": "10 20 55 110 60 90 120 180 240 360 480 540" -} - -OFDM_ONLY_BASIC_RATES = {"basic_rates": "60 120 240"} - -CCK_AND_OFDM_BASIC_RATES = {"basic_rates": "10 20 55 110"} - -WEP_AUTH = { - "open": 
{"auth_algs": 1}, - "shared": {"auth_algs": 2}, - "open_and_shared": {"auth_algs": 3}, -} - -WMM_11B_DEFAULT_PARAMS = { - "wmm_ac_bk_cwmin": 5, - "wmm_ac_bk_cwmax": 10, - "wmm_ac_bk_aifs": 7, - "wmm_ac_bk_txop_limit": 0, - "wmm_ac_be_aifs": 3, - "wmm_ac_be_cwmin": 5, - "wmm_ac_be_cwmax": 7, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 2, - "wmm_ac_vi_cwmin": 4, - "wmm_ac_vi_cwmax": 5, - "wmm_ac_vi_txop_limit": 188, - "wmm_ac_vo_aifs": 2, - "wmm_ac_vo_cwmin": 3, - "wmm_ac_vo_cwmax": 4, - "wmm_ac_vo_txop_limit": 102, -} - -WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = { - "wmm_ac_bk_cwmin": 4, - "wmm_ac_bk_cwmax": 10, - "wmm_ac_bk_aifs": 7, - "wmm_ac_bk_txop_limit": 0, - "wmm_ac_be_aifs": 3, - "wmm_ac_be_cwmin": 4, - "wmm_ac_be_cwmax": 10, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 2, - "wmm_ac_vi_cwmin": 3, - "wmm_ac_vi_cwmax": 4, - "wmm_ac_vi_txop_limit": 94, - "wmm_ac_vo_aifs": 2, - "wmm_ac_vo_cwmin": 2, - "wmm_ac_vo_cwmax": 3, - "wmm_ac_vo_txop_limit": 47, -} - -WMM_NON_DEFAULT_PARAMS = { - "wmm_ac_bk_cwmin": 5, - "wmm_ac_bk_cwmax": 9, - "wmm_ac_bk_aifs": 3, - "wmm_ac_bk_txop_limit": 94, - "wmm_ac_be_aifs": 2, - "wmm_ac_be_cwmin": 2, - "wmm_ac_be_cwmax": 8, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 1, - "wmm_ac_vi_cwmin": 7, - "wmm_ac_vi_cwmax": 10, - "wmm_ac_vi_txop_limit": 47, - "wmm_ac_vo_aifs": 1, - "wmm_ac_vo_cwmin": 6, - "wmm_ac_vo_cwmax": 10, - "wmm_ac_vo_txop_limit": 94, -} - -WMM_DEGRADED_VO_PARAMS = { - "wmm_ac_bk_cwmin": 7, - "wmm_ac_bk_cwmax": 15, - "wmm_ac_bk_aifs": 2, - "wmm_ac_bk_txop_limit": 0, - "wmm_ac_be_aifs": 2, - "wmm_ac_be_cwmin": 7, - "wmm_ac_be_cwmax": 15, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 2, - "wmm_ac_vi_cwmin": 7, - "wmm_ac_vi_cwmax": 15, - "wmm_ac_vi_txop_limit": 94, - "wmm_ac_vo_aifs": 10, - "wmm_ac_vo_cwmin": 7, - "wmm_ac_vo_cwmax": 15, - "wmm_ac_vo_txop_limit": 47, -} - -WMM_DEGRADED_VI_PARAMS = { - "wmm_ac_bk_cwmin": 7, - "wmm_ac_bk_cwmax": 15, - "wmm_ac_bk_aifs": 2, - "wmm_ac_bk_txop_limit": 0, - 
"wmm_ac_be_aifs": 2, - "wmm_ac_be_cwmin": 7, - "wmm_ac_be_cwmax": 15, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 10, - "wmm_ac_vi_cwmin": 7, - "wmm_ac_vi_cwmax": 15, - "wmm_ac_vi_txop_limit": 94, - "wmm_ac_vo_aifs": 2, - "wmm_ac_vo_cwmin": 7, - "wmm_ac_vo_cwmax": 15, - "wmm_ac_vo_txop_limit": 47, -} - -WMM_IMPROVE_BE_PARAMS = { - "wmm_ac_bk_cwmin": 7, - "wmm_ac_bk_cwmax": 15, - "wmm_ac_bk_aifs": 10, - "wmm_ac_bk_txop_limit": 0, - "wmm_ac_be_aifs": 2, - "wmm_ac_be_cwmin": 7, - "wmm_ac_be_cwmax": 15, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 10, - "wmm_ac_vi_cwmin": 7, - "wmm_ac_vi_cwmax": 15, - "wmm_ac_vi_txop_limit": 94, - "wmm_ac_vo_aifs": 10, - "wmm_ac_vo_cwmin": 7, - "wmm_ac_vo_cwmax": 15, - "wmm_ac_vo_txop_limit": 47, -} - -WMM_IMPROVE_BK_PARAMS = { - "wmm_ac_bk_cwmin": 7, - "wmm_ac_bk_cwmax": 15, - "wmm_ac_bk_aifs": 2, - "wmm_ac_bk_txop_limit": 0, - "wmm_ac_be_aifs": 10, - "wmm_ac_be_cwmin": 7, - "wmm_ac_be_cwmax": 15, - "wmm_ac_be_txop_limit": 0, - "wmm_ac_vi_aifs": 10, - "wmm_ac_vi_cwmin": 7, - "wmm_ac_vi_cwmax": 15, - "wmm_ac_vi_txop_limit": 94, - "wmm_ac_vo_aifs": 10, - "wmm_ac_vo_cwmin": 7, - "wmm_ac_vo_cwmax": 15, - "wmm_ac_vo_txop_limit": 47, -} - -WMM_ACM_BK = {"wmm_ac_bk_acm": 1} -WMM_ACM_BE = {"wmm_ac_be_acm": 1} -WMM_ACM_VI = {"wmm_ac_vi_acm": 1} -WMM_ACM_VO = {"wmm_ac_vo_acm": 1} - -UAPSD_ENABLED = {"uapsd_advertisement_enabled": 1} - -UTF_8_SSID = {"utf8_ssid": 1} - -ENABLE_RRM_BEACON_REPORT = {"rrm_beacon_report": 1} -ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1} - -# Wireless Network Management (AKA 802.11v) features. 
-ENABLE_WNM_TIME_ADVERTISEMENT = {"time_advertisement": 2, "time_zone": "EST5"} -ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1} -ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1} -ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1} -ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {"na_mcast_to_ucast": 1} - -VENDOR_IE = { - "correct_length_beacon": {"vendor_elements": "dd0411223301"}, - "too_short_length_beacon": {"vendor_elements": "dd0311223301"}, - "too_long_length_beacon": {"vendor_elements": "dd0511223301"}, - "zero_length_beacon_with_data": {"vendor_elements": "dd0011223301"}, - "zero_length_beacon_without_data": {"vendor_elements": "dd00"}, - "simliar_to_wpa": {"vendor_elements": "dd040050f203"}, - "correct_length_association_response": {"assocresp_elements": "dd0411223301"}, - "too_short_length_association_response": {"assocresp_elements": "dd0311223301"}, - "too_long_length_association_response": {"assocresp_elements": "dd0511223301"}, - "zero_length_association_response_with_data": { - "assocresp_elements": "dd0011223301" - }, - "zero_length_association_response_without_data": {"assocresp_elements": "dd00"}, -} - -ENABLE_IEEE80211D = {"ieee80211d": 1} - -COUNTRY_STRING = { - "ALL": {"country3": "0x20"}, - "OUTDOOR": {"country3": "0x4f"}, - "INDOOR": {"country3": "0x49"}, - "NONCOUNTRY": {"country3": "0x58"}, - "GLOBAL": {"country3": "0x04"}, -} - -COUNTRY_CODE = { - "AFGHANISTAN": {"country_code": "AF"}, - "ALAND_ISLANDS": {"country_code": "AX"}, - "ALBANIA": {"country_code": "AL"}, - "ALGERIA": {"country_code": "DZ"}, - "AMERICAN_SAMOA": {"country_code": "AS"}, - "ANDORRA": {"country_code": "AD"}, - "ANGOLA": {"country_code": "AO"}, - "ANGUILLA": {"country_code": "AI"}, - "ANTARCTICA": {"country_code": "AQ"}, - "ANTIGUA_AND_BARBUDA": {"country_code": "AG"}, - "ARGENTINA": {"country_code": "AR"}, - "ARMENIA": {"country_code": "AM"}, - "ARUBA": {"country_code": "AW"}, - "AUSTRALIA": {"country_code": "AU"}, - "AUSTRIA": {"country_code": "AT"}, - 
"AZERBAIJAN": {"country_code": "AZ"}, - "BAHAMAS": {"country_code": "BS"}, - "BAHRAIN": {"country_code": "BH"}, - "BANGLADESH": {"country_code": "BD"}, - "BARBADOS": {"country_code": "BB"}, - "BELARUS": {"country_code": "BY"}, - "BELGIUM": {"country_code": "BE"}, - "BELIZE": {"country_code": "BZ"}, - "BENIN": {"country_code": "BJ"}, - "BERMUDA": {"country_code": "BM"}, - "BHUTAN": {"country_code": "BT"}, - "BOLIVIA": {"country_code": "BO"}, - "BONAIRE": {"country_code": "BQ"}, - "BOSNIA_AND_HERZEGOVINA": {"country_code": "BA"}, - "BOTSWANA": {"country_code": "BW"}, - "BOUVET_ISLAND": {"country_code": "BV"}, - "BRAZIL": {"country_code": "BR"}, - "BRITISH_INDIAN_OCEAN_TERRITORY": {"country_code": "IO"}, - "BRUNEI_DARUSSALAM": {"country_code": "BN"}, - "BULGARIA": {"country_code": "BG"}, - "BURKINA_FASO": {"country_code": "BF"}, - "BURUNDI": {"country_code": "BI"}, - "CAMBODIA": {"country_code": "KH"}, - "CAMEROON": {"country_code": "CM"}, - "CANADA": {"country_code": "CA"}, - "CAPE_VERDE": {"country_code": "CV"}, - "CAYMAN_ISLANDS": {"country_code": "KY"}, - "CENTRAL_AFRICAN_REPUBLIC": {"country_code": "CF"}, - "CHAD": {"country_code": "TD"}, - "CHILE": {"country_code": "CL"}, - "CHINA": {"country_code": "CN"}, - "CHRISTMAS_ISLAND": {"country_code": "CX"}, - "COCOS_ISLANDS": {"country_code": "CC"}, - "COLOMBIA": {"country_code": "CO"}, - "COMOROS": {"country_code": "KM"}, - "CONGO": {"country_code": "CG"}, - "DEMOCRATIC_REPUBLIC_CONGO": {"country_code": "CD"}, - "COOK_ISLANDS": {"country_code": "CK"}, - "COSTA_RICA": {"country_code": "CR"}, - "COTE_D_IVOIRE": {"country_code": "CI"}, - "CROATIA": {"country_code": "HR"}, - "CUBA": {"country_code": "CU"}, - "CURACAO": {"country_code": "CW"}, - "CYPRUS": {"country_code": "CY"}, - "CZECH_REPUBLIC": {"country_code": "CZ"}, - "DENMARK": {"country_code": "DK"}, - "DJIBOUTI": {"country_code": "DJ"}, - "DOMINICA": {"country_code": "DM"}, - "DOMINICAN_REPUBLIC": {"country_code": "DO"}, - "ECUADOR": {"country_code": "EC"}, - 
"EGYPT": {"country_code": "EG"}, - "EL_SALVADOR": {"country_code": "SV"}, - "EQUATORIAL_GUINEA": {"country_code": "GQ"}, - "ERITREA": {"country_code": "ER"}, - "ESTONIA": {"country_code": "EE"}, - "ETHIOPIA": {"country_code": "ET"}, - "FALKLAND_ISLANDS_(MALVINAS)": {"country_code": "FK"}, - "FAROE_ISLANDS": {"country_code": "FO"}, - "FIJI": {"country_code": "FJ"}, - "FINLAND": {"country_code": "FI"}, - "FRANCE": {"country_code": "FR"}, - "FRENCH_GUIANA": {"country_code": "GF"}, - "FRENCH_POLYNESIA": {"country_code": "PF"}, - "FRENCH_SOUTHERN_TERRITORIES": {"country_code": "TF"}, - "GABON": {"country_code": "GA"}, - "GAMBIA": {"country_code": "GM"}, - "GEORGIA": {"country_code": "GE"}, - "GERMANY": {"country_code": "DE"}, - "GHANA": {"country_code": "GH"}, - "GIBRALTAR": {"country_code": "GI"}, - "GREECE": {"country_code": "GR"}, - "GREENLAND": {"country_code": "GL"}, - "GRENADA": {"country_code": "GD"}, - "GUADELOUPE": {"country_code": "GP"}, - "GUAM": {"country_code": "GU"}, - "GUATEMALA": {"country_code": "GT"}, - "GUERNSEY": {"country_code": "GG"}, - "GUINEA": {"country_code": "GN"}, - "GUINEA-BISSAU": {"country_code": "GW"}, - "GUYANA": {"country_code": "GY"}, - "HAITI": {"country_code": "HT"}, - "HEARD_ISLAND_AND_MCDONALD_ISLANDS": {"country_code": "HM"}, - "VATICAN_CITY_STATE": {"country_code": "VA"}, - "HONDURAS": {"country_code": "HN"}, - "HONG_KONG": {"country_code": "HK"}, - "HUNGARY": {"country_code": "HU"}, - "ICELAND": {"country_code": "IS"}, - "INDIA": {"country_code": "IN"}, - "INDONESIA": {"country_code": "ID"}, - "IRAN": {"country_code": "IR"}, - "IRAQ": {"country_code": "IQ"}, - "IRELAND": {"country_code": "IE"}, - "ISLE_OF_MAN": {"country_code": "IM"}, - "ISRAEL": {"country_code": "IL"}, - "ITALY": {"country_code": "IT"}, - "JAMAICA": {"country_code": "JM"}, - "JAPAN": {"country_code": "JP"}, - "JERSEY": {"country_code": "JE"}, - "JORDAN": {"country_code": "JO"}, - "KAZAKHSTAN": {"country_code": "KZ"}, - "KENYA": {"country_code": "KE"}, - 
"KIRIBATI": {"country_code": "KI"}, - "DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA": {"country_code": "KP"}, - "REPUBLIC_OF_KOREA": {"country_code": "KR"}, - "KUWAIT": {"country_code": "KW"}, - "KYRGYZSTAN": {"country_code": "KG"}, - "LAO": {"country_code": "LA"}, - "LATVIA": {"country_code": "LV"}, - "LEBANON": {"country_code": "LB"}, - "LESOTHO": {"country_code": "LS"}, - "LIBERIA": {"country_code": "LR"}, - "LIBYA": {"country_code": "LY"}, - "LIECHTENSTEIN": {"country_code": "LI"}, - "LITHUANIA": {"country_code": "LT"}, - "LUXEMBOURG": {"country_code": "LU"}, - "MACAO": {"country_code": "MO"}, - "MACEDONIA": {"country_code": "MK"}, - "MADAGASCAR": {"country_code": "MG"}, - "MALAWI": {"country_code": "MW"}, - "MALAYSIA": {"country_code": "MY"}, - "MALDIVES": {"country_code": "MV"}, - "MALI": {"country_code": "ML"}, - "MALTA": {"country_code": "MT"}, - "MARSHALL_ISLANDS": {"country_code": "MH"}, - "MARTINIQUE": {"country_code": "MQ"}, - "MAURITANIA": {"country_code": "MR"}, - "MAURITIUS": {"country_code": "MU"}, - "MAYOTTE": {"country_code": "YT"}, - "MEXICO": {"country_code": "MX"}, - "MICRONESIA": {"country_code": "FM"}, - "MOLDOVA": {"country_code": "MD"}, - "MONACO": {"country_code": "MC"}, - "MONGOLIA": {"country_code": "MN"}, - "MONTENEGRO": {"country_code": "ME"}, - "MONTSERRAT": {"country_code": "MS"}, - "MOROCCO": {"country_code": "MA"}, - "MOZAMBIQUE": {"country_code": "MZ"}, - "MYANMAR": {"country_code": "MM"}, - "NAMIBIA": {"country_code": "NA"}, - "NAURU": {"country_code": "NR"}, - "NEPAL": {"country_code": "NP"}, - "NETHERLANDS": {"country_code": "NL"}, - "NEW_CALEDONIA": {"country_code": "NC"}, - "NEW_ZEALAND": {"country_code": "NZ"}, - "NICARAGUA": {"country_code": "NI"}, - "NIGER": {"country_code": "NE"}, - "NIGERIA": {"country_code": "NG"}, - "NIUE": {"country_code": "NU"}, - "NORFOLK_ISLAND": {"country_code": "NF"}, - "NORTHERN_MARIANA_ISLANDS": {"country_code": "MP"}, - "NORWAY": {"country_code": "NO"}, - "OMAN": {"country_code": "OM"}, - "PAKISTAN": 
{"country_code": "PK"}, - "PALAU": {"country_code": "PW"}, - "PALESTINE": {"country_code": "PS"}, - "PANAMA": {"country_code": "PA"}, - "PAPUA_NEW_GUINEA": {"country_code": "PG"}, - "PARAGUAY": {"country_code": "PY"}, - "PERU": {"country_code": "PE"}, - "PHILIPPINES": {"country_code": "PH"}, - "PITCAIRN": {"country_code": "PN"}, - "POLAND": {"country_code": "PL"}, - "PORTUGAL": {"country_code": "PT"}, - "PUERTO_RICO": {"country_code": "PR"}, - "QATAR": {"country_code": "QA"}, - "RÉUNION": {"country_code": "RE"}, - "ROMANIA": {"country_code": "RO"}, - "RUSSIAN_FEDERATION": {"country_code": "RU"}, - "RWANDA": {"country_code": "RW"}, - "SAINT_BARTHELEMY": {"country_code": "BL"}, - "SAINT_KITTS_AND_NEVIS": {"country_code": "KN"}, - "SAINT_LUCIA": {"country_code": "LC"}, - "SAINT_MARTIN": {"country_code": "MF"}, - "SAINT_PIERRE_AND_MIQUELON": {"country_code": "PM"}, - "SAINT_VINCENT_AND_THE_GRENADINES": {"country_code": "VC"}, - "SAMOA": {"country_code": "WS"}, - "SAN_MARINO": {"country_code": "SM"}, - "SAO_TOME_AND_PRINCIPE": {"country_code": "ST"}, - "SAUDI_ARABIA": {"country_code": "SA"}, - "SENEGAL": {"country_code": "SN"}, - "SERBIA": {"country_code": "RS"}, - "SEYCHELLES": {"country_code": "SC"}, - "SIERRA_LEONE": {"country_code": "SL"}, - "SINGAPORE": {"country_code": "SG"}, - "SINT_MAARTEN": {"country_code": "SX"}, - "SLOVAKIA": {"country_code": "SK"}, - "SLOVENIA": {"country_code": "SI"}, - "SOLOMON_ISLANDS": {"country_code": "SB"}, - "SOMALIA": {"country_code": "SO"}, - "SOUTH_AFRICA": {"country_code": "ZA"}, - "SOUTH_GEORGIA": {"country_code": "GS"}, - "SOUTH_SUDAN": {"country_code": "SS"}, - "SPAIN": {"country_code": "ES"}, - "SRI_LANKA": {"country_code": "LK"}, - "SUDAN": {"country_code": "SD"}, - "SURINAME": {"country_code": "SR"}, - "SVALBARD_AND_JAN_MAYEN": {"country_code": "SJ"}, - "SWAZILAND": {"country_code": "SZ"}, - "SWEDEN": {"country_code": "SE"}, - "SWITZERLAND": {"country_code": "CH"}, - "SYRIAN_ARAB_REPUBLIC": {"country_code": "SY"}, - 
"TAIWAN": {"country_code": "TW"}, - "TAJIKISTAN": {"country_code": "TJ"}, - "TANZANIA": {"country_code": "TZ"}, - "THAILAND": {"country_code": "TH"}, - "TIMOR-LESTE": {"country_code": "TL"}, - "TOGO": {"country_code": "TG"}, - "TOKELAU": {"country_code": "TK"}, - "TONGA": {"country_code": "TO"}, - "TRINIDAD_AND_TOBAGO": {"country_code": "TT"}, - "TUNISIA": {"country_code": "TN"}, - "TURKEY": {"country_code": "TR"}, - "TURKMENISTAN": {"country_code": "TM"}, - "TURKS_AND_CAICOS_ISLANDS": {"country_code": "TC"}, - "TUVALU": {"country_code": "TV"}, - "UGANDA": {"country_code": "UG"}, - "UKRAINE": {"country_code": "UA"}, - "UNITED_ARAB_EMIRATES": {"country_code": "AE"}, - "UNITED_KINGDOM": {"country_code": "GB"}, - "UNITED_STATES": {"country_code": "US"}, - "UNITED_STATES_MINOR_OUTLYING_ISLANDS": {"country_code": "UM"}, - "URUGUAY": {"country_code": "UY"}, - "UZBEKISTAN": {"country_code": "UZ"}, - "VANUATU": {"country_code": "VU"}, - "VENEZUELA": {"country_code": "VE"}, - "VIETNAM": {"country_code": "VN"}, - "VIRGIN_ISLANDS_BRITISH": {"country_code": "VG"}, - "VIRGIN_ISLANDS_US": {"country_code": "VI"}, - "WALLIS_AND_FUTUNA": {"country_code": "WF"}, - "WESTERN_SAHARA": {"country_code": "EH"}, - "YEMEN": {"country_code": "YE"}, - "ZAMBIA": {"country_code": "ZM"}, - "ZIMBABWE": {"country_code": "ZW"}, - "NON_COUNTRY": {"country_code": "XX"}, -} - -ALL_CHANNELS_2G = { - 1: {20, 40}, - 2: {20, 40}, - 3: {20, 40}, - 4: {20, 40}, - 5: {20, 40}, - 6: {20, 40}, - 7: {20, 40}, - 8: {20, 40}, - 9: {20, 40}, - 10: {20, 40}, - 11: {20, 40}, - 12: {20, 40}, - 13: {20, 40}, - 14: {20}, -} - -ALL_CHANNELS_5G = { - 36: {20, 40, 80}, - 40: {20, 40, 80}, - 44: {20, 40, 80}, - 48: {20, 40, 80}, - 52: {20, 40, 80}, - 56: {20, 40, 80}, - 60: {20, 40, 80}, - 64: {20, 40, 80}, - 100: {20, 40, 80}, - 104: {20, 40, 80}, - 108: {20, 40, 80}, - 112: {20, 40, 80}, - 116: {20, 40, 80}, - 120: {20, 40, 80}, - 124: {20, 40, 80}, - 128: {20, 40, 80}, - 132: {20, 40, 80}, - 136: {20, 40, 80}, - 140: 
{20, 40, 80}, - 144: {20, 40, 80}, - 149: {20, 40, 80}, - 153: {20, 40, 80}, - 157: {20, 40, 80}, - 161: {20, 40, 80}, - 165: {20}, -} - -ALL_CHANNELS = {**ALL_CHANNELS_2G, **ALL_CHANNELS_5G} - - -@unique -class WnmFeature(Enum): - """Wireless Network Management (AKA 802.11v) features hostapd supports.""" - - TIME_ADVERTISEMENT = auto() - WNM_SLEEP_MODE = auto() - BSS_TRANSITION_MANAGEMENT = auto() - PROXY_ARP = auto() - IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = auto()
diff --git a/src/antlion/controllers/ap_lib/hostapd_security.py b/src/antlion/controllers/ap_lib/hostapd_security.py deleted file mode 100644 index 69d5c2f..0000000 --- a/src/antlion/controllers/ap_lib/hostapd_security.py +++ /dev/null
@@ -1,154 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import string - -from typing import Dict, Optional, Union - -from antlion.controllers.ap_lib import hostapd_constants - - -class Security(object): - """The Security class for hostapd representing some of the security - settings that are allowed in hostapd. If needed more can be added. - """ - - def __init__( - self, - security_mode: Optional[str] = None, - password: Optional[str] = None, - wpa_cipher: str = hostapd_constants.WPA_DEFAULT_CIPHER, - wpa2_cipher: str = hostapd_constants.WPA2_DEFAULT_CIPER, - wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME, - wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT, - wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY, - radius_server_ip: Optional[str] = None, - radius_server_port: Optional[int] = None, - radius_server_secret: Optional[str] = None, - ) -> None: - """Gather all of the security settings for WPA-PSK. This could be - expanded later. - - Args: - security_mode: Type of security modes. - Options: wep, wpa, wpa2, wpa/wpa2, wpa3, wpa2/wpa3, - wpa/wpa2/wpa3 - password: The PSK or passphrase for the security mode. - wpa_cipher: The cipher to be used for wpa. - Options: TKIP, CCMP, TKIP CCMP - Default: TKIP - wpa2_cipher: The cipher to be used for wpa2. 
- Options: TKIP, CCMP, TKIP CCMP - Default: CCMP - wpa_group_rekey: How often to refresh the GTK regardless of network - changes. - Options: An integrer in seconds, None - Default: 600 seconds - wpa_strict_rekey: Whether to do a group key update when client - leaves the network or not. - Options: True, False - Default: True - wep_default_key: The wep key number to use when transmitting. - radius_server_ip: Radius server IP for Enterprise auth. - radius_server_port: Radius server port for Enterprise auth. - radius_server_secret: Radius server secret for Enterprise auth. - """ - self.security_mode_string = security_mode - self.wpa_cipher = wpa_cipher - self.wpa2_cipher = wpa2_cipher - self.wpa_group_rekey = wpa_group_rekey - self.wpa_strict_rekey = wpa_strict_rekey - self.wep_default_key = wep_default_key - self.radius_server_ip = radius_server_ip - self.radius_server_port = radius_server_port - self.radius_server_secret = radius_server_secret - self.security_mode = hostapd_constants.SECURITY_STRING_TO_SECURITY_MODE_INT.get( - security_mode, None - ) - if password: - if self.security_mode == hostapd_constants.WEP: - if len(password) in hostapd_constants.WEP_STR_LENGTH: - self.password = '"%s"' % password - elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all( - c in string.hexdigits for c in password - ): - self.password = password - else: - raise ValueError( - "WEP key must be a hex string of %s characters" - % hostapd_constants.WEP_HEX_LENGTH - ) - else: - if ( - len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH - or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH - ): - raise ValueError( - "Password must be a minumum of %s characters and a maximum of %s" - % ( - hostapd_constants.MIN_WPA_PSK_LENGTH, - hostapd_constants.MAX_WPA_PSK_LENGTH, - ) - ) - else: - self.password = password - - def generate_dict(self) -> Dict[str, Union[str, int]]: - """Returns: an ordered dictionary of settings""" - settings = collections.OrderedDict() - if 
self.security_mode is not None: - if self.security_mode == hostapd_constants.WEP: - settings["wep_default_key"] = self.wep_default_key - settings["wep_key" + str(self.wep_default_key)] = self.password - elif self.security_mode == hostapd_constants.ENT: - settings["auth_server_addr"] = self.radius_server_ip - settings["auth_server_port"] = self.radius_server_port - settings["auth_server_shared_secret"] = self.radius_server_secret - settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT - settings["ieee8021x"] = hostapd_constants.IEEE8021X - settings["wpa"] = hostapd_constants.WPA2 - else: - settings["wpa"] = self.security_mode - if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH: - settings["wpa_psk"] = self.password - else: - settings["wpa_passphrase"] = self.password - # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise - if ( - self.security_mode == hostapd_constants.WPA1 - or self.security_mode == hostapd_constants.MIXED - ): - settings["wpa_pairwise"] = self.wpa_cipher - # For wpa/wpa2, wpa2, wpa3, and wpa2/wpa3, and wpa/wpa2, wpa3, add rsn_pairwise - if ( - self.security_mode == hostapd_constants.WPA2 - or self.security_mode == hostapd_constants.MIXED - ): - settings["rsn_pairwise"] = self.wpa2_cipher - # Add wpa_key_mgmt based on security mode string - if ( - self.security_mode_string - in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT - ): - settings[ - "wpa_key_mgmt" - ] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[ - self.security_mode_string - ] - if self.wpa_group_rekey: - settings["wpa_group_rekey"] = self.wpa_group_rekey - if self.wpa_strict_rekey: - settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY - return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_utils.py b/src/antlion/controllers/ap_lib/hostapd_utils.py deleted file mode 100644 index 82331bf..0000000 --- a/src/antlion/controllers/ap_lib/hostapd_utils.py +++ /dev/null
@@ -1,99 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants - - -def generate_random_password(security_mode=None, length=None, hex=None): - """Generates a random password. Defaults to an 8 character ASCII password. - - Args: - security_mode: optional string, security type. Used to determine if - length should be WEP compatible (useful for generated tests to simply - pass in security mode) - length: optional int, length of password to generate. 
Defaults to 8, - unless security_mode is WEP, then 13 - hex: optional int, if True, generates a hex string, else ascii - """ - if hex: - generator_func = utils.rand_hex_str - else: - generator_func = utils.rand_ascii_str - - if length: - return generator_func(length) - if security_mode and security_mode.lower() == hostapd_constants.WEP_STRING: - return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH) - else: - return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH) - - -def verify_interface(interface, valid_interfaces): - """Raises error if interface is missing or invalid - Args: - interface: string of interface name - valid_interfaces: list of valid interface names - """ - if not interface: - raise ValueError("Required wlan interface is missing.") - if interface not in valid_interfaces: - raise ValueError("Invalid interface name was passed: %s" % interface) - - -def verify_security_mode(security_profile, valid_security_modes): - """Raises error if security mode is not in list of valid security modes. - - Args: - security_profile: a hostapd_security.Security object. - valid_security_modes: a list of valid security modes for a profile. Must - include None if open security is valid. - """ - if security_profile is None: - if None not in valid_security_modes: - raise ValueError("Open security is not allowed for this profile.") - elif security_profile.security_mode not in valid_security_modes: - raise ValueError( - "Invalid Security Mode: %s. " - "Valid Security Modes for this profile: %s." - % (security_profile.security_mode, valid_security_modes) - ) - - -def verify_cipher(security_profile, valid_ciphers): - """Raise error if cipher is not in list of valid ciphers. - - Args: - security_profile: a hostapd_security.Security object. - valid_ciphers: a list of valid ciphers for a profile. 
- """ - if security_profile is None: - raise ValueError("Security mode is open.") - elif security_profile.security_mode == hostapd_constants.WPA1: - if security_profile.wpa_cipher not in valid_ciphers: - raise ValueError( - "Invalid WPA Cipher: %s. " - "Valid WPA Ciphers for this profile: %s" - % (security_profile.wpa_cipher, valid_ciphers) - ) - elif security_profile.security_mode == hostapd_constants.WPA2: - if security_profile.wpa2_cipher not in valid_ciphers: - raise ValueError( - "Invalid WPA2 Cipher: %s. " - "Valid WPA2 Ciphers for this profile: %s" - % (security_profile.wpa2_cipher, valid_ciphers) - ) - else: - raise ValueError("Invalid Security Mode: %s" % security_profile.security_mode)
diff --git a/src/antlion/controllers/ap_lib/radio_measurement.py b/src/antlion/controllers/ap_lib/radio_measurement.py deleted file mode 100644 index 5c7f2e0..0000000 --- a/src/antlion/controllers/ap_lib/radio_measurement.py +++ /dev/null
@@ -1,246 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from enum import IntEnum, unique - - -@unique -class ApReachability(IntEnum): - """Neighbor Report AP Reachability values. - - See IEEE 802.11-2020 Figure 9-172. - """ - - NOT_REACHABLE = 1 - UNKNOWN = 2 - REACHABLE = 3 - - -class BssidInformationCapabilities: - """Representation of Neighbor Report BSSID Information Capabilities. - - See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4. - """ - - def __init__( - self, - spectrum_management: bool = False, - qos: bool = False, - apsd: bool = False, - radio_measurement: bool = False, - ): - """Create a capabilities object. - - Args: - spectrum_management: whether spectrum management is required. - qos: whether QoS is implemented. - apsd: whether APSD is implemented. - radio_measurement: whether radio measurement is activated. 
- """ - self._spectrum_management = spectrum_management - self._qos = qos - self._apsd = apsd - self._radio_measurement = radio_measurement - - def __index__(self) -> int: - """Convert to numeric representation of the field's bits.""" - return ( - self.spectrum_management << 5 - | self.qos << 4 - | self.apsd << 3 - | self.radio_measurement << 2 - ) - - @property - def spectrum_management(self) -> bool: - return self._spectrum_management - - @property - def qos(self) -> bool: - return self._qos - - @property - def apsd(self) -> bool: - return self._apsd - - @property - def radio_measurement(self) -> bool: - return self._radio_measurement - - -class BssidInformation: - """Representation of Neighbor Report BSSID Information field. - - BssidInformation contains info about a neighboring AP, to be included in a - neighbor report element. See IEEE 802.11-2020 Figure 9-337. - """ - - def __init__( - self, - ap_reachability: ApReachability = ApReachability.UNKNOWN, - security: bool = False, - key_scope: bool = False, - capabilities: BssidInformationCapabilities = BssidInformationCapabilities(), - mobility_domain: bool = False, - high_throughput: bool = False, - very_high_throughput: bool = False, - ftm: bool = False, - ): - """Create a BSSID Information object for a neighboring AP. - - Args: - ap_reachability: whether this AP is reachable by the STA that - requested the neighbor report. - security: whether this AP is known to support the same security - provisioning as used by the STA in its current association. - key_scope: whether this AP is known to have the same - authenticator as the AP sending the report. - capabilities: selected capabilities of this AP. - mobility_domain: whether the AP is including an MDE in its beacon - frames and the contents of that MDE are identical to the MDE - advertised by the AP sending the report. 
- high_throughput: whether the AP is an HT AP including the HT - Capabilities element in its Beacons, and that the contents of - that HT capabilities element are identical to the HT - capabilities element advertised by the AP sending the report. - very_high_throughput: whether the AP is a VHT AP and the VHT - capabilities element, if included as a subelement, is - identical in content to the VHT capabilities element included - in the AP’s beacon. - ftm: whether the AP is known to have the Fine Timing Measurement - Responder extended capability. - """ - self._ap_reachability = ap_reachability - self._security = security - self._key_scope = key_scope - self._capabilities = capabilities - self._mobility_domain = mobility_domain - self._high_throughput = high_throughput - self._very_high_throughput = very_high_throughput - self._ftm = ftm - - def __index__(self) -> int: - """Convert to numeric representation of the field's bits.""" - return ( - self._ap_reachability << 30 - | self.security << 29 - | self.key_scope << 28 - | int(self.capabilities) << 22 - | self.mobility_domain << 21 - | self.high_throughput << 20 - | self.very_high_throughput << 19 - | self.ftm << 18 - ) - - @property - def security(self) -> bool: - return self._security - - @property - def key_scope(self) -> bool: - return self._key_scope - - @property - def capabilities(self) -> BssidInformationCapabilities: - return self._capabilities - - @property - def mobility_domain(self) -> bool: - return self._mobility_domain - - @property - def high_throughput(self) -> bool: - return self._high_throughput - - @property - def very_high_throughput(self) -> bool: - return self._very_high_throughput - - @property - def ftm(self) -> bool: - return self._ftm - - -@unique -class PhyType(IntEnum): - """PHY type values, see dot11PhyType in 802.11-2020 Annex C.""" - - DSSS = 2 - OFDM = 4 - HRDSS = 5 - ERP = 6 - HT = 7 - DMG = 8 - VHT = 9 - TVHT = 10 - S1G = 11 - CDMG = 12 - CMMG = 13 - - -class NeighborReportElement: - 
"""Representation of Neighbor Report element. - - See IEEE 802.11-2020 9.4.2.36. - """ - - def __init__( - self, - bssid: str, - bssid_information: BssidInformation, - operating_class: int, - channel_number: int, - phy_type: PhyType, - ): - """Create a neighbor report element. - - Args: - bssid: MAC address of the neighbor. - bssid_information: BSSID Information of the neigbor. - operating_class: operating class of the neighbor. - channel_number: channel number of the neighbor. - phy_type: dot11PhyType of the neighbor. - """ - self._bssid = bssid - self._bssid_information = bssid_information - - # Operating Class, IEEE 802.11-2020 Annex E. - self._operating_class = operating_class - - self._channel_number = channel_number - - # PHY Type, IEEE 802.11-2020 Annex C. - self._phy_type = phy_type - - @property - def bssid(self) -> str: - return self._bssid - - @property - def bssid_information(self) -> BssidInformation: - return self._bssid_information - - @property - def operating_class(self) -> int: - return self._operating_class - - @property - def channel_number(self) -> int: - return self._channel_number - - @property - def phy_type(self) -> PhyType: - return self._phy_type
diff --git a/src/antlion/controllers/ap_lib/radvd.py b/src/antlion/controllers/ap_lib/radvd.py deleted file mode 100644 index 216ad0e..0000000 --- a/src/antlion/controllers/ap_lib/radvd.py +++ /dev/null
@@ -1,215 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import shlex -import tempfile -import time - -from typing import Any, Optional - -from antlion.controllers.ap_lib.radvd_config import RadvdConfig -from antlion.controllers.utils_lib.commands import shell -from antlion.libs.proc import job - - -class Error(Exception): - """An error caused by radvd.""" - - -class Radvd(object): - """Manages the radvd program. - - https://en.wikipedia.org/wiki/Radvd - This implements the Router Advertisement Daemon of IPv6 router addresses - and IPv6 routing prefixes using the Neighbor Discovery Protocol. - - Attributes: - config: The radvd configuration that is being used. - """ - - def __init__( - self, - runner: Any, - interface: str, - working_dir: Optional[str] = None, - radvd_binary: Optional[str] = None, - ) -> None: - """ - Args: - runner: Object that has run_async and run methods for executing - shell commands (e.g. connection.SshConnection) - interface: Name of the interface to use (eg. wlan0). - working_dir: Directory to work out of. - radvd_binary: Location of the radvd binary - """ - if not radvd_binary: - logging.debug( - "No radvd binary specified. " "Assuming radvd is in the path." 
- ) - radvd_binary = "radvd" - else: - logging.debug(f"Using radvd binary located at {radvd_binary}") - if working_dir is None and runner == job.run: - working_dir = tempfile.gettempdir() - else: - working_dir = "/tmp" - self._radvd_binary = radvd_binary - self._runner = runner - self._interface = interface - self._working_dir = working_dir - self.config: Optional[RadvdConfig] = None - self._shell = shell.ShellCommand(runner, working_dir) - self._log_file = f"{working_dir}/radvd-{self._interface}.log" - self._config_file = f"{working_dir}/radvd-{self._interface}.conf" - self._pid_file = f"{working_dir}/radvd-{self._interface}.pid" - self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}" - - def start(self, config: RadvdConfig, timeout: int = 60) -> None: - """Starts radvd - - Starts the radvd daemon and runs it in the background. - - Args: - config: Configs to start the radvd with. - timeout: Time to wait for radvd to come up. - - Returns: - True if the daemon could be started. Note that the daemon can still - start and not work. Invalid configurations can take a long amount - of time to be produced, and because the daemon runs indefinitely - it's impossible to wait on. If you need to check if configs are ok - then periodic checks to is_running and logs should be used. - """ - if self.is_alive(): - self.stop() - - self.config = config - - self._shell.delete_file(self._log_file) - self._shell.delete_file(self._config_file) - self._write_configs(self.config) - - command = ( - f"{self._radvd_binary} -C {shlex.quote(self._config_file)} " - f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 " - f'-l {self._log_file} > "{self._log_file}" 2>&1' - ) - self._runner.run_async(command) - - try: - self._wait_for_process(timeout=timeout) - except Error: - self.stop() - raise - - def stop(self): - """Kills the daemon if it is running.""" - self._shell.kill(self._ps_identifier) - - def is_alive(self): - """ - Returns: - True if the daemon is running. 
- """ - return self._shell.is_alive(self._ps_identifier) - - def pull_logs(self) -> str: - """Pulls the log files from where radvd is running. - - Returns: - A string of the radvd logs. - """ - # TODO: Auto pulling of logs when stop is called. - return self._shell.read_file(self._log_file) - - def _wait_for_process(self, timeout: int = 60) -> None: - """Waits for the process to come up. - - Waits until the radvd process is found running, or there is - a timeout. If the program never comes up then the log file - will be scanned for errors. - - Raises: See _scan_for_errors - """ - start_time = time.time() - while time.time() - start_time < timeout and not self.is_alive(): - time.sleep(0.1) - self._scan_for_errors(False) - self._scan_for_errors(True) - - def _scan_for_errors(self, should_be_up: bool) -> None: - """Scans the radvd log for any errors. - - Args: - should_be_up: If true then radvd program is expected to be alive. - If it is found not alive while this is true an error - is thrown. - - Raises: - Error: Raised when a radvd error is found. - """ - # Store this so that all other errors have priority. - is_dead = not self.is_alive() - - exited_prematurely = self._shell.search_file("Exiting", self._log_file) - if exited_prematurely: - raise Error("Radvd exited prematurely.", self) - if should_be_up and is_dead: - raise Error("Radvd failed to start", self) - - def _write_configs(self, config: RadvdConfig) -> None: - """Writes the configs to the radvd config file. - - Args: - config: a RadvdConfig object. 
- """ - self._shell.delete_file(self._config_file) - conf = config.package_configs() - lines = ["interface %s {" % self._interface] - for interface_option_key, interface_option in conf["interface_options"].items(): - lines.append( - "\t%s %s;" % (str(interface_option_key), str(interface_option)) - ) - lines.append("\tprefix %s" % conf["prefix"]) - lines.append("\t{") - for prefix_option in conf["prefix_options"].items(): - lines.append("\t\t%s;" % " ".join(map(str, prefix_option))) - lines.append("\t};") - if conf["clients"]: - lines.append("\tclients") - lines.append("\t{") - for client in conf["clients"]: - lines.append("\t\t%s;" % client) - lines.append("\t};") - if conf["route"]: - lines.append("\troute %s {" % conf["route"]) - for route_option in conf["route_options"].items(): - lines.append("\t\t%s;" % " ".join(map(str, route_option))) - lines.append("\t};") - if conf["rdnss"]: - lines.append( - "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]]) - ) - for rdnss_option in conf["rdnss_options"].items(): - lines.append("\t\t%s;" % " ".join(map(str, rdnss_option))) - lines.append("\t};") - lines.append("};") - output_config = "\n".join(lines) - logging.info("Writing %s" % self._config_file) - logging.debug("******************Start*******************") - logging.debug("\n%s" % output_config) - logging.debug("*******************End********************") - - self._shell.write_file(self._config_file, output_config)
diff --git a/src/antlion/controllers/ap_lib/radvd_config.py b/src/antlion/controllers/ap_lib/radvd_config.py deleted file mode 100644 index 647df82..0000000 --- a/src/antlion/controllers/ap_lib/radvd_config.py +++ /dev/null
@@ -1,314 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Any, List, Optional - -from antlion.controllers.ap_lib import radvd_constants - -import collections - - -class RadvdConfig(object): - """The root settings for the router advertisement daemon. - - All the settings for a router advertisement daemon. - """ - - def __init__( - self, - prefix: str = radvd_constants.DEFAULT_PREFIX, - clients: List[str] = [], - route: Optional[Any] = None, - rdnss: List[str] = [], - ignore_if_missing: Optional[str] = None, - adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON, - unicast_only: Optional[str] = None, - max_rtr_adv_interval: Optional[int] = None, - min_rtr_adv_interval: Optional[int] = None, - min_delay_between_ras: Optional[int] = None, - adv_managed_flag: Optional[str] = None, - adv_other_config_flag: Optional[str] = None, - adv_link_mtu: Optional[int] = None, - adv_reachable_time: Optional[int] = None, - adv_retrans_timer: Optional[int] = None, - adv_cur_hop_limit: Optional[int] = None, - adv_default_lifetime: Optional[int] = None, - adv_default_preference: Optional[str] = None, - adv_source_ll_address: Optional[str] = None, - adv_home_agent_flag: Optional[str] = None, - adv_home_agent_info: Optional[str] = None, - home_agent_lifetime: Optional[int] = None, - home_agent_preference: Optional[int] = None, - adv_mob_rtr_support_flag: Optional[str] = None, - adv_interval_opt: Optional[str] = None, - 
adv_on_link: str = radvd_constants.ADV_ON_LINK_ON, - adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON, - adv_router_addr: Optional[str] = None, - adv_valid_lifetime: Optional[int] = None, - adv_preferred_lifetime: Optional[int] = None, - base_6to4_interface: Optional[str] = None, - adv_route_lifetime: Optional[int] = None, - adv_route_preference: Optional[str] = None, - adv_rdnss_preference: Optional[int] = None, - adv_rdnss_open: Optional[str] = None, - adv_rdnss_lifetime: Optional[int] = None, - ) -> None: - """Construct a RadvdConfig. - - Args: - prefix: IPv6 prefix and length, ie fd::/64 - clients: A list of IPv6 link local addresses that will be the only - clients served. All other IPv6 addresses will be ignored if - this list is present. - route: A route for the router advertisement with prefix. - rdnss: A list of recursive DNS servers - ignore_if_missing: A flag indicating whether or not the interface - is ignored if it does not exist at start-up. By default, - radvd exits. - adv_send_advert: A flag indicating whether or not the router sends - periodic router advertisements and responds to router - solicitations. - unicast_only: Indicates that the interface link type only supports - unicast. - max_rtr_adv_interval:The maximum time allowed between sending - unsolicited multicast router advertisements from the interface, - in seconds. Must be no less than 4 seconds and no greater than - 1800 seconds. - min_rtr_adv_interval: The minimum time allowed between sending - unsolicited multicast router advertisements from the interface, - in seconds. Must be no less than 3 seconds and no greater than - 0.75 * max_rtr_adv_interval. - min_delay_between_ras: The minimum time allowed between sending - multicast router advertisements from the interface, in seconds., - adv_managed_flag: When set, hosts use the administered (stateful) - protocol for address autoconfiguration in addition to any - addresses autoconfigured using stateless address - autoconfiguration. 
The use of this flag is described in - RFC 4862. - adv_other_config_flag: When set, hosts use the administered - (stateful) protocol for autoconfiguration of other (non-address) - information. The use of this flag is described in RFC 4862. - adv_link_mtu: The MTU option is used in router advertisement - messages to insure that all nodes on a link use the same MTU - value in those cases where the link MTU is not well known. - adv_reachable_time: The time, in milliseconds, that a node assumes - a neighbor is reachable after having received a reachability - confirmation. Used by the Neighbor Unreachability Detection - algorithm (see Section 7.3 of RFC 4861). A value of zero means - unspecified (by this router). - adv_retrans_timer: The time, in milliseconds, between retransmitted - Neighbor Solicitation messages. Used by address resolution and - the Neighbor Unreachability Detection algorithm (see Sections - 7.2 and 7.3 of RFC 4861). A value of zero means unspecified - (by this router). - adv_cur_hop_limit: The default value that should be placed in the - Hop Count field of the IP header for outgoing (unicast) IP - packets. The value should be set to the current diameter of the - Internet. The value zero means unspecified (by this router). - adv_default_lifetime: The lifetime associated with the default - router in units of seconds. The maximum value corresponds to - 18.2 hours. A lifetime of 0 indicates that the router is not a - default router and should not appear on the default router list. - The router lifetime applies only to the router's usefulness as - a default router; it does not apply to information contained in - other message fields or options. Options that need time limits - for their information include their own lifetime fields. - adv_default_preference: The preference associated with the default - router, as either "low", "medium", or "high". - adv_source_ll_address: When set, the link-layer address of the - outgoing interface is included in the RA. 
- adv_home_agent_flag: When set, indicates that sending router is able - to serve as Mobile IPv6 Home Agent. When set, minimum limits - specified by Mobile IPv6 are used for MinRtrAdvInterval and - MaxRtrAdvInterval. - adv_home_agent_info: When set, Home Agent Information Option - (specified by Mobile IPv6) is included in Router Advertisements. - adv_home_agent_flag must also be set when using this option. - home_agent_lifetime: The length of time in seconds (relative to the - time the packet is sent) that the router is offering Mobile IPv6 - Home Agent services. A value 0 must not be used. The maximum - lifetime is 65520 seconds (18.2 hours). This option is ignored, - if adv_home_agent_info is not set. - home_agent_preference: The preference for the Home Agent sending - this Router Advertisement. Values greater than 0 indicate more - preferable Home Agent, values less than 0 indicate less - preferable Home Agent. This option is ignored, if - adv_home_agent_info is not set. - adv_mob_rtr_support_flag: When set, the Home Agent signals it - supports Mobile Router registrations (specified by NEMO Basic). - adv_home_agent_info must also be set when using this option. - adv_interval_opt: When set, Advertisement Interval Option - (specified by Mobile IPv6) is included in Router Advertisements. - When set, minimum limits specified by Mobile IPv6 are used for - MinRtrAdvInterval and MaxRtrAdvInterval. - adv_on_linkWhen set, indicates that this prefix can be used for - on-link determination. When not set the advertisement makes no - statement about on-link or off-link properties of the prefix. - For instance, the prefix might be used for address configuration - with some of the addresses belonging to the prefix being - on-link and others being off-link. - adv_autonomous: When set, indicates that this prefix can be used for - autonomous address configuration as specified in RFC 4862. 
- adv_router_addr: When set, indicates that the address of interface - is sent instead of network prefix, as is required by Mobile - IPv6. When set, minimum limits specified by Mobile IPv6 are used - for MinRtrAdvInterval and MaxRtrAdvInterval. - adv_valid_lifetime: The length of time in seconds (relative to the - time the packet is sent) that the prefix is valid for the - purpose of on-link determination. The symbolic value infinity - represents infinity (i.e. a value of all one bits (0xffffffff)). - The valid lifetime is also used by RFC 4862. - adv_preferred_lifetimeThe length of time in seconds (relative to the - time the packet is sent) that addresses generated from the - prefix via stateless address autoconfiguration remain preferred. - The symbolic value infinity represents infinity (i.e. a value of - all one bits (0xffffffff)). See RFC 4862. - base_6to4_interface: If this option is specified, this prefix will - be combined with the IPv4 address of interface name to produce - a valid 6to4 prefix. The first 16 bits of this prefix will be - replaced by 2002 and the next 32 bits of this prefix will be - replaced by the IPv4 address assigned to interface name at - configuration time. The remaining 80 bits of the prefix - (including the SLA ID) will be advertised as specified in the - configuration file. - adv_route_lifetime: The lifetime associated with the route in units - of seconds. The symbolic value infinity represents infinity - (i.e. a value of all one bits (0xffffffff)). - adv_route_preference: The preference associated with the default - router, as either "low", "medium", or "high". - adv_rdnss_preference: The preference of the DNS server, compared to - other DNS servers advertised and used. 0 to 7 means less - important than manually configured nameservers in resolv.conf, - while 12 to 15 means more important. - adv_rdnss_open: "Service Open" flag. 
When set, indicates that RDNSS - continues to be available to hosts even if they moved to a - different subnet. - adv_rdnss_lifetime: The maximum duration how long the RDNSS entries - are used for name resolution. A value of 0 means the nameserver - should no longer be used. The maximum duration how long the - RDNSS entries are used for name resolution. A value of 0 means - the nameserver should no longer be used. The value, if not 0, - must be at least max_rtr_adv_interval. To ensure stale RDNSS - info gets removed in a timely fashion, this should not be - greater than 2*max_rtr_adv_interval. - """ - self._prefix = prefix - self._clients = clients - self._route = route - self._rdnss = rdnss - self._ignore_if_missing = ignore_if_missing - self._adv_send_advert = adv_send_advert - self._unicast_only = unicast_only - self._max_rtr_adv_interval = max_rtr_adv_interval - self._min_rtr_adv_interval = min_rtr_adv_interval - self._min_delay_between_ras = min_delay_between_ras - self._adv_managed_flag = adv_managed_flag - self._adv_other_config_flag = adv_other_config_flag - self._adv_link_mtu = adv_link_mtu - self._adv_reachable_time = adv_reachable_time - self._adv_retrans_timer = adv_retrans_timer - self._adv_cur_hop_limit = adv_cur_hop_limit - self._adv_default_lifetime = adv_default_lifetime - self._adv_default_preference = adv_default_preference - self._adv_source_ll_address = adv_source_ll_address - self._adv_home_agent_flag = adv_home_agent_flag - self._adv_home_agent_info = adv_home_agent_info - self._home_agent_lifetime = home_agent_lifetime - self._home_agent_preference = home_agent_preference - self._adv_mob_rtr_support_flag = adv_mob_rtr_support_flag - self._adv_interval_opt = adv_interval_opt - self._adv_on_link = adv_on_link - self._adv_autonomous = adv_autonomous - self._adv_router_addr = adv_router_addr - self._adv_valid_lifetime = adv_valid_lifetime - self._adv_preferred_lifetime = adv_preferred_lifetime - self._base_6to4_interface = base_6to4_interface - 
self._adv_route_lifetime = adv_route_lifetime - self._adv_route_preference = adv_route_preference - self._adv_rdnss_preference = adv_rdnss_preference - self._adv_rdnss_open = adv_rdnss_open - self._adv_rdnss_lifetime = adv_rdnss_lifetime - - def package_configs(self): - conf = dict() - conf["prefix"] = self._prefix - conf["clients"] = self._clients - conf["route"] = self._route - conf["rdnss"] = self._rdnss - - conf["interface_options"] = collections.OrderedDict( - filter( - lambda pair: pair[1] is not None, - ( - ("IgnoreIfMissing", self._ignore_if_missing), - ("AdvSendAdvert", self._adv_send_advert), - ("UnicastOnly", self._unicast_only), - ("MaxRtrAdvInterval", self._max_rtr_adv_interval), - ("MinRtrAdvInterval", self._min_rtr_adv_interval), - ("MinDelayBetweenRAs", self._min_delay_between_ras), - ("AdvManagedFlag", self._adv_managed_flag), - ("AdvOtherConfigFlag", self._adv_other_config_flag), - ("AdvLinkMTU", self._adv_link_mtu), - ("AdvReachableTime", self._adv_reachable_time), - ("AdvRetransTimer", self._adv_retrans_timer), - ("AdvCurHopLimit", self._adv_cur_hop_limit), - ("AdvDefaultLifetime", self._adv_default_lifetime), - ("AdvDefaultPreference", self._adv_default_preference), - ("AdvSourceLLAddress", self._adv_source_ll_address), - ("AdvHomeAgentFlag", self._adv_home_agent_flag), - ("AdvHomeAgentInfo", self._adv_home_agent_info), - ("HomeAgentLifetime", self._home_agent_lifetime), - ("HomeAgentPreference", self._home_agent_preference), - ("AdvMobRtrSupportFlag", self._adv_mob_rtr_support_flag), - ("AdvIntervalOpt", self._adv_interval_opt), - ), - ) - ) - - conf["prefix_options"] = collections.OrderedDict( - filter( - lambda pair: pair[1] is not None, - ( - ("AdvOnLink", self._adv_on_link), - ("AdvAutonomous", self._adv_autonomous), - ("AdvRouterAddr", self._adv_router_addr), - ("AdvValidLifetime", self._adv_valid_lifetime), - ("AdvPreferredLifetime", self._adv_preferred_lifetime), - ("Base6to4Interface", self._base_6to4_interface), - ), - ) - ) - - 
conf["route_options"] = collections.OrderedDict( - filter( - lambda pair: pair[1] is not None, - ( - ("AdvRouteLifetime", self._adv_route_lifetime), - ("AdvRoutePreference", self._adv_route_preference), - ), - ) - ) - - conf["rdnss_options"] = collections.OrderedDict( - filter( - lambda pair: pair[1] is not None, - ( - ("AdvRDNSSPreference", self._adv_rdnss_preference), - ("AdvRDNSSOpen", self._adv_rdnss_open), - ("AdvRDNSSLifetime", self._adv_rdnss_lifetime), - ), - ) - ) - - return conf
diff --git a/src/antlion/controllers/ap_lib/radvd_constants.py b/src/antlion/controllers/ap_lib/radvd_constants.py deleted file mode 100644 index b02a694..0000000 --- a/src/antlion/controllers/ap_lib/radvd_constants.py +++ /dev/null
@@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -DEFAULT_PREFIX = "fd00::/64" - -IGNORE_IF_MISSING_ON = "on" -IGNORE_IF_MISSING_OFF = "off" - -ADV_SEND_ADVERT_ON = "on" -ADV_SEND_ADVERT_OFF = "off" - -UNICAST_ONLY_ON = "on" -UNICAST_ONLY_OFF = "off" - -ADV_MANAGED_FLAG_ON = "on" -ADV_MANAGED_FLAG_OFF = "off" - -ADV_OTHER_CONFIG_FLAG_ON = "on" -ADV_OTHER_CONFIG_FLAG_OFF = "off" - -ADV_DEFAULT_PREFERENCE_ON = "on" -ADV_DEFAULT_PREFERENCE_OFF = "off" - -ADV_SOURCE_LL_ADDRESS_ON = "on" -ADV_SOURCE_LL_ADDRESS_OFF = "off" - -ADV_HOME_AGENT_FLAG_ON = "on" -ADV_HOME_AGENT_FLAG_OFF = "off" - -ADV_HOME_AGENT_INFO_ON = "on" -ADV_HOME_AGENT_INFO_OFF = "off" - -ADV_MOB_RTR_SUPPORT_FLAG_ON = "on" -ADV_MOB_RTR_SUPPORT_FLAG_OFF = "off" - -ADV_INTERVAL_OPT_ON = "on" -ADV_INTERVAL_OPT_OFF = "off" - -ADV_ON_LINK_ON = "on" -ADV_ON_LINK_OFF = "off" - -ADV_AUTONOMOUS_ON = "on" -ADV_AUTONOMOUS_OFF = "off" - -ADV_ROUTER_ADDR_ON = "on" -ADV_ROUTER_ADDR_OFF = "off" - -ADV_ROUTE_PREFERENCE_LOW = "low" -ADV_ROUTE_PREFERENCE_MED = "medium" -ADV_ROUTE_PREFERENCE_HIGH = "high" - -ADV_RDNSS_OPEN_ON = "on" -ADV_RDNSS_OPEN_OFF = "off"
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py deleted file mode 100644 index 9e48935..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py +++ /dev/null
@@ -1,150 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def actiontec_pk5000(iface_wlan_2g=None, channel=None, security=None, ssid=None): - """A simulated implementation of what a Actiontec PK5000 AP - Args: - iface_wlan_2g: The 2.4 interface of the test AP. - channel: What channel to use. Only 2.4Ghz is supported for this profile - security: A security profile. Must be none or WPA2 as this is what is - supported by the PK5000. - ssid: Network name - Returns: - A hostapd config - - Differences from real pk5000: - Supported Rates IE: - PK5000: Supported: 1, 2, 5.5, 11 - Extended: 6, 9, 12, 18, 24, 36, 48, 54 - Simulated: Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - """ - if channel > 11: - # Technically this should be 14 but since the PK5000 is a US only AP, - # 11 is the highest allowable channel. - raise ValueError( - "The Actiontec PK5000 does not support 5Ghz. 
" - "Invalid channel (%s)" % channel - ) - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - interface = iface_wlan_2g - short_preamble = False - force_wmm = False - beacon_interval = 100 - dtim_period = 3 - # Sets the basic rates and supported rates of the PK5000 - additional_params = utils.merge_dicts( - hostapd_constants.CCK_AND_OFDM_BASIC_RATES, - hostapd_constants.CCK_AND_OFDM_DATA_RATES, - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=hostapd_constants.MODE_11G, - force_wmm=force_wmm, - beacon_interval=beacon_interval, - dtim_period=dtim_period, - short_preamble=short_preamble, - additional_parameters=additional_params, - ) - - return config - - -def actiontec_mi424wr(iface_wlan_2g=None, channel=None, security=None, ssid=None): - # TODO(b/143104825): Permit RIFS once it is supported - """A simulated implementation of an Actiontec MI424WR AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - channel: What channel to use (2.4Ghz or 5Ghz). - security: A security profile. - ssid: The network name. - Returns: - A hostapd config. - - Differences from real MI424WR: - HT Capabilities: - MI424WR: - HT Rx STBC: Support for 1, 2, and 3 - Simulated: - HT Rx STBC: Support for 1 - HT Information: - MI424WR: - RIFS: Premitted - Simulated: - RIFS: Prohibited - """ - if channel > 11: - raise ValueError( - "The Actiontec MI424WR does not support 5Ghz. 
" - "Invalid channel (%s)" % channel - ) - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - n_capabilities = [ - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - hostapd_constants.N_CAPABILITY_RX_STBC1, - ] - rates = utils.merge_dicts( - hostapd_constants.CCK_AND_OFDM_DATA_RATES, - hostapd_constants.CCK_AND_OFDM_BASIC_RATES, - ) - # Proprietary Atheros Communication: Adv Capability IE - # Proprietary Atheros Communication: Unknown IE - # Country Info: US Only IE - vendor_elements = { - "vendor_elements": "dd0900037f01010000ff7f" - "dd0a00037f04010000000000" - "0706555320010b1b" - } - - additional_params = utils.merge_dicts(rates, vendor_elements) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=iface_wlan_2g, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=True, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py deleted file mode 100644 index ea25157..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py +++ /dev/null
@@ -1,544 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def asus_rtac66u( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS once it is supported - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of an Asus RTAC66U AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5Ghz interface of the test AP. - channel: What channel to use. - security: A security profile. Must be none or WPA2 as this is what is - supported by the RTAC66U. - ssid: Network name - Returns: - A hostapd config - Differences from real RTAC66U: - 2.4 GHz: - Rates: - RTAC66U: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - HT Capab: - Info - RTAC66U: Green Field supported - Simulated: Green Field not supported on Whirlwind. - 5GHz: - VHT Capab: - RTAC66U: - SU Beamformer Supported, - SU Beamformee Supported, - Beamformee STS Capability: 3, - Number of Sounding Dimensions: 3, - VHT Link Adaptation: Both - Simulated: - Above are not supported on Whirlwind. 
- VHT Operation Info: - RTAC66U: Basic MCS Map (0x0000) - Simulated: Basic MCS Map (0xfffc) - VHT Tx Power Envelope: - RTAC66U: Local Max Tx Pwr Constraint: 1.0 dBm - Simulated: Local Max Tx Pwr Constraint: 23.0 dBm - Both: - HT Capab: - A-MPDU - RTAC66U: MPDU Density 4 - Simulated: MPDU Density 8 - HT Info: - RTAC66U: RIFS Permitted - Simulated: RIFS Prohibited - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - vht_channel_width = 20 - n_capabilities = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - hostapd_constants.N_CAPABILITY_SGI20, - ] - # WPS IE - # Broadcom IE - vendor_elements = { - "vendor_elements": "dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33" - "d7103c0001031049000600372a000120" - "dd090010180200001c0000" - } - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - mode = hostapd_constants.MODE_11N_MIXED - ac_capabilities = None - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - mode = hostapd_constants.MODE_11AC_MIXED - ac_capabilities = [ - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - ] - - 
additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=True, - beacon_interval=100, - dtim_period=3, - short_preamble=False, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - additional_parameters=additional_params, - ) - - return config - - -def asus_rtac86u( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - """A simulated implementation of an Asus RTAC86U AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5Ghz interface of the test AP. - channel: What channel to use. - security: A security profile. Must be none or WPA2 as this is what is - supported by the RTAC86U. - ssid: Network name - Returns: - A hostapd config - Differences from real RTAC86U: - 2.4GHz: - Rates: - RTAC86U: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - 5GHz: - Country Code: - Simulated: Has two country code IEs, one that matches - the actual, and another explicit IE that was required for - hostapd's 802.11d to work. 
- Both: - RSN Capabilities (w/ WPA2): - RTAC86U: - RSN PTKSA Replay Counter Capab: 16 - Simulated: - RSN PTKSA Replay Counter Capab: 1 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - mode = hostapd_constants.MODE_11G - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - spectrum_mgmt = False - # Measurement Pilot Transmission IE - vendor_elements = {"vendor_elements": "42020000"} - - # 5GHz - else: - interface = iface_wlan_5g - mode = hostapd_constants.MODE_11A - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - spectrum_mgmt = (True,) - # Country Information IE (w/ individual channel info) - # TPC Report Transmit Power IE - # Measurement Pilot Transmission IE - vendor_elements = { - "vendor_elements": "074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e" - "68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e" - "a5011e" - "23021300" - "42020000" - } - - additional_params = utils.merge_dicts(rates, qbss, vendor_elements) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=False, - beacon_interval=100, - dtim_period=3, - short_preamble=False, - spectrum_mgmt_required=spectrum_mgmt, - additional_parameters=additional_params, - ) - return config - - -def asus_rtac5300( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS 
once it is supported - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of an Asus RTAC5300 AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5Ghz interface of the test AP. - channel: What channel to use. - security: A security profile. Must be none or WPA2 as this is what is - supported by the RTAC5300. - ssid: Network name - Returns: - A hostapd config - Differences from real RTAC5300: - 2.4GHz: - Rates: - RTAC86U: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - 5GHz: - VHT Capab: - RTAC5300: - SU Beamformer Supported, - SU Beamformee Supported, - Beamformee STS Capability: 4, - Number of Sounding Dimensions: 4, - MU Beamformer Supported, - VHT Link Adaptation: Both - Simulated: - Above are not supported on Whirlwind. - VHT Operation Info: - RTAC5300: Basic MCS Map (0x0000) - Simulated: Basic MCS Map (0xfffc) - VHT Tx Power Envelope: - RTAC5300: Local Max Tx Pwr Constraint: 1.0 dBm - Simulated: Local Max Tx Pwr Constraint: 23.0 dBm - Both: - HT Capab: - A-MPDU - RTAC5300: MPDU Density 4 - Simulated: MPDU Density 8 - HT Info: - RTAC5300: RIFS Permitted - Simulated: RIFS Prohibited - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - vht_channel_width = 20 - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - n_capabilities = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - 
hostapd_constants.N_CAPABILITY_SGI20, - ] - - # Broadcom IE - vendor_elements = {"vendor_elements": "dd090010180200009c0000"} - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - mode = hostapd_constants.MODE_11N_MIXED - # AsusTek IE - # Epigram 2.4GHz IE - vendor_elements["vendor_elements"] += ( - "dd25f832e4010101020100031411b5" - "2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85" - "dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002" - ) - ac_capabilities = None - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - mode = hostapd_constants.MODE_11AC_MIXED - # Epigram 5GHz IE - vendor_elements["vendor_elements"] += "dd0500904c0410" - ac_capabilities = [ - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - ] - - additional_params = utils.merge_dicts( - rates, qbss, vendor_elements, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=True, - beacon_interval=100, - dtim_period=3, - short_preamble=False, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - additional_parameters=additional_params, - ) - return config - - -def asus_rtn56u( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - """A simulated implementation of an Asus RTN56U AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5Ghz interface of the test AP. - channel: What channel to use. - security: A security profile. 
Must be none or WPA2 as this is what is - supported by the RTN56U. - ssid: Network name - Returns: - A hostapd config - Differences from real RTN56U: - 2.4GHz: - Rates: - RTN56U: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - Both: - Fixed Parameters: - RTN56U: APSD Implemented - Simulated: APSD Not Implemented - HT Capab: - A-MPDU - RTN56U: MPDU Density 4 - Simulated: MPDU Density 8 - RSN Capabilities (w/ WPA2): - RTN56U: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - ] - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - # Ralink Technology IE - # US Country Code IE - # AP Channel Report IEs (2) - # WPS IE - vendor_elements = { - "vendor_elements": "dd07000c4307000000" - "0706555320010b14" - "33082001020304050607" - "33082105060708090a0b" - "dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c" - "d33448103c000101" - } - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - # Ralink Technology IE - # US Country Code IE - vendor_elements = {"vendor_elements": "dd07000c4307000000" 
"0706555320010b14"} - - additional_params = utils.merge_dicts( - rates, vendor_elements, qbss, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=False, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config - - -def asus_rtn66u( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS once it is supported - """A simulated implementation of an Asus RTN66U AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5Ghz interface of the test AP. - channel: What channel to use. - security: A security profile. Must be none or WPA2 as this is what is - supported by the RTN66U. - ssid: Network name - Returns: - A hostapd config - Differences from real RTN66U: - 2.4GHz: - Rates: - RTN66U: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - Both: - HT Info: - RTN66U: RIFS Permitted - Simulated: RIFS Prohibited - HT Capab: - Info: - RTN66U: Green Field supported - Simulated: Green Field not supported on Whirlwind. 
- A-MPDU - RTN66U: MPDU Density 4 - Simulated: MPDU Density 8 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - n_capabilities = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - ] - # Broadcom IE - vendor_elements = {"vendor_elements": "dd090010180200001c0000"} - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40) - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - - additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=3, - short_preamble=False, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py deleted file mode 100644 index 9c5c99d..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py +++ /dev/null
@@ -1,99 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def belkin_f9k1001v5(iface_wlan_2g=None, channel=None, security=None, ssid=None): - # TODO(b/143104825): Permit RIFS once it is supported - """A simulated implementation of what a Belkin F9K1001v5 AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real F9K1001v5: - Rates: - F9K1001v5: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - HT Info: - F9K1001v5: - RIFS: Permitted - Simulated: - RIFS: Prohibited - RSN Capabilities (w/ WPA2): - F9K1001v5: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - if channel > 11: - raise ValueError( - "The Belkin F9k1001v5 does not support 5Ghz. 
" - "Invalid channel (%s)" % channel - ) - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - ] - - rates = additional_params = utils.merge_dicts( - hostapd_constants.CCK_AND_OFDM_BASIC_RATES, - hostapd_constants.CCK_AND_OFDM_DATA_RATES, - ) - - # Broadcom IE - # WPS IE - vendor_elements = { - "vendor_elements": "dd090010180200100c0000" - "dd180050f204104a00011010440001021049000600372a000120" - } - - additional_params = utils.merge_dicts(rates, vendor_elements) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=iface_wlan_2g, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=3, - short_preamble=False, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py deleted file mode 100644 index 8010837..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py +++ /dev/null
@@ -1,297 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def linksys_ea4500( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS once it is supported - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of what a Linksys EA4500 AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real EA4500: - CF (Contention-Free) Parameter IE: - EA4500: has CF Parameter IE - Simulated: does not have CF Parameter IE - HT Capab: - Info: - EA4500: Green Field supported - Simulated: Green Field not supported on Whirlwind. 
- A-MPDU - RTAC66U: MPDU Density 4 - Simulated: MPDU Density 8 - RSN Capab (w/ WPA2): - EA4500: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - ] - - # Epigram HT Capabilities IE - # Epigram HT Additional Capabilities IE - # Marvell Semiconductor, Inc. IE - vendor_elements = { - "vendor_elements": "dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000" - "dd1a00904c3424000000000000000000000000000000000000000000" - "dd06005043030000" - } - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - obss_interval = 180 - n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS) - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - obss_interval = None - - additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=True, - obss_interval=obss_interval, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - 
return config - - -def linksys_ea9500( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - """A simulated implementation of what a Linksys EA9500 AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real EA9500: - 2.4GHz: - Rates: - EA9500: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - RSN Capab (w/ WPA2): - EA9500: - RSN PTKSA Replay Counter Capab: 16 - Simulated: - RSN PTKSA Replay Counter Capab: 1 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - # Measurement Pilot Transmission IE - vendor_elements = {"vendor_elements": "42020000"} - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - mode = hostapd_constants.MODE_11G - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - - # 5GHz - else: - interface = iface_wlan_5g - mode = hostapd_constants.MODE_11A - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - - additional_params = utils.merge_dicts(rates, qbss, vendor_elements) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=False, - beacon_interval=100, - dtim_period=1, - short_preamble=False, - 
additional_parameters=additional_params, - ) - return config - - -def linksys_wrt1900acv2( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of what a Linksys WRT1900ACV2 AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real WRT1900ACV2: - 5 GHz: - Simulated: Has two country code IEs, one that matches - the actual, and another explicit IE that was required for - hostapd's 802.11d to work. - Both: - HT Capab: - A-MPDU - WRT1900ACV2: MPDU Density 4 - Simulated: MPDU Density 8 - VHT Capab: - WRT1900ACV2: - SU Beamformer Supported, - SU Beamformee Supported, - Beamformee STS Capability: 4, - Number of Sounding Dimensions: 4, - Simulated: - Above are not supported on Whirlwind. 
- RSN Capabilities (w/ WPA2): - WRT1900ACV2: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - n_capabilities = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - ] - ac_capabilities = [ - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, - hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - ] - vht_channel_width = 20 - # Epigram, Inc. HT Capabilities IE - # Epigram, Inc. 
HT Additional Capabilities IE - # Marvell Semiconductor IE - vendor_elements = { - "vendor_elements": "dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800" - "dd1a00904c3424000000000000000000000000000000000000000000" - "dd06005043030000" - } - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - obss_interval = 180 - spectrum_mgmt = False - local_pwr_constraint = {} - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - obss_interval = None - spectrum_mgmt = (True,) - local_pwr_constraint = {"local_pwr_constraint": 3} - # Country Information IE (w/ individual channel info) - vendor_elements["vendor_elements"] += ( - "071e5553202401112801112c011130" "01119501179901179d0117a10117a50117" - ) - - additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, local_pwr_constraint - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=hostapd_constants.MODE_11AC_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=True, - obss_interval=obss_interval, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - spectrum_mgmt_required=spectrum_mgmt, - additional_parameters=additional_params, - ) - return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py deleted file mode 100644 index 25a91cd..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py +++ /dev/null
@@ -1,264 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def netgear_r7000( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS once it is supported - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of what a Netgear R7000 AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real R7000: - 2.4GHz: - Rates: - R7000: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, - 5GHz: - VHT Capab: - R7000: - SU Beamformer Supported, - SU Beamformee Supported, - Beamformee STS Capability: 3, - Number of Sounding Dimensions: 3, - VHT Link Adaptation: Both - Simulated: - Above are not supported on Whirlwind. 
- VHT Operation Info: - R7000: Basic MCS Map (0x0000) - Simulated: Basic MCS Map (0xfffc) - VHT Tx Power Envelope: - R7000: Local Max Tx Pwr Constraint: 1.0 dBm - Simulated: Local Max Tx Pwr Constraint: 23.0 dBm - Both: - HT Capab: - A-MPDU - R7000: MPDU Density 4 - Simulated: MPDU Density 8 - HT Info: - R7000: RIFS Permitted - Simulated: RIFS Prohibited - RM Capabilities: - R7000: - Beacon Table Measurement: Not Supported - Statistic Measurement: Enabled - AP Channel Report Capability: Enabled - Simulated: - Beacon Table Measurement: Supported - Statistic Measurement: Disabled - AP Channel Report Capability: Disabled - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - vht_channel_width = 80 - n_capabilities = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - hostapd_constants.N_CAPABILITY_SGI20, - ] - # Netgear IE - # WPS IE - # Epigram, Inc. 
IE - # Broadcom IE - vendor_elements = { - "vendor_elements": "dd0600146c000000" - "dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7" - "69e103c0001031049000600372a000120" - "dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002" - "dd090010180200001c0000" - } - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - mode = hostapd_constants.MODE_11N_MIXED - obss_interval = 300 - ac_capabilities = None - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - mode = hostapd_constants.MODE_11AC_MIXED - n_capabilities += [ - hostapd_constants.N_CAPABILITY_SGI40, - ] - - if hostapd_config.ht40_plus_allowed(channel): - n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS) - elif hostapd_config.ht40_minus_allowed(channel): - n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS) - - obss_interval = None - ac_capabilities = [ - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - ] - - additional_params = utils.merge_dicts( - rates, - vendor_elements, - qbss, - hostapd_constants.ENABLE_RRM_BEACON_REPORT, - hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, - hostapd_constants.UAPSD_ENABLED, - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=True, - beacon_interval=100, - dtim_period=2, - short_preamble=False, - obss_interval=obss_interval, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - additional_parameters=additional_params, - ) - return config - 
- -def netgear_wndr3400( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS on 5GHz once it is supported - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of what a Netgear WNDR3400 AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real WNDR3400: - 2.4GHz: - Rates: - WNDR3400: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, - 5GHz: - HT Info: - WNDR3400: RIFS Permitted - Simulated: RIFS Prohibited - Both: - HT Capab: - A-MPDU - WNDR3400: MPDU Density 16 - Simulated: MPDU Density 8 - Info - WNDR3400: Green Field supported - Simulated: Green Field not supported on Whirlwind. 
- """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - ] - # WPS IE - # Broadcom IE - vendor_elements = { - "vendor_elements": "dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade" - "dc103c0001031049000600372a000120" - "dd090010180200f0040000" - } - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - obss_interval = 300 - n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40) - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - obss_interval = None - n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS) - - additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=2, - short_preamble=False, - obss_interval=obss_interval, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py deleted file mode 100644 index 4a5bf68..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py +++ /dev/null
@@ -1,104 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def securifi_almond(iface_wlan_2g=None, channel=None, security=None, ssid=None): - """A simulated implementation of a Securifi Almond AP - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real Almond: - Rates: - Almond: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - HT Capab: - A-MPDU - Almond: MPDU Density 4 - Simulated: MPDU Density 8 - RSN Capab (w/ WPA2): - Almond: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - if channel > 11: - raise ValueError( - "The Securifi Almond does not support 5Ghz. 
" - "Invalid channel (%s)" % channel - ) - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - n_capabilities = [ - hostapd_constants.N_CAPABILITY_HT40_PLUS, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_DSSS_CCK_40, - ] - - rates = utils.merge_dicts( - hostapd_constants.CCK_AND_OFDM_BASIC_RATES, - hostapd_constants.CCK_AND_OFDM_DATA_RATES, - ) - - # Ralink Technology IE - # Country Information IE - # AP Channel Report IEs - vendor_elements = { - "vendor_elements": "dd07000c4307000000" - "0706555320010b14" - "33082001020304050607" - "33082105060708090a0b" - } - - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - - additional_params = utils.merge_dicts(rates, vendor_elements, qbss) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=iface_wlan_2g, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=True, - obss_interval=300, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py deleted file mode 100644 index 81eeeec..0000000 --- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py +++ /dev/null
@@ -1,456 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils - -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_utils - - -def tplink_archerc5( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of an TPLink ArcherC5 AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real ArcherC5: - 2.4GHz: - Rates: - ArcherC5: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - HT Capab: - Info: - ArcherC5: Green Field supported - Simulated: Green Field not supported on Whirlwind. - 5GHz: - VHT Capab: - ArcherC5: - SU Beamformer Supported, - SU Beamformee Supported, - Beamformee STS Capability: 3, - Number of Sounding Dimensions: 3, - VHT Link Adaptation: Both - Simulated: - Above are not supported on Whirlwind. 
- VHT Operation Info: - ArcherC5: Basic MCS Map (0x0000) - Simulated: Basic MCS Map (0xfffc) - VHT Tx Power Envelope: - ArcherC5: Local Max Tx Pwr Constraint: 1.0 dBm - Simulated: Local Max Tx Pwr Constraint: 23.0 dBm - Both: - HT Capab: - A-MPDU - ArcherC5: MPDU Density 4 - Simulated: MPDU Density 8 - HT Info: - ArcherC5: RIFS Permitted - Simulated: RIFS Prohibited - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - vht_channel_width = 20 - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - ] - # WPS IE - # Broadcom IE - vendor_elements = { - "vendor_elements": "dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8" - "12103c0001031049000600372a000120" - "dd090010180200001c0000" - } - qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600} - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - short_preamble = True - mode = hostapd_constants.MODE_11N_MIXED - n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40) - ac_capabilities = None - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - short_preamble = False - mode = hostapd_constants.MODE_11AC_MIXED - n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC) - ac_capabilities = [ - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - 
hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - ] - - additional_params = utils.merge_dicts( - rates, - vendor_elements, - qbss, - hostapd_constants.ENABLE_RRM_BEACON_REPORT, - hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, - hostapd_constants.UAPSD_ENABLED, - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=short_preamble, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - additional_parameters=additional_params, - ) - return config - - -def tplink_archerc7( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS once it is supported - """A simulated implementation of an TPLink ArcherC7 AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real ArcherC7: - 5GHz: - Country Code: - Simulated: Has two country code IEs, one that matches - the actual, and another explicit IE that was required for - hostapd's 802.11d to work. 
- Both: - HT Info: - ArcherC7: RIFS Permitted - Simulated: RIFS Prohibited - RSN Capabilities (w/ WPA2): - ArcherC7: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - vht_channel_width = 80 - n_capabilities = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - ] - # Atheros IE - # WPS IE - vendor_elements = { - "vendor_elements": "dd0900037f01010000ff7f" - "dd180050f204104a00011010440001021049000600372a000120" - } - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - short_preamble = True - mode = hostapd_constants.MODE_11N_MIXED - spectrum_mgmt = False - pwr_constraint = {} - ac_capabilities = None - vht_channel_width = None - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - short_preamble = False - mode = hostapd_constants.MODE_11AC_MIXED - spectrum_mgmt = True - # Country Information IE (w/ individual channel info) - vendor_elements["vendor_elements"] += ( - "074255532024011e28011e2c011e30" - "011e3401173801173c01174001176401176801176c0117700117740117840117" - "8801178c011795011e99011e9d011ea1011ea5011e" - ) - pwr_constraint = {"local_pwr_constraint": 3} - n_capabilities += [ - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - ] - - if hostapd_config.ht40_plus_allowed(channel): - 
n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS) - elif hostapd_config.ht40_minus_allowed(channel): - n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS) - - ac_capabilities = [ - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, - hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, - ] - - additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, pwr_constraint - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=short_preamble, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - spectrum_mgmt_required=spectrum_mgmt, - additional_parameters=additional_params, - ) - return config - - -def tplink_c1200( - iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None -): - # TODO(b/143104825): Permit RIFS once it is supported - # TODO(b/144446076): Address non-whirlwind hardware capabilities. - """A simulated implementation of an TPLink C1200 AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - iface_wlan_5g: The 5GHz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. 
- Differences from real C1200: - 2.4GHz: - Rates: - C1200: - Supported: 1, 2, 5.5, 11, 18, 24, 36, 54 - Extended: 6, 9, 12, 48 - Simulated: - Supported: 1, 2, 5.5, 11, 6, 9, 12, 18 - Extended: 24, 36, 48, 54 - HT Capab: - Info: - C1200: Green Field supported - Simulated: Green Field not supported on Whirlwind. - 5GHz: - VHT Operation Info: - C1200: Basic MCS Map (0x0000) - Simulated: Basic MCS Map (0xfffc) - VHT Tx Power Envelope: - C1200: Local Max Tx Pwr Constraint: 7.0 dBm - Simulated: Local Max Tx Pwr Constraint: 23.0 dBm - Both: - HT Info: - C1200: RIFS Permitted - Simulated: RIFS Prohibited - """ - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - # Common Parameters - rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES - vht_channel_width = 20 - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - ] - # WPS IE - # Broadcom IE - vendor_elements = { - "vendor_elements": "dd350050f204104a000110104400010210470010000000000000000000000000000000" - "00103c0001031049000a00372a00012005022688" - "dd090010180200000c0000" - } - - # 2.4GHz - if channel <= 11: - interface = iface_wlan_2g - rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES) - short_preamble = True - mode = hostapd_constants.MODE_11N_MIXED - ac_capabilities = None - - # 5GHz - else: - interface = iface_wlan_5g - rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES) - short_preamble = False - mode = hostapd_constants.MODE_11AC_MIXED - n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC) - ac_capabilities = [ - 
hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - ] - - additional_params = utils.merge_dicts( - rates, - vendor_elements, - hostapd_constants.ENABLE_RRM_BEACON_REPORT, - hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, - hostapd_constants.UAPSD_ENABLED, - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=interface, - mode=mode, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=short_preamble, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - vht_channel_width=vht_channel_width, - additional_parameters=additional_params, - ) - return config - - -def tplink_tlwr940n(iface_wlan_2g=None, channel=None, security=None, ssid=None): - # TODO(b/143104825): Permit RIFS once it is supported - """A simulated implementation of an TPLink TLWR940N AP. - Args: - iface_wlan_2g: The 2.4Ghz interface of the test AP. - channel: What channel to use. - security: A security profile (None or WPA2). - ssid: The network name. - Returns: - A hostapd config. - Differences from real TLWR940N: - HT Info: - TLWR940N: RIFS Permitted - Simulated: RIFS Prohibited - RSN Capabilities (w/ WPA2): - TLWR940N: - RSN PTKSA Replay Counter Capab: 1 - Simulated: - RSN PTKSA Replay Counter Capab: 16 - """ - if channel > 11: - raise ValueError( - "The mock TP-Link TLWR940N does not support 5Ghz. 
" - "Invalid channel (%s)" % channel - ) - # Verify interface and security - hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST) - hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2]) - if security: - hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER]) - - n_capabilities = [ - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - ] - - rates = utils.merge_dicts( - hostapd_constants.CCK_AND_OFDM_BASIC_RATES, - hostapd_constants.CCK_AND_OFDM_DATA_RATES, - ) - - # Atheros Communications, Inc. IE - # WPS IE - vendor_elements = { - "vendor_elements": "dd0900037f01010000ff7f" - "dd260050f204104a0001101044000102104900140024e2600200010160000002000160" - "0100020001" - } - - additional_params = utils.merge_dicts( - rates, vendor_elements, hostapd_constants.UAPSD_ENABLED - ) - - config = hostapd_config.HostapdConfig( - ssid=ssid, - channel=channel, - hidden=False, - security=security, - interface=iface_wlan_2g, - mode=hostapd_constants.MODE_11N_MIXED, - force_wmm=True, - beacon_interval=100, - dtim_period=1, - short_preamble=True, - n_capabilities=n_capabilities, - additional_parameters=additional_params, - ) - - return config
diff --git a/src/antlion/controllers/ap_lib/wireless_network_management.py b/src/antlion/controllers/ap_lib/wireless_network_management.py deleted file mode 100644 index 62ba34e..0000000 --- a/src/antlion/controllers/ap_lib/wireless_network_management.py +++ /dev/null
@@ -1,151 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import List, NewType, Optional - -from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement - -BssTransitionCandidateList = NewType( - "BssTransitionCandidateList", List[NeighborReportElement] -) - - -class BssTerminationDuration: - """Representation of BSS Termination Duration subelement. - - See IEEE 802.11-2020 Figure 9-341. - """ - - def __init__(self, duration: int): - """Create a BSS Termination Duration subelement. - - Args: - duration: number of minutes the BSS will be offline. - """ - # Note: hostapd does not currently support setting BSS Termination TSF, - # which is the other value held in this subelement. - self._duration = duration - - @property - def duration(self) -> int: - return self._duration - - -class BssTransitionManagementRequest: - """Representation of BSS Transition Management request. - - See IEEE 802.11-2020 9.6.13.9. 
- """ - - def __init__( - self, - preferred_candidate_list_included: bool = False, - abridged: bool = False, - disassociation_imminent: bool = False, - ess_disassociation_imminent: bool = False, - disassociation_timer: int = 0, - validity_interval: int = 1, - bss_termination_duration: Optional[BssTerminationDuration] = None, - session_information_url: Optional[str] = None, - candidate_list: Optional[BssTransitionCandidateList] = None, - ): - """Create a BSS Transition Management request. - - Args: - preferred_candidate_list_included: whether the candidate list is a - preferred candidate list, or (if False) a list of known - candidates. - abridged: whether a preference value of 0 is assigned to all BSSIDs - that do not appear in the candidate list, or (if False) AP has - no recommendation for/against anything not in the candidate - list. - disassociation_imminent: whether the STA is about to be - disassociated by the AP. - ess_disassociation_imminent: whether the STA will be disassociated - from the ESS. - disassociation_timer: the number of beacon transmission times - (TBTTs) until the AP disassociates this STA (default 0, meaning - AP has not determined when it will disassociate this STA). - validity_interval: number of TBTTs until the candidate list is no - longer valid (default 1). - bss_termination_duration: BSS Termination Duration subelement. - session_information_url: this URL is included if ESS disassociation - is immiment. - candidate_list: zero or more neighbor report elements. - """ - # Request mode field, see IEEE 802.11-2020 Figure 9-924. 
- self._preferred_candidate_list_included = preferred_candidate_list_included - self._abridged = abridged - self._disassociation_imminent = disassociation_imminent - self._ess_disassociation_imminent = ess_disassociation_imminent - - # Disassociation Timer, see IEEE 802.11-2020 Figure 9-925 - self._disassociation_timer = disassociation_timer - - # Validity Interval, see IEEE 802.11-2020 9.6.13.9 - self._validity_interval = validity_interval - - # BSS Termination Duration, see IEEE 802.11-2020 9.6.13.9 and Figure 9-341 - self._bss_termination_duration = bss_termination_duration - - # Session Information URL, see IEEE 802.11-2020 Figure 9-926 - self._session_information_url = session_information_url - - # BSS Transition Candidate List Entries, IEEE 802.11-2020 9.6.13.9. - self._candidate_list = candidate_list - - @property - def preferred_candidate_list_included(self) -> bool: - return self._preferred_candidate_list_included - - @property - def abridged(self) -> bool: - return self._abridged - - @property - def disassociation_imminent(self) -> bool: - return self._disassociation_imminent - - @property - def bss_termination_included(self) -> bool: - return self._bss_termination_duration is not None - - @property - def ess_disassociation_imminent(self) -> bool: - return self._ess_disassociation_imminent - - @property - def disassociation_timer(self) -> Optional[int]: - if self.disassociation_imminent: - return self._disassociation_timer - # Otherwise, field is reserved. - return None - - @property - def validity_interval(self) -> int: - return self._validity_interval - - @property - def bss_termination_duration(self) -> Optional[BssTerminationDuration]: - return self._bss_termination_duration - - @property - def session_information_url(self) -> Optional[str]: - return self._session_information_url - - @property - def candidate_list(self) -> Optional[BssTransitionCandidateList]: - return self._candidate_list
diff --git a/src/antlion/controllers/attenuator.py b/src/antlion/controllers/attenuator.py deleted file mode 100644 index 440e90a..0000000 --- a/src/antlion/controllers/attenuator.py +++ /dev/null
@@ -1,417 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import importlib -import logging - -from antlion.keys import Config -from antlion.libs.proc import job - -MOBLY_CONTROLLER_CONFIG_NAME = "Attenuator" -ACTS_CONTROLLER_REFERENCE_NAME = "attenuators" -_ATTENUATOR_OPEN_RETRIES = 3 - - -def create(configs): - objs = [] - for c in configs: - attn_model = c["Model"] - # Default to telnet. - protocol = c.get("Protocol", "telnet") - module_name = "antlion.controllers.attenuator_lib.%s.%s" % ( - attn_model, - protocol, - ) - module = importlib.import_module(module_name) - inst_cnt = c["InstrumentCount"] - attn_inst = module.AttenuatorInstrument(inst_cnt) - attn_inst.model = attn_model - - ip_address = c[Config.key_address.value] - port = c[Config.key_port.value] - - for attempt_number in range(1, _ATTENUATOR_OPEN_RETRIES + 1): - try: - attn_inst.open(ip_address, port) - except Exception as e: - logging.error( - "Attempt %s to open connection to attenuator " - "failed: %s" % (attempt_number, e) - ) - if attempt_number == _ATTENUATOR_OPEN_RETRIES: - ping_output = job.run( - "ping %s -c 1 -w 1" % ip_address, ignore_status=True - ) - if ping_output.exit_status == 1: - logging.error("Unable to ping attenuator at %s" % ip_address) - else: - logging.error("Able to ping attenuator at %s" % ip_address) - job.run( - 'echo "q" | telnet %s %s' % (ip_address, port), - ignore_status=True, - ) - raise - for i 
in range(inst_cnt): - attn = Attenuator(attn_inst, idx=i) - if "Paths" in c: - try: - setattr(attn, "path", c["Paths"][i]) - except IndexError: - logging.error("No path specified for attenuator %d.", i) - raise - objs.append(attn) - return objs - - -def get_info(attenuators): - """Get information on a list of Attenuator objects. - - Args: - attenuators: A list of Attenuator objects. - - Returns: - A list of dict, each representing info for Attenuator objects. - """ - device_info = [] - for attenuator in attenuators: - info = { - "Address": attenuator.instrument.address, - "Attenuator_Port": attenuator.idx, - } - device_info.append(info) - return device_info - - -def destroy(objs): - for attn in objs: - attn.instrument.close() - - -def get_attenuators_for_device(device_attenuator_configs, attenuators, attenuator_key): - """Gets the list of attenuators associated to a specified device and builds - a list of the attenuator objects associated to the ip address in the - device's section of the ACTS config and the Attenuator's IP address. In the - example below the access point object has an attenuator dictionary with - IP address associated to an attenuator object. The address is the only - mandatory field and the 'attenuator_ports_wifi_2g' and - 'attenuator_ports_wifi_5g' are the attenuator_key specified above. These - can be anything and is sent in as a parameter to this function. The numbers - in the list are ports that are in the attenuator object. Below is an - standard Access_Point object and the link to a standard Attenuator object. - Notice the link is the IP address, which is why the IP address is mandatory. 
- - "AccessPoint": [ - { - "ssh_config": { - "user": "root", - "host": "192.168.42.210" - }, - "Attenuator": [ - { - "Address": "192.168.42.200", - "attenuator_ports_wifi_2g": [ - 0, - 1, - 3 - ], - "attenuator_ports_wifi_5g": [ - 0, - 1 - ] - } - ] - } - ], - "Attenuator": [ - { - "Model": "minicircuits", - "InstrumentCount": 4, - "Address": "192.168.42.200", - "Port": 23 - } - ] - Args: - device_attenuator_configs: A list of attenuators config information in - the acts config that are associated a particular device. - attenuators: A list of all of the available attenuators objects - in the testbed. - attenuator_key: A string that is the key to search in the device's - configuration. - - Returns: - A list of attenuator objects for the specified device and the key in - that device's config. - """ - attenuator_list = [] - for device_attenuator_config in device_attenuator_configs: - for attenuator_port in device_attenuator_config[attenuator_key]: - for attenuator in attenuators: - if ( - attenuator.instrument.address == device_attenuator_config["Address"] - and attenuator.idx is attenuator_port - ): - attenuator_list.append(attenuator) - return attenuator_list - - -"""Classes for accessing, managing, and manipulating attenuators. - -Users will instantiate a specific child class, but almost all operation should -be performed on the methods and data members defined here in the base classes -or the wrapper classes. -""" - - -class AttenuatorError(Exception): - """Base class for all errors generated by Attenuator-related modules.""" - - -class InvalidDataError(AttenuatorError): - """ "Raised when an unexpected result is seen on the transport layer. - - When this exception is seen, closing an re-opening the link to the - attenuator instrument is probably necessary. Something has gone wrong in - the transport. - """ - - -class InvalidOperationError(AttenuatorError): - """Raised when the attenuator's state does not allow the given operation. 
- - Certain methods may only be accessed when the instance upon which they are - invoked is in a certain state. This indicates that the object is not in the - correct state for a method to be called. - """ - - -class AttenuatorInstrument(object): - """Defines the primitive behavior of all attenuator instruments. - - The AttenuatorInstrument class is designed to provide a simple low-level - interface for accessing any step attenuator instrument comprised of one or - more attenuators and a controller. All AttenuatorInstruments should override - all the methods below and call AttenuatorInstrument.__init__ in their - constructors. Outside of setup/teardown, devices should be accessed via - this generic "interface". - """ - - model = None - INVALID_MAX_ATTEN = 999.9 - - def __init__(self, num_atten=0): - """This is the Constructor for Attenuator Instrument. - - Args: - num_atten: The number of attenuators contained within the - instrument. In some instances setting this number to zero will - allow the driver to auto-determine the number of attenuators; - however, this behavior is not guaranteed. - - Raises: - NotImplementedError if initialization is called from this class. - """ - - if type(self) is AttenuatorInstrument: - raise NotImplementedError("Base class should not be instantiated directly!") - - self.num_atten = num_atten - self.max_atten = AttenuatorInstrument.INVALID_MAX_ATTEN - self.properties = None - - def set_atten(self, idx, value, strict=True, retry=False): - """Sets the attenuation given its index in the instrument. - - Args: - idx: A zero based index used to identify a particular attenuator in - an instrument. - value: a floating point value for nominal attenuation to be set. - strict: if True, function raises an error when given out of - bounds attenuation values, if false, the function sets out of - bounds values to 0 or max_atten. 
- retry: if True, command will be retried if possible - """ - raise NotImplementedError("Base class should not be called directly!") - - def get_atten(self, idx, retry=False): - """Returns the current attenuation of the attenuator at index idx. - - Args: - idx: A zero based index used to identify a particular attenuator in - an instrument. - retry: if True, command will be retried if possible - - Returns: - The current attenuation value as a floating point value - """ - raise NotImplementedError("Base class should not be called directly!") - - -class Attenuator(object): - """An object representing a single attenuator in a remote instrument. - - A user wishing to abstract the mapping of attenuators to physical - instruments should use this class, which provides an object that abstracts - the physical implementation and allows the user to think only of attenuators - regardless of their location. - """ - - def __init__(self, instrument, idx=0, offset=0): - """This is the constructor for Attenuator - - Args: - instrument: Reference to an AttenuatorInstrument on which the - Attenuator resides - idx: This zero-based index is the identifier for a particular - attenuator in an instrument. - offset: A power offset value for the attenuator to be used when - performing future operations. This could be used for either - calibration or to allow group operations with offsets between - various attenuators. - - Raises: - TypeError if an invalid AttenuatorInstrument is passed in. - IndexError if the index is out of range. - """ - if not isinstance(instrument, AttenuatorInstrument): - raise TypeError("Must provide an Attenuator Instrument Ref") - self.model = instrument.model - self.instrument = instrument - self.idx = idx - self.offset = offset - - if self.idx >= instrument.num_atten: - raise IndexError("Attenuator index out of range for attenuator instrument") - - def set_atten(self, value, strict=True, retry=False): - """Sets the attenuation. 
- - Args: - value: A floating point value for nominal attenuation to be set. - strict: if True, function raises an error when given out of - bounds attenuation values, if false, the function sets out of - bounds values to 0 or max_atten. - retry: if True, command will be retried if possible - - Raises: - ValueError if value + offset is greater than the maximum value. - """ - if value + self.offset > self.instrument.max_atten and strict: - raise ValueError("Attenuator Value+Offset greater than Max Attenuation!") - - self.instrument.set_atten( - self.idx, value + self.offset, strict=strict, retry=retry - ) - - def get_atten(self, retry=False): - """Returns the attenuation as a float, normalized by the offset.""" - return self.instrument.get_atten(self.idx, retry) - self.offset - - def get_max_atten(self): - """Returns the max attenuation as a float, normalized by the offset.""" - if self.instrument.max_atten == AttenuatorInstrument.INVALID_MAX_ATTEN: - raise ValueError("Invalid Max Attenuator Value") - - return self.instrument.max_atten - self.offset - - -class AttenuatorGroup(object): - """An abstraction for groups of attenuators that will share behavior. - - Attenuator groups are intended to further facilitate abstraction of testing - functions from the physical objects underlying them. By adding attenuators - to a group, it is possible to operate on functional groups that can be - thought of in a common manner in the test. This class is intended to provide - convenience to the user and avoid re-implementation of helper functions and - small loops scattered throughout user code. - """ - - def __init__(self, name=""): - """This constructor for AttenuatorGroup - - Args: - name: An optional parameter intended to further facilitate the - passing of easily tracked groups of attenuators throughout code. - It is left to the user to use the name in a way that meets their - needs. 
- """ - self.name = name - self.attens = [] - self._value = 0 - - def add_from_instrument(self, instrument, indices): - """Adds an AttenuatorInstrument to the group. - - This function will create Attenuator objects for all of the indices - passed in and add them to the group. - - Args: - instrument: the AttenuatorInstrument to pull attenuators from. - indices: The index or indices to add to the group. Either a - range, a list, or a single integer. - - Raises - ------ - TypeError - Requires a valid AttenuatorInstrument to be passed in. - """ - if not instrument or not isinstance(instrument, AttenuatorInstrument): - raise TypeError("Must provide an Attenuator Instrument Ref") - - if type(indices) is range or type(indices) is list: - for i in indices: - self.attens.append(Attenuator(instrument, i)) - elif type(indices) is int: - self.attens.append(Attenuator(instrument, indices)) - - def add(self, attenuator): - """Adds an already constructed Attenuator object to this group. - - Args: - attenuator: An Attenuator object. - - Raises: - TypeError if the attenuator parameter is not an Attenuator. - """ - if not isinstance(attenuator, Attenuator): - raise TypeError("Must provide an Attenuator") - - self.attens.append(attenuator) - - def synchronize(self): - """Sets all grouped attenuators to the group's attenuation value.""" - self.set_atten(self._value) - - def is_synchronized(self): - """Returns true if all attenuators have the synchronized value.""" - for att in self.attens: - if att.get_atten() != self._value: - return False - return True - - def set_atten(self, value): - """Sets the attenuation value of all attenuators in the group. - - Args: - value: A floating point value for nominal attenuation to be set. - """ - value = float(value) - for att in self.attens: - att.set_atten(value) - self._value = value - - def get_atten(self): - """Returns the current attenuation setting of AttenuatorGroup.""" - return float(self._value)
diff --git a/src/antlion/controllers/attenuator_lib/__init__.py b/src/antlion/controllers/attenuator_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/attenuator_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/attenuator_lib/_tnhelper.py b/src/antlion/controllers/attenuator_lib/_tnhelper.py deleted file mode 100644 index 61b4193..0000000 --- a/src/antlion/controllers/attenuator_lib/_tnhelper.py +++ /dev/null
@@ -1,139 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""A helper module to communicate over telnet with AttenuatorInstruments. - -User code shouldn't need to directly access this class. -""" - -import logging -import telnetlib -import re -from antlion.controllers import attenuator -from antlion.libs.proc import job - - -def _ascii_string(uc_string): - return str(uc_string).encode("ASCII") - - -class _TNHelper(object): - """An internal helper class for Telnet+SCPI command-based instruments. - - It should only be used by those implementation control libraries and not by - any user code directly. - """ - - def __init__(self, tx_cmd_separator="\n", rx_cmd_separator="\n", prompt=""): - self._tn = None - self._ip_address = None - self._port = None - - self.tx_cmd_separator = tx_cmd_separator - self.rx_cmd_separator = rx_cmd_separator - self.prompt = prompt - - def open(self, host, port=23): - self._ip_address = host - self._port = port - if self._tn: - self._tn.close() - logging.debug("Telnet Server IP = %s" % host) - self._tn = telnetlib.Telnet() - self._tn.open(host, port, 10) - - def is_open(self): - return bool(self._tn) - - def close(self): - if self._tn: - self._tn.close() - self._tn = None - - def diagnose_telnet(self): - """Function that diagnoses telnet connections. - - This function diagnoses telnet connections and can be used in case of - command failures. 
The function checks if the devices is still reachable - via ping, and whether or not it can close and reopen the telnet - connection. - - Returns: - False when telnet server is unreachable or unresponsive - True when telnet server is reachable and telnet connection has been - successfully reopened - """ - logging.debug("Diagnosing telnet connection") - try: - job_result = job.run("ping {} -c 5 -i 0.2".format(self._ip_address)) - except: - logging.error("Unable to ping telnet server.") - return False - ping_output = job_result.stdout - if not re.search(r" 0% packet loss", ping_output): - logging.error("Ping Packets Lost. Result: {}".format(ping_output)) - return False - try: - self.close() - except: - logging.error("Cannot close telnet connection.") - return False - try: - self.open(self._ip_address, self._port) - except: - logging.error("Cannot reopen telnet connection.") - return False - logging.debug("Telnet connection likely recovered") - return True - - def cmd(self, cmd_str, wait_ret=True, retry=False): - if not isinstance(cmd_str, str): - raise TypeError("Invalid command string", cmd_str) - - if not self.is_open(): - raise attenuator.InvalidOperationError( - "Telnet connection not open for commands" - ) - - cmd_str.strip(self.tx_cmd_separator) - self._tn.read_until(_ascii_string(self.prompt), 2) - self._tn.write(_ascii_string(cmd_str + self.tx_cmd_separator)) - - if wait_ret is False: - return None - - match_idx, match_val, ret_text = self._tn.expect( - [_ascii_string("\S+" + self.rx_cmd_separator)], 1 - ) - - logging.debug("Telnet Command: {}".format(cmd_str)) - logging.debug("Telnet Reply: ({},{},{})".format(match_idx, match_val, ret_text)) - - if match_idx == -1: - telnet_recovered = self.diagnose_telnet() - if telnet_recovered and retry: - logging.debug("Retrying telnet command once.") - return self.cmd(cmd_str, wait_ret, retry=False) - else: - raise attenuator.InvalidDataError( - "Telnet command failed to return valid data" - ) - - ret_text = 
ret_text.decode() - ret_text = ret_text.strip( - self.tx_cmd_separator + self.rx_cmd_separator + self.prompt - ) - - return ret_text
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/__init__.py b/src/antlion/controllers/attenuator_lib/aeroflex/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/attenuator_lib/aeroflex/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py deleted file mode 100644 index 4c34f4b..0000000 --- a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py +++ /dev/null
@@ -1,130 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules - -This class provides a wrapper to the Aeroflex attenuator modules for purposes -of simplifying and abstracting control down to the basic necessities. It is -not the intention of the module to expose all functionality, but to allow -interchangeable HW to be used. - -See http://www.aeroflex.com/ams/weinschel/PDFILES/IM-608-Models-8320-&-8321-preliminary.pdf -""" - -from antlion.controllers import attenuator -from antlion.controllers.attenuator_lib import _tnhelper - - -class AttenuatorInstrument(attenuator.AttenuatorInstrument): - def __init__(self, num_atten=0): - super(AttenuatorInstrument, self).__init__(num_atten) - - self._tnhelper = _tnhelper._TNHelper( - tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">" - ) - self.properties = None - self.address = None - - def open(self, host, port=23): - """Opens a telnet connection to the desired AttenuatorInstrument and - queries basic information. - - Args: - host: A valid hostname (IP address or DNS-resolvable name) to an - MC-DAT attenuator instrument. 
- port: An optional port number (defaults to telnet default 23) - """ - self._tnhelper.open(host, port) - - # work around a bug in IO, but this is a good thing to do anyway - self._tnhelper.cmd("*CLS", False) - self.address = host - - if self.num_atten == 0: - self.num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN")) - - configstr = self._tnhelper.cmd("RFCONFIG? ATTN 1") - - self.properties = dict( - zip( - ["model", "max_atten", "min_step", "unknown", "unknown2", "cfg_str"], - configstr.split(", ", 5), - ) - ) - - self.max_atten = float(self.properties["max_atten"]) - - def is_open(self): - """Returns True if the AttenuatorInstrument has an open connection.""" - return bool(self._tnhelper.is_open()) - - def close(self): - """Closes the telnet connection. - - This should be called as part of any teardown procedure prior to the - attenuator instrument leaving scope. - """ - self._tnhelper.close() - - def set_atten(self, idx, value, **_): - """This function sets the attenuation of an attenuator given its index - in the instrument. - - Args: - idx: A zero-based index that identifies a particular attenuator in - an instrument. For instruments that only have one channel, this - is ignored by the device. - value: A floating point value for nominal attenuation to be set. - - Raises: - InvalidOperationError if the telnet connection is not open. - IndexError if the index is not valid for this instrument. - ValueError if the requested set value is greater than the maximum - attenuation value. - """ - if not self.is_open(): - raise attenuator.InvalidOperationError("Connection not open!") - - if idx >= self.num_atten: - raise IndexError("Attenuator index out of range!", self.num_atten, idx) - - if value > self.max_atten: - raise ValueError("Attenuator value out of range!", self.max_atten, value) - - self._tnhelper.cmd("ATTN " + str(idx + 1) + " " + str(value), False) - - def get_atten(self, idx, **_): - """Returns the current attenuation of the attenuator at the given index. 
- - Args: - idx: The index of the attenuator. - - Raises: - InvalidOperationError if the telnet connection is not open. - - Returns: - the current attenuation value as a float - """ - if not self.is_open(): - raise attenuator.InvalidOperationError("Connection not open!") - - # Potentially redundant safety check removed for the moment - # if idx >= self.num_atten: - # raise IndexError("Attenuator index out of range!", self.num_atten, idx) - - atten_val = self._tnhelper.cmd("ATTN? " + str(idx + 1)) - - return float(atten_val)
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/__init__.py b/src/antlion/controllers/attenuator_lib/minicircuits/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/attenuator_lib/minicircuits/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/http.py b/src/antlion/controllers/attenuator_lib/minicircuits/http.py deleted file mode 100644 index 61c1e29..0000000 --- a/src/antlion/controllers/attenuator_lib/minicircuits/http.py +++ /dev/null
@@ -1,166 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Class for HTTP control of Mini-Circuits RCDAT series attenuators - -This class provides a wrapper to the MC-RCDAT attenuator modules for purposes -of simplifying and abstracting control down to the basic necessities. It is -not the intention of the module to expose all functionality, but to allow -interchangeable HW to be used. - -See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf -""" - -import urllib -from antlion.controllers import attenuator - - -class AttenuatorInstrument(attenuator.AttenuatorInstrument): - """A specific HTTP-controlled implementation of AttenuatorInstrument for - Mini-Circuits RC-DAT attenuators. - - With the exception of HTTP-specific commands, all functionality is defined - by the AttenuatorInstrument class. - """ - - def __init__(self, num_atten=1): - super(AttenuatorInstrument, self).__init__(num_atten) - self._ip_address = None - self._port = None - self._timeout = None - self.address = None - - def open(self, host, port=80, timeout=2): - """Initializes the AttenuatorInstrument and queries basic information. - - Args: - host: A valid hostname (IP address or DNS-resolvable name) to an - MC-DAT attenuator instrument. 
- port: An optional port number (defaults to http default 80) - timeout: An optional timeout for http requests - """ - self._ip_address = host - self._port = port - self._timeout = timeout - self.address = host - - att_req = urllib.request.urlopen( - "http://{}:{}/MN?".format(self._ip_address, self._port) - ) - config_str = att_req.read().decode("utf-8").strip() - if not config_str.startswith("MN="): - raise attenuator.InvalidDataError( - "Attenuator returned invalid data. Attenuator returned: {}".format( - config_str - ) - ) - - config_str = config_str[len("MN=") :] - self.properties = dict( - zip(["model", "max_freq", "max_atten"], config_str.split("-", 2)) - ) - self.max_atten = float(self.properties["max_atten"]) - - def is_open(self): - """Returns True if the AttenuatorInstrument has an open connection. - - Since this controller is based on HTTP requests, there is no connection - required and the attenuator is always ready to accept requests. - """ - return True - - def close(self): - """Closes the connection to the attenuator. - - Since this controller is based on HTTP requests, there is no connection - teardowns required. - """ - - def set_atten(self, idx, value, strict=True, retry=False, **_): - """This function sets the attenuation of an attenuator given its index - in the instrument. - - Args: - idx: A zero-based index that identifies a particular attenuator in - an instrument. For instruments that only have one channel, this - is ignored by the device. - value: A floating point value for nominal attenuation to be set. - strict: if True, function raises an error when given out of - bounds attenuation values, if false, the function sets out of - bounds values to 0 or max_atten. - retry: if True, command will be retried if possible - - Raises: - InvalidDataError if the attenuator does not respond with the - expected output. 
- """ - if not (0 <= idx < self.num_atten): - raise IndexError("Attenuator index out of range!", self.num_atten, idx) - - if value > self.max_atten and strict: - raise ValueError("Attenuator value out of range!", self.max_atten, value) - # The actual device uses one-based index for channel numbers. - adjusted_value = min(max(0, value), self.max_atten) - att_req = urllib.request.urlopen( - "http://{}:{}/CHAN:{}:SETATT:{}".format( - self._ip_address, self._port, idx + 1, adjusted_value - ), - timeout=self._timeout, - ) - att_resp = att_req.read().decode("utf-8").strip() - if att_resp != "1": - if retry: - self.set_atten(idx, value, strict, retry=False) - else: - raise attenuator.InvalidDataError( - "Attenuator returned invalid data. Attenuator returned: {}".format( - att_resp - ) - ) - - def get_atten(self, idx, retry=False, **_): - """Returns the current attenuation of the attenuator at the given index. - - Args: - idx: The index of the attenuator. - retry: if True, command will be retried if possible - - Raises: - InvalidDataError if the attenuator does not respond with the - expected outpu - - Returns: - the current attenuation value as a float - """ - if not (0 <= idx < self.num_atten): - raise IndexError("Attenuator index out of range!", self.num_atten, idx) - att_req = urllib.request.urlopen( - "http://{}:{}/CHAN:{}:ATT?".format(self._ip_address, self.port, idx + 1), - timeout=self._timeout, - ) - att_resp = att_req.read().decode("utf-8").strip() - try: - atten_val = float(att_resp) - except: - if retry: - self.get_atten(idx, retry=False) - else: - raise attenuator.InvalidDataError( - "Attenuator returned invalid data. Attenuator returned: {}".format( - att_resp - ) - ) - return atten_val
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py deleted file mode 100644 index ad9f0ce..0000000 --- a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py +++ /dev/null
@@ -1,146 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Class for Telnet control of Mini-Circuits RCDAT series attenuators - -This class provides a wrapper to the MC-RCDAT attenuator modules for purposes -of simplifying and abstracting control down to the basic necessities. It is -not the intention of the module to expose all functionality, but to allow -interchangeable HW to be used. - -See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf -""" - -from antlion.controllers import attenuator -from antlion.controllers.attenuator_lib import _tnhelper - - -class AttenuatorInstrument(attenuator.AttenuatorInstrument): - """A specific telnet-controlled implementation of AttenuatorInstrument for - Mini-Circuits RC-DAT attenuators. - - With the exception of telnet-specific commands, all functionality is defined - by the AttenuatorInstrument class. Because telnet is a stateful protocol, - the functionality of AttenuatorInstrument is contingent upon a telnet - connection being established. 
- """ - - def __init__(self, num_atten=0): - super(AttenuatorInstrument, self).__init__(num_atten) - self._tnhelper = _tnhelper._TNHelper( - tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt="" - ) - self.address = None - - def __del__(self): - if self.is_open(): - self.close() - - def open(self, host, port=23): - """Opens a telnet connection to the desired AttenuatorInstrument and - queries basic information. - - Args: - host: A valid hostname (IP address or DNS-resolvable name) to an - MC-DAT attenuator instrument. - port: An optional port number (defaults to telnet default 23) - """ - self._tnhelper.open(host, port) - self.address = host - - if self.num_atten == 0: - self.num_atten = 1 - - config_str = self._tnhelper.cmd("MN?") - - if config_str.startswith("MN="): - config_str = config_str[len("MN=") :] - - self.properties = dict( - zip(["model", "max_freq", "max_atten"], config_str.split("-", 2)) - ) - self.max_atten = float(self.properties["max_atten"]) - - def is_open(self): - """Returns True if the AttenuatorInstrument has an open connection.""" - return bool(self._tnhelper.is_open()) - - def close(self): - """Closes the telnet connection. - - This should be called as part of any teardown procedure prior to the - attenuator instrument leaving scope. - """ - self._tnhelper.close() - - def set_atten(self, idx, value, strict=True, retry=False): - """This function sets the attenuation of an attenuator given its index - in the instrument. - - Args: - idx: A zero-based index that identifies a particular attenuator in - an instrument. For instruments that only have one channel, this - is ignored by the device. - value: A floating point value for nominal attenuation to be set. - strict: if True, function raises an error when given out of - bounds attenuation values, if false, the function sets out of - bounds values to 0 or max_atten. 
- retry: if True, command will be retried if possible - - Raises: - InvalidOperationError if the telnet connection is not open. - IndexError if the index is not valid for this instrument. - ValueError if the requested set value is greater than the maximum - attenuation value. - """ - - if not self.is_open(): - raise attenuator.InvalidOperationError("Connection not open!") - - if idx >= self.num_atten: - raise IndexError("Attenuator index out of range!", self.num_atten, idx) - - if value > self.max_atten and strict: - raise ValueError("Attenuator value out of range!", self.max_atten, value) - # The actual device uses one-based index for channel numbers. - adjusted_value = min(max(0, value), self.max_atten) - self._tnhelper.cmd("CHAN:%s:SETATT:%s" % (idx + 1, adjusted_value), retry=retry) - - def get_atten(self, idx, retry=False): - """Returns the current attenuation of the attenuator at the given index. - - Args: - idx: The index of the attenuator. - retry: if True, command will be retried if possible - - Raises: - InvalidOperationError if the telnet connection is not open. - - Returns: - the current attenuation value as a float - """ - if not self.is_open(): - raise attenuator.InvalidOperationError("Connection not open!") - - if idx >= self.num_atten or idx < 0: - raise IndexError("Attenuator index out of range!", self.num_atten, idx) - - if self.num_atten == 1: - atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry) - else: - atten_val_str = self._tnhelper.cmd("CHAN:%s:ATT?" % (idx + 1), retry=retry) - atten_val = float(atten_val_str) - return atten_val
diff --git a/src/antlion/controllers/fastboot.py b/src/antlion/controllers/fastboot.py deleted file mode 100755 index ed67245..0000000 --- a/src/antlion/controllers/fastboot.py +++ /dev/null
@@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.libs.proc import job - -from antlion import error - - -class FastbootError(error.ActsError): - """Raised when there is an error in fastboot operations.""" - - def __init__(self, cmd, stdout, stderr, ret_code): - super().__init__() - self.cmd = cmd - self.stdout = stdout - self.stderr = stderr - self.ret_code = ret_code - - def __str__(self): - return ( - "Error executing fastboot cmd '%s'. ret: %d, stdout: %s," " stderr: %s" - ) % (self.cmd, self.ret_code, self.stdout, self.stderr) - - -class FastbootProxy: - """Proxy class for fastboot. - - For syntactic reasons, the '-' in fastboot commands need to be replaced - with '_'. Can directly execute fastboot commands on an object: - >> fb = FastbootProxy(<serial>) - >> fb.devices() # will return the console output of "fastboot devices". 
- """ - - def __init__(self, serial="", ssh_connection=None): - self.serial = serial - if serial: - self.fastboot_str = "fastboot -s {}".format(serial) - else: - self.fastboot_str = "fastboot" - self.ssh_connection = ssh_connection - - def _exec_fastboot_cmd(self, name, arg_str, ignore_status=False, timeout=60): - command = " ".join((self.fastboot_str, name, arg_str)) - if self.ssh_connection: - result = self.connection.run(command, ignore_status=True, timeout=timeout) - else: - result = job.run(command, ignore_status=True, timeout=timeout) - ret, out, err = result.exit_status, result.stdout, result.stderr - # TODO: This is only a temporary workaround for b/34815412. - # fastboot getvar outputs to stderr instead of stdout - if "getvar" in command: - out = err - if ret == 0 or ignore_status: - return out - else: - raise FastbootError(cmd=command, stdout=out, stderr=err, ret_code=ret) - - def args(self, *args, **kwargs): - return job.run(" ".join((self.fastboot_str,) + args), **kwargs).stdout - - def __getattr__(self, name): - def fastboot_call(*args, **kwargs): - clean_name = name.replace("_", "-") - arg_str = " ".join(str(elem) for elem in args) - return self._exec_fastboot_cmd(clean_name, arg_str, **kwargs) - - return fastboot_call
diff --git a/src/antlion/controllers/fuchsia_device.py b/src/antlion/controllers/fuchsia_device.py deleted file mode 100644 index 414afb4..0000000 --- a/src/antlion/controllers/fuchsia_device.py +++ /dev/null
@@ -1,974 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Optional, List -import json -import logging -import os -import re -import subprocess -import textwrap -import time - -from antlion import context -from antlion import logger as acts_logger -from antlion import signals -from antlion import utils -from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig, SSHError -from antlion.controllers import pdu -from antlion.controllers.fuchsia_lib.ffx import FFX -from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import ( - NetstackController, -) -from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import ( - WlanController, -) -from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import ( - WlanPolicyController, -) -from antlion.controllers.fuchsia_lib.package_server import PackageServer -from antlion.controllers.fuchsia_lib.sl4f import SL4F -from antlion.controllers.fuchsia_lib.ssh import ( - DEFAULT_SSH_PRIVATE_KEY, - DEFAULT_SSH_USER, - FuchsiaSSHProvider, -) -from antlion.controllers.fuchsia_lib.utils_lib import flash -from antlion.utils import get_fuchsia_mdns_ipv6_address, get_interface_ip_addresses - -MOBLY_CONTROLLER_CONFIG_NAME = "FuchsiaDevice" -ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices" - -FUCHSIA_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!" 
-FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!" -FUCHSIA_DEVICE_INVALID_CONFIG = ( - "Fuchsia device config must be either a str " - "or dict. abort! Invalid element %i in %r" -) -FUCHSIA_DEVICE_NO_IP_MSG = "No IP address specified, abort!" -FUCHSIA_COULD_NOT_GET_DESIRED_STATE = "Could not %s %s." -FUCHSIA_INVALID_CONTROL_STATE = "Invalid control state (%s). abort!" - -FUCHSIA_TIME_IN_NANOSECONDS = 1000000000 - -SL4F_APK_NAME = "com.googlecode.android_scripting" -DAEMON_INIT_TIMEOUT_SEC = 1 - -DAEMON_ACTIVATED_STATES = ["running", "start"] -DAEMON_DEACTIVATED_STATES = ["stop", "stopped"] - -FUCHSIA_RECONNECT_AFTER_REBOOT_TIME = 5 - -CHANNEL_OPEN_TIMEOUT = 5 - -FUCHSIA_REBOOT_TYPE_SOFT = "soft" -FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = "flash" -FUCHSIA_REBOOT_TYPE_HARD = "hard" - -FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90 -FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60 - -FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15 - -FUCHSIA_COUNTRY_CODE_TIMEOUT = 15 -FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US" - -MDNS_LOOKUP_RETRY_MAX = 3 - -VALID_ASSOCIATION_MECHANISMS = {None, "policy", "drivers"} -IP_ADDRESS_TIMEOUT = 15 - - -class FuchsiaDeviceError(signals.ControllerError): - pass - - -class FuchsiaConfigError(signals.ControllerError): - """Incorrect FuchsiaDevice configuration.""" - - -def create(configs): - if not configs: - raise FuchsiaDeviceError(FUCHSIA_DEVICE_EMPTY_CONFIG_MSG) - elif not isinstance(configs, list): - raise FuchsiaDeviceError(FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG) - for index, config in enumerate(configs): - if isinstance(config, str): - configs[index] = {"ip": config} - elif not isinstance(config, dict): - raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG % (index, configs)) - return get_instances(configs) - - -def destroy(fds): - for fd in fds: - fd.clean_up() - del fd - - -def get_info(fds): - """Get information on a list of FuchsiaDevice objects. - - Args: - fds: A list of FuchsiaDevice objects. 
- - Returns: - A list of dict, each representing info for FuchsiaDevice objects. - """ - device_info = [] - for fd in fds: - info = {"ip": fd.ip} - device_info.append(info) - return device_info - - -def get_instances(fds_conf_data): - """Create FuchsiaDevice instances from a list of Fuchsia ips. - - Args: - fds_conf_data: A list of dicts that contain Fuchsia device info. - - Returns: - A list of FuchsiaDevice objects. - """ - - return [FuchsiaDevice(fd_conf_data) for fd_conf_data in fds_conf_data] - - -class FuchsiaDevice: - """Class representing a Fuchsia device. - - Each object of this class represents one Fuchsia device in ACTS. - - Attributes: - ip: The full address or Fuchsia abstract name to contact the Fuchsia - device at - log: A logger object. - ssh_port: The SSH TCP port number of the Fuchsia device. - sl4f_port: The SL4F HTTP port number of the Fuchsia device. - ssh_config: The ssh_config for connecting to the Fuchsia device. - """ - - def __init__(self, fd_conf_data) -> None: - self.conf_data = fd_conf_data - if "ip" not in fd_conf_data: - raise FuchsiaDeviceError(FUCHSIA_DEVICE_NO_IP_MSG) - self.ip: str = fd_conf_data["ip"] - self.orig_ip: str = fd_conf_data["ip"] - self.sl4f_port: int = fd_conf_data.get("sl4f_port", 80) - self.ssh_username: str = fd_conf_data.get("ssh_username", DEFAULT_SSH_USER) - self.ssh_port: int = fd_conf_data.get("ssh_port", DEFAULT_SSH_PORT) - self.ssh_binary_path: str = fd_conf_data.get("ssh_binary_path", "ssh") - - def expand(path: str) -> str: - return os.path.expandvars(os.path.expanduser(path)) - - def path_from_config(name: str, default: Optional[str] = None) -> Optional[str]: - path = fd_conf_data.get(name, default) - if not path: - return path - return expand(path) - - def assert_exists(name: str, path: str) -> None: - if not path: - raise FuchsiaDeviceError( - f'Please specify "${name}" in your configuration file' - ) - if not os.path.exists(path): - raise FuchsiaDeviceError( - f'Please specify a correct "${name}" in 
your configuration ' - f'file: "{path}" does not exist' - ) - - self.specific_image: Optional[str] = path_from_config("specific_image") - if self.specific_image: - assert_exists("specific_image", self.specific_image) - - # Path to a tar.gz archive with pm and amber-files, as necessary for - # starting a package server. - self.packages_archive_path: Optional[str] = path_from_config( - "packages_archive_path", None - ) - if self.packages_archive_path: - assert_exists("packages_archive_path", self.packages_archive_path) - - def required_path_from_config(name: str, default: Optional[str] = None) -> str: - path = path_from_config(name, default) - assert_exists(name, path) - return path - - self.ssh_priv_key: str = required_path_from_config( - "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY - ) - self.ffx_binary_path: str = required_path_from_config( - "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx" - ) - - self.authorized_file: Optional[str] = fd_conf_data.get( - "authorized_file_loc", None - ) - self.serial_number: Optional[str] = fd_conf_data.get("serial_number", None) - self.device_type: Optional[str] = fd_conf_data.get("device_type", None) - self.product_type: Optional[str] = fd_conf_data.get("product_type", None) - self.board_type: Optional[str] = fd_conf_data.get("board_type", None) - self.build_number: Optional[str] = fd_conf_data.get("build_number", None) - self.build_type: Optional[str] = fd_conf_data.get("build_type", None) - - self.mdns_name: Optional[str] = fd_conf_data.get("mdns_name", None) - - self.hard_reboot_on_fail: bool = fd_conf_data.get("hard_reboot_on_fail", False) - self.take_bug_report_on_fail: bool = fd_conf_data.get( - "take_bug_report_on_fail", False - ) - self.device_pdu_config = fd_conf_data.get("PduDevice", None) - self.config_country_code: str = fd_conf_data.get( - "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US - ).upper() - - output_path = context.get_current_context().get_base_output_path() - self.ssh_config = os.path.join(output_path, 
"ssh_config_{}".format(self.ip)) - self._generate_ssh_config(self.ssh_config) - - # WLAN interface info is populated inside configure_wlan - self.wlan_client_interfaces = {} - self.wlan_ap_interfaces = {} - self.wlan_client_test_interface_name = fd_conf_data.get( - "wlan_client_test_interface", None - ) - self.wlan_ap_test_interface_name = fd_conf_data.get( - "wlan_ap_test_interface", None - ) - self.wlan_features: List[str] = fd_conf_data.get("wlan_features", []) - - # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls - # If set to None, wlan is not configured. - self.association_mechanism = None - # Defaults to policy layer, unless otherwise specified in the config - self.default_association_mechanism = fd_conf_data.get( - "association_mechanism", "policy" - ) - - # Whether to clear and preserve existing saved networks and client - # connections state, to be restored at device teardown. - self.default_preserve_saved_networks = fd_conf_data.get( - "preserve_saved_networks", True - ) - - if not utils.is_valid_ipv4_address(self.ip) and not utils.is_valid_ipv6_address( - self.ip - ): - mdns_ip = None - for retry_counter in range(MDNS_LOOKUP_RETRY_MAX): - mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip) - if mdns_ip: - break - else: - time.sleep(1) - if mdns_ip and utils.is_valid_ipv6_address(mdns_ip): - # self.ip was actually an mdns name. Use it for self.mdns_name - # unless one was explicitly provided. - self.mdns_name = self.mdns_name or self.ip - self.ip = mdns_ip - else: - raise ValueError("Invalid IP: %s" % self.ip) - - self.log = acts_logger.create_tagged_trace_logger( - "FuchsiaDevice | %s" % self.orig_ip - ) - - self.ping_rtt_match = re.compile( - r"RTT Min/Max/Avg " r"= \[ (.*?) / (.*?) / (.*?) 
\] ms" - ) - self.serial = re.sub("[.:%]", "_", self.ip) - log_path_base = getattr(logging, "log_path", "/tmp/logs") - self.log_path = os.path.join(log_path_base, "FuchsiaDevice%s" % self.serial) - self.fuchsia_log_file_path = os.path.join( - self.log_path, "fuchsialog_%s_debug.txt" % self.serial - ) - self.log_process = None - self.package_server = None - - self.init_controllers() - - @property - def sl4f(self): - """Get the sl4f module configured for this device. - - The sl4f module uses lazy-initialization; it will initialize an sl4f - server on the host device when it is required. - """ - if not hasattr(self, "_sl4f"): - self._sl4f = SL4F(self.ssh, self.sl4f_port) - self.log.info("Started SL4F server") - return self._sl4f - - @sl4f.deleter - def sl4f(self): - if not hasattr(self, "_sl4f"): - return - self.log.debug("Cleaning up SL4F") - del self._sl4f - - @property - def ssh(self): - """Get the SSH provider module configured for this device.""" - if not hasattr(self, "_ssh"): - if not self.ssh_port: - raise FuchsiaConfigError( - 'Must provide "ssh_port: <int>" in the device config' - ) - if not self.ssh_priv_key: - raise FuchsiaConfigError( - 'Must provide "ssh_priv_key: <file path>" in the device config' - ) - self._ssh = FuchsiaSSHProvider( - SSHConfig( - self.ssh_username, - self.ip, - self.ssh_priv_key, - port=self.ssh_port, - ssh_binary=self.ssh_binary_path, - ) - ) - return self._ssh - - @ssh.deleter - def ssh(self): - if not hasattr(self, "_ssh"): - return - self.log.debug("Cleaning up SSH") - del self._ssh - - @property - def ffx(self): - """Get the ffx module configured for this device. - - The ffx module uses lazy-initialization; it will initialize an ffx - connection to the device when it is required. - - If ffx needs to be reinitialized, delete the "ffx" property and attempt - access again. Note re-initialization will interrupt any running ffx - calls. 
- """ - if not hasattr(self, "_ffx"): - if not self.mdns_name: - raise FuchsiaConfigError( - 'Must provide "mdns_name: <device mDNS name>" in the device config' - ) - self._ffx = FFX( - self.ffx_binary_path, self.mdns_name, self.ip, self.ssh_priv_key - ) - return self._ffx - - @ffx.deleter - def ffx(self): - if not hasattr(self, "_ffx"): - return - self.log.debug("Cleaning up ffx") - self._ffx.clean_up() - del self._ffx - - def _generate_ssh_config(self, file_path: str): - """Generate and write an SSH config for Fuchsia to disk. - - Args: - file_path: Path to write the generated SSH config - """ - content = textwrap.dedent( - f"""\ - Host * - CheckHostIP no - StrictHostKeyChecking no - ForwardAgent no - ForwardX11 no - GSSAPIDelegateCredentials no - UserKnownHostsFile /dev/null - User fuchsia - IdentitiesOnly yes - IdentityFile {self.ssh_priv_key} - ControlPersist yes - ControlMaster auto - ControlPath /tmp/fuchsia--%r@%h:%p - ServerAliveInterval 1 - ServerAliveCountMax 1 - LogLevel ERROR - """ - ) - - with open(file_path, "w") as file: - file.write(content) - - def init_controllers(self): - # Contains Netstack functions - self.netstack_controller = NetstackController(self) - - # Contains WLAN core functions - self.wlan_controller = WlanController(self) - - # Contains WLAN policy functions like save_network, remove_network, etc - self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ssh) - - def start_package_server(self): - if not self.packages_archive_path: - self.log.warn( - "packages_archive_path is not specified. " - "Assuming a package server is already running and configured on " - "the DUT. If this is not the case, either run your own package " - "server, or configure these fields appropriately. " - "This is usually required for the Fuchsia iPerf3 client or " - "other testing utilities not on device cache." 
- ) - return - if self.package_server: - self.log.warn( - "Skipping to start the package server since is already running" - ) - return - - self.package_server = PackageServer(self.packages_archive_path) - self.package_server.start() - self.package_server.configure_device(self.ssh) - - def run_commands_from_config(self, cmd_dicts): - """Runs commands on the Fuchsia device from the config file. Useful for - device and/or Fuchsia specific configuration. - - Args: - cmd_dicts: list of dictionaries containing the following - 'cmd': string, command to run on device - 'timeout': int, seconds to wait for command to run (optional) - 'skip_status_code_check': bool, disregard errors if true - - Raises: - FuchsiaDeviceError: if any of the commands return a non-zero status - code and skip_status_code_check is false or undefined. - """ - for cmd_dict in cmd_dicts: - try: - cmd = cmd_dict["cmd"] - except KeyError: - raise FuchsiaDeviceError( - 'To run a command via config, you must provide key "cmd" ' - "containing the command string." - ) - - timeout = cmd_dict.get("timeout", FUCHSIA_DEFAULT_COMMAND_TIMEOUT) - # Catch both boolean and string values from JSON - skip_status_code_check = ( - "true" == str(cmd_dict.get("skip_status_code_check", False)).lower() - ) - - if skip_status_code_check: - self.log.info(f'Running command "{cmd}" and ignoring result.') - else: - self.log.info(f'Running command "{cmd}".') - - try: - result = self.ssh.run(cmd, timeout_sec=timeout) - self.log.debug(result) - except SSHError as e: - if not skip_status_code_check: - raise FuchsiaDeviceError( - "Failed device specific commands for initial configuration" - ) from e - - def configure_wlan( - self, association_mechanism: str = None, preserve_saved_networks: bool = None - ) -> None: - """ - Readies device for WLAN functionality. If applicable, connects to the - policy layer and clears/saves preexisting saved networks. - - Args: - association_mechanism: either 'policy' or 'drivers'. 
If None, uses - the default value from init (can be set by ACTS config) - preserve_saved_networks: whether to clear existing saved - networks, and preserve them for restoration later. If None, uses - the default value from init (can be set by ACTS config) - - Raises: - FuchsiaDeviceError, if configuration fails - """ - - # Set the country code US by default, or country code provided - # in ACTS config - self.configure_regulatory_domain(self.config_country_code) - - # If args aren't provided, use the defaults, which can be set in the - # config. - if association_mechanism is None: - association_mechanism = self.default_association_mechanism - if preserve_saved_networks is None: - preserve_saved_networks = self.default_preserve_saved_networks - - if association_mechanism not in VALID_ASSOCIATION_MECHANISMS: - raise FuchsiaDeviceError( - "Invalid FuchsiaDevice association_mechanism: %s" - % association_mechanism - ) - - # Allows for wlan to be set up differently in different tests - if self.association_mechanism: - self.log.info("Deconfiguring WLAN") - self.deconfigure_wlan() - - self.association_mechanism = association_mechanism - - self.log.info( - "Configuring WLAN w/ association mechanism: " f"{association_mechanism}" - ) - if association_mechanism == "drivers": - self.log.warn( - "You may encounter unusual device behavior when using the " - "drivers directly for WLAN. This should be reserved for " - "debugging specific issues. Normal test runs should use the " - "policy layer." - ) - if preserve_saved_networks: - self.log.warn( - "Unable to preserve saved networks when using drivers " - "association mechanism (requires policy layer control)." - ) - else: - # This requires SL4F calls, so it can only happen with actual - # devices, not with unit tests. 
- self.wlan_policy_controller.configure_wlan(preserve_saved_networks) - - # Retrieve WLAN client and AP interfaces - self.wlan_controller.update_wlan_interfaces() - - def deconfigure_wlan(self): - """ - Stops WLAN functionality (if it has been started). Used to allow - different tests to use WLAN differently (e.g. some tests require using - wlan policy, while the abstract wlan_device can be setup to use policy - or drivers) - - Raises: - FuchsiaDeviveError, if deconfigure fails. - """ - if not self.association_mechanism: - self.log.debug("WLAN not configured before deconfigure was called.") - return - # If using policy, stop client connections. Otherwise, just clear - # variables. - if self.association_mechanism != "drivers": - self.wlan_policy_controller._deconfigure_wlan() - self.association_mechanism = None - - def reboot( - self, - use_ssh: bool = False, - unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT, - reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT, - testbed_pdus: List[pdu.PduDevice] = [], - ) -> None: - """Reboot a FuchsiaDevice. - - Soft reboots the device, verifies it becomes unreachable, then verifies - it comes back online. Re-initializes services so the tests can continue. - - Args: - use_ssh: if True, use fuchsia shell command via ssh to reboot - instead of SL4F. - unreachable_timeout: time to wait for device to become unreachable. - reboot_type: 'soft', 'hard' or 'flash'. - testbed_pdus: all testbed PDUs. - - Raises: - ConnectionError, if device fails to become unreachable or fails to - come back up. 
- """ - if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT: - if use_ssh: - self.log.info("Soft rebooting via SSH") - try: - self.ssh.run( - "dm reboot", timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME - ) - except SSHError as e: - if "closed by remote host" not in e.result.stderr: - raise e - else: - self.log.info("Soft rebooting via SL4F") - self.sl4f.hardware_power_statecontrol_lib.suspendReboot(timeout=3) - self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout) - - elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD: - self.log.info("Hard rebooting via PDU") - if not testbed_pdus: - raise AttributeError( - "Testbed PDUs must be supplied " "to hard reboot a fuchsia_device." - ) - device_pdu, device_pdu_port = pdu.get_pdu_port_for_device( - self.device_pdu_config, testbed_pdus - ) - self.log.info("Killing power to FuchsiaDevice") - device_pdu.off(str(device_pdu_port)) - self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout) - self.log.info("Restoring power to FuchsiaDevice") - device_pdu.on(str(device_pdu_port)) - - elif reboot_type == FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH: - flash(self, use_ssh, FUCHSIA_RECONNECT_AFTER_REBOOT_TIME) - - else: - raise ValueError("Invalid reboot type: %s" % reboot_type) - - # Cleanup services - self.stop_services() - - # TODO (b/246852449): Move configure_wlan to other controllers. - # If wlan was configured before reboot, it must be configured again - # after rebooting, as it was before reboot. No preserving should occur. 
- if self.association_mechanism: - pre_reboot_association_mechanism = self.association_mechanism - # Prevent configure_wlan from thinking it needs to deconfigure first - self.association_mechanism = None - self.configure_wlan( - association_mechanism=pre_reboot_association_mechanism, - preserve_saved_networks=False, - ) - - self.log.info("Device has rebooted") - - def version(self) -> str: - """Return the version of Fuchsia running on the device.""" - return self.sl4f.device_lib.get_version()["result"] - - def device_name(self) -> str: - """Return the name of the device.""" - return self.sl4f.device_lib.get_device_name()["result"] - - def product_name(self) -> str: - """Return the product name of the device.""" - return self.sl4f.device_lib.get_product_name()["result"] - - def ping( - self, - dest_ip, - count=3, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ): - """Pings from a Fuchsia device to an IPv4 address or hostname - - Args: - dest_ip: (str) The ip or hostname to ping. - count: (int) How many icmp packets to send. - interval: (int) How long to wait between pings (ms) - timeout: (int) How long to wait before having the icmp packet - timeout (ms). - size: (int) Size of the icmp packet. - additional_ping_params: (str) command option flags to - append to the command string - - Returns: - A dictionary for the results of the ping. The dictionary contains - the following items: - status: Whether the ping was successful. - rtt_min: The minimum round trip time of the ping. - rtt_max: The minimum round trip time of the ping. - rtt_avg: The avg round trip time of the ping. - stdout: The standard out of the ping command. - stderr: The standard error of the ping command. - """ - rtt_min = None - rtt_max = None - rtt_avg = None - self.log.debug("Pinging %s..." 
% dest_ip) - if not additional_ping_params: - additional_ping_params = "" - - try: - ping_result = self.ssh.run( - f"ping -c {count} -i {interval} -t {timeout} -s {size} " - f"{additional_ping_params} {dest_ip}" - ) - except SSHError as e: - ping_result = e.result - - if ping_result.stderr: - status = False - else: - status = True - rtt_line = ping_result.stdout.split("\n")[:-1] - rtt_line = rtt_line[-1] - rtt_stats = re.search(self.ping_rtt_match, rtt_line) - rtt_min = rtt_stats.group(1) - rtt_max = rtt_stats.group(2) - rtt_avg = rtt_stats.group(3) - return { - "status": status, - "rtt_min": rtt_min, - "rtt_max": rtt_max, - "rtt_avg": rtt_avg, - "stdout": ping_result.stdout, - "stderr": ping_result.stderr, - } - - def can_ping( - self, - dest_ip, - count=1, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ) -> bool: - """Returns whether fuchsia device can ping a given dest address""" - ping_result = self.ping( - dest_ip, - count=count, - interval=interval, - timeout=timeout, - size=size, - additional_ping_params=additional_ping_params, - ) - return ping_result["status"] - - def clean_up(self): - """Cleans up the FuchsiaDevice object, releases any resources it - claimed, and restores saved networks if applicable. For reboots, use - clean_up_services only. - - Note: Any exceptions thrown in this method must be caught and handled, - ensuring that clean_up_services is run. Otherwise, the syslog listening - thread will never join and will leave tests hanging. 
- """ - # If and only if wlan is configured, and using the policy layer - if self.association_mechanism == "policy": - try: - self.wlan_policy_controller.clean_up() - except Exception as err: - self.log.warning("Unable to clean up WLAN Policy layer: %s" % err) - - self.stop_services() - - if self.package_server: - self.package_server.clean_up() - - def get_interface_ip_addresses(self, interface): - return get_interface_ip_addresses(self, interface) - - def wait_for_ipv4_addr(self, interface: str) -> None: - """Checks if device has an ipv4 private address. Sleeps 1 second between - retries. - - Args: - interface: name of interface from which to get ipv4 address. - - Raises: - ConnectionError, if device does not have an ipv4 address after all - timeout. - """ - self.log.info( - f"Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds." - ) - timeout = time.time() + IP_ADDRESS_TIMEOUT - while time.time() < timeout: - ip_addrs = self.get_interface_ip_addresses(interface) - - if len(ip_addrs["ipv4_private"]) > 0: - self.log.info( - "Device has an ipv4 address: " f"{ip_addrs['ipv4_private'][0]}" - ) - break - else: - self.log.debug( - "Device does not yet have an ipv4 address...retrying in 1 " - "second." - ) - time.sleep(1) - else: - raise ConnectionError("Device failed to get an ipv4 address.") - - def wait_for_ipv6_addr(self, interface: str) -> None: - """Checks if device has an ipv6 private local address. Sleeps 1 second - between retries. - - Args: - interface: name of interface from which to get ipv6 address. - - Raises: - ConnectionError, if device does not have an ipv6 address after all - timeout. - """ - self.log.info( - f"Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds." 
- ) - timeout = time.time() + IP_ADDRESS_TIMEOUT - while time.time() < timeout: - ip_addrs = self.get_interface_ip_addresses(interface) - if len(ip_addrs["ipv6_private_local"]) > 0: - self.log.info( - "Device has an ipv6 private local address: " - f"{ip_addrs['ipv6_private_local'][0]}" - ) - break - else: - self.log.debug( - "Device does not yet have an ipv6 address...retrying in 1 " - "second." - ) - time.sleep(1) - else: - raise ConnectionError("Device failed to get an ipv6 address.") - - def check_connect_response(self, connect_response): - if connect_response.get("error") is None: - # Checks the response from SL4F and if there is no error, check - # the result. - connection_result = connect_response.get("result") - if not connection_result: - # Ideally the error would be present but just outputting a log - # message until available. - self.log.debug("Connect call failed, aborting!") - return False - else: - # Returns True if connection was successful. - return True - else: - # the response indicates an error - log and raise failure - self.log.debug( - "Aborting! - Connect call failed with error: %s" - % connect_response.get("error") - ) - return False - - def check_disconnect_response(self, disconnect_response): - if disconnect_response.get("error") is None: - # Returns True if disconnect was successful. - return True - else: - # the response indicates an error - log and raise failure - self.log.debug( - "Disconnect call failed with error: %s" - % disconnect_response.get("error") - ) - return False - - # TODO(fxb/64657): Determine more stable solution to country code config on - # device bring up. - def configure_regulatory_domain(self, desired_country_code): - """Allows the user to set the device country code via ACTS config - - Usage: - In FuchsiaDevice config, add "country_code": "<CC>" - """ - # Country code can be None, from antlion config. 
- if desired_country_code: - desired_country_code = desired_country_code.upper() - response = self.sl4f.regulatory_region_lib.setRegion(desired_country_code) - if response.get("error"): - raise FuchsiaDeviceError( - "Failed to set regulatory domain. Err: %s" % response["error"] - ) - - phy_list_response = self.sl4f.wlan_lib.wlanPhyIdList() - if phy_list_response.get("error"): - raise FuchsiaDeviceError( - f'Failed to get phy list. Err: {response["error"]}' - ) - phy_list = phy_list_response.get("result") - if not phy_list: - raise FuchsiaDeviceError("No phy available in phy list") - phy_id = phy_list[0] - - end_time = time.time() + FUCHSIA_COUNTRY_CODE_TIMEOUT - while time.time() < end_time: - ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get("result") - # Convert ascii_cc to string, then compare - if ascii_cc and ( - "".join(chr(c) for c in ascii_cc).upper() == desired_country_code - ): - self.log.debug( - "Country code successfully set to %s." % desired_country_code - ) - return - self.log.debug("Country code not yet updated. Retrying.") - time.sleep(1) - raise FuchsiaDeviceError( - "Country code never updated to %s" % desired_country_code - ) - - def stop_services(self): - """Stops ffx daemon, deletes SSH property, and deletes SL4F property.""" - self.log.info("Stopping host device services.") - del self.sl4f - del self.ssh - del self.ffx - - def load_config(self, config): - pass - - def take_bug_report(self, test_name=None, begin_time=None): - """Takes a bug report on the device and stores it in a file. - - Args: - test_name: DEPRECATED. Do not specify this argument; it is only used - for logging. Name of the test case that triggered this bug - report. - begin_time: DEPRECATED. Do not specify this argument; it allows - overwriting of bug reports when this function is called several - times in one test. Epoch time when the test started. If not - specified, the current time will be used. 
- """ - if test_name: - self.log.info(f"Taking snapshot of {self.mdns_name} for {test_name}") - else: - self.log.info(f"Taking snapshot of {self.mdns_name}") - - epoch = begin_time if begin_time else utils.get_current_epoch_time() - time_stamp = acts_logger.normalize_log_line_timestamp( - acts_logger.epoch_to_log_line_timestamp(epoch) - ) - out_dir = context.get_current_context().get_full_output_path() - out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip") - - try: - subprocess.run( - [f"ssh -F {self.ssh_config} {self.ip} snapshot > {out_path}"], - shell=True, - ) - self.log.info(f"Snapshot saved to {out_path}") - except Exception as err: - self.log.error(f"Failed to take snapshot: {err}") - - def take_bt_snoop_log(self, custom_name=None): - """Takes a the bt-snoop log from the device and stores it in a file - in a pcap format. - """ - bt_snoop_path = context.get_current_context().get_full_output_path() - time_stamp = acts_logger.normalize_log_line_timestamp( - acts_logger.epoch_to_log_line_timestamp(time.time()) - ) - out_name = "FuchsiaDevice%s_%s" % ( - self.serial, - time_stamp.replace(" ", "_").replace(":", "-"), - ) - out_name = "%s.pcap" % out_name - if custom_name: - out_name = "%s_%s.pcap" % (self.serial, custom_name) - else: - out_name = "%s.pcap" % out_name - full_out_path = os.path.join(bt_snoop_path, out_name) - bt_snoop_data = self.ssh.run("bt-snoop-cli -d -f pcap").raw_stdout - bt_snoop_file = open(full_out_path, "wb") - bt_snoop_file.write(bt_snoop_data) - bt_snoop_file.close()
diff --git a/src/antlion/controllers/fuchsia_lib/OWNERS b/src/antlion/controllers/fuchsia_lib/OWNERS deleted file mode 100644 index 130db54..0000000 --- a/src/antlion/controllers/fuchsia_lib/OWNERS +++ /dev/null
@@ -1,9 +0,0 @@ -chcl@google.com -dhobsd@google.com -haydennix@google.com -jmbrenna@google.com -mnck@google.com -nickchee@google.com -sbalana@google.com -silberst@google.com -tturney@google.com
diff --git a/src/antlion/controllers/fuchsia_lib/__init__.py b/src/antlion/controllers/fuchsia_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/fuchsia_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/base_lib.py b/src/antlion/controllers/fuchsia_lib/base_lib.py deleted file mode 100644 index ea7f96e..0000000 --- a/src/antlion/controllers/fuchsia_lib/base_lib.py +++ /dev/null
@@ -1,102 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import socket - -from typing import Any, Mapping -from urllib.request import Request, urlopen - -from antlion import logger - -DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30 - - -class DeviceOffline(Exception): - """Exception if the device is no longer reachable via the network.""" - - -class SL4FCommandFailed(Exception): - """A SL4F command to the server failed.""" - - -class BaseLib: - def __init__(self, addr: str, logger_tag: str) -> None: - self.address = addr - self.log = logger.create_tagged_trace_logger( - f"SL4F | {self.address} | {logger_tag}" - ) - - def send_command( - self, - cmd: str, - args: Mapping[str, Any], - response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC, - ) -> Mapping[str, Any]: - """Builds and sends a JSON command to SL4F server. - - Args: - cmd: SL4F method name of command. - args: Arguments required to execute cmd. - response_timeout: Seconds to wait for a response before - throwing an exception. - - Returns: - Response from SL4F server. - - Throws: - TimeoutError: The HTTP request timed out waiting for a response - """ - data = { - "jsonrpc": "2.0", - # id is required by the SL4F server to parse test_data but is not - # currently used. 
- "id": "", - "method": cmd, - "params": args, - } - data_json = json.dumps(data).encode("utf-8") - req = Request( - self.address, - data=data_json, - headers={ - "Content-Type": "application/json; charset=utf-8", - "Content-Length": len(data_json), - }, - ) - - self.log.debug(f'Sending request "{cmd}" with {args}') - try: - response = urlopen(req, timeout=response_timeout) - except socket.timeout as e: - # socket.timeout was aliased to TimeoutError in Python 3.10. For - # older versions of Python, we need to cast to TimeoutError to - # provide a version-agnostic API. - raise TimeoutError("socket timeout") from e - - response_body = response.read().decode("utf-8") - try: - response_json = json.loads(response_body) - self.log.debug(f'Received response for "{cmd}": {response_json}') - except json.JSONDecodeError as e: - raise SL4FCommandFailed(response_body) from e - - # If the SL4F command fails it returns a str, without an 'error' field - # to get. - if not isinstance(response_json, dict): - raise SL4FCommandFailed(response_json) - - return response_json
diff --git a/src/antlion/controllers/fuchsia_lib/device_lib.py b/src/antlion/controllers/fuchsia_lib/device_lib.py deleted file mode 100644 index f7ad6b6..0000000 --- a/src/antlion/controllers/fuchsia_lib/device_lib.py +++ /dev/null
@@ -1,37 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2023 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - - -class DeviceLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "device") - - def get_device_name(self) -> str: - """Get the device name.""" - - return self.send_command("device_facade.GetDeviceName", {}) - - def get_product_name(self) -> str: - """Get the product name.""" - - return self.send_command("device_facade.GetProduct", {}) - - def get_version(self): - """Get the device version.""" - - return self.send_command("device_facade.GetVersion", {})
diff --git a/src/antlion/controllers/fuchsia_lib/ffx.py b/src/antlion/controllers/fuchsia_lib/ffx.py deleted file mode 100644 index 3db6c8d..0000000 --- a/src/antlion/controllers/fuchsia_lib/ffx.py +++ /dev/null
@@ -1,336 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import os -import tempfile -import subprocess -import time - -from pathlib import Path, PurePath -from shutil import rmtree -from typing import Any, MutableMapping, Optional - -from antlion import context -from antlion import logger -from antlion import signals -from antlion import utils - -FFX_DEFAULT_COMMAND_TIMEOUT: int = 60 - - -class FFXError(signals.TestError): - """Non-zero error code returned from a ffx command.""" - - def __init__(self, command: str, process: subprocess.CalledProcessError) -> None: - self.command = command - self.stdout: str = process.stdout.decode("utf-8", errors="replace") - self.stderr: str = process.stderr.decode("utf-8", errors="replace") - self.exit_status = process.returncode - - def __str__(self) -> str: - return f'ffx subcommand "{self.command}" returned {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"' - - -class FFXTimeout(signals.TestError): - """Timed out running a ffx command.""" - - -class FFX: - """Device-specific controller for the ffx tool. - - Attributes: - log: Logger for the device-specific instance of ffx. - binary_path: Path to the ffx binary. - mdns_name: mDNS nodename of the default Fuchsia target. - ip: IP address of the default Fuchsia target. - ssh_private_key_path: Path to Fuchsia DUT SSH private key. 
- """ - - def __init__( - self, - binary_path: str, - mdns_name: str, - ip: str = None, - ssh_private_key_path: str = None, - ): - """ - Args: - binary_path: Path to ffx binary. - target: Fuchsia mDNS nodename of default target. - ssh_private_key_path: Path to SSH private key for talking to the - Fuchsia DUT. - """ - self.log = logger.create_tagged_trace_logger(f"ffx | {mdns_name}") - self.binary_path = binary_path - self.mdns_name = mdns_name - self.ip = ip - self.ssh_private_key_path = ssh_private_key_path - - self._env_config_path: Optional[str] = None - self._sock_dir: Optional[str] = None - self._ssh_auth_sock_path: Optional[str] = None - self._overnet_socket_path: Optional[str] = None - self._has_been_reachable = False - self._has_logged_version = False - - def clean_up(self) -> None: - if self._env_config_path: - self.run("daemon stop", skip_reachability_check=True) - if self._ssh_auth_sock_path: - Path(self._ssh_auth_sock_path).unlink(missing_ok=True) - if self._overnet_socket_path: - Path(self._overnet_socket_path).unlink(missing_ok=True) - if self._sock_dir: - rmtree(self._sock_dir) - - self._env_config_path = None - self._sock_dir = None - self._ssh_auth_sock_path = None - self._overnet_socket_path = None - self._has_been_reachable = False - self._has_logged_version = False - - def run( - self, - command: str, - timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT, - skip_status_code_check: bool = False, - skip_reachability_check: bool = False, - ) -> subprocess.CompletedProcess: - """Runs an ffx command. - - Verifies reachability before running, if it hasn't already. - - Args: - command: Command to run with ffx. - timeout_sec: Seconds to wait for a command to complete. - skip_status_code_check: Whether to check for the status code. - verify_reachable: Whether to verify reachability before running. - - Raises: - FFXTimeout: when the command times out. - FFXError: when the command returns non-zero and skip_status_code_check is False. 
- - Returns: - The results of the command. Note subprocess.CompletedProcess returns - stdout and stderr as a byte-array, not a string. Treat these members - as such or convert to a string using bytes.decode('utf-8'). - """ - if not self._env_config_path: - self._create_isolated_environment() - if not self._has_been_reachable and not skip_reachability_check: - self.log.info(f'Verifying reachability before running "{command}"') - self.verify_reachable() - - self.log.debug(f'Running "{command}".') - full_command = f"{self.binary_path} -e {self._env_config_path} {command}" - - try: - result = subprocess.run( - full_command.split(), - capture_output=True, - timeout=timeout_sec, - check=not skip_status_code_check, - ) - except subprocess.CalledProcessError as e: - raise FFXError(command, e) from e - except subprocess.TimeoutExpired as e: - raise FFXTimeout(f'Timed out running "{full_command}"') from e - - return result - - def _create_isolated_environment(self) -> None: - """Create a new isolated environment for ffx. - - This is needed to avoid overlapping ffx daemons while testing in - parallel, causing the ffx invocations to “upgrade” one daemon to - another, which appears as a flap/restart to another test. - """ - # Store ffx files in a unique directory. Timestamp is used to prevent - # files from being overwritten in the case when a test intentionally - # reboots or resets the device such that a new isolated ffx environment - # is created. - root_dir = context.get_current_context().get_full_output_path() - epoch = utils.get_current_epoch_time() - time_stamp = logger.normalize_log_line_timestamp( - logger.epoch_to_log_line_timestamp(epoch) - ) - target_dir = os.path.join(root_dir, f"{self.mdns_name}_{time_stamp}") - os.makedirs(target_dir, exist_ok=True) - - # Sockets need to be created in a different directory to be guaranteed - # to stay under the maximum socket path length of 104 characters. 
- # See https://unix.stackexchange.com/q/367008 - self._sock_dir = tempfile.mkdtemp() - # On MacOS, the socket paths need to be just paths (not pre-created - # Python tempfiles, which are not socket files). - self._ssh_auth_sock_path = str(PurePath(self._sock_dir, "ssh_auth_sock")) - self._overnet_socket_path = str(PurePath(self._sock_dir, "overnet_socket")) - - config: MutableMapping[str, Any] = { - "target": { - "default": self.mdns_name, - }, - # Use user-specific and device-specific locations for sockets. - # Avoids user permission errors in a multi-user test environment. - # Avoids daemon upgrades when running tests in parallel in a CI - # environment. - "ssh": { - "auth-sock": self._ssh_auth_sock_path, - }, - "overnet": { - "socket": self._overnet_socket_path, - }, - # Configure the ffx daemon to log to a place where we can read it. - # Note, ffx client will still output to stdout, not this log - # directory. - "log": { - "enabled": True, - "dir": [target_dir], - }, - # Disable analytics to decrease noise on the network. - "ffx": { - "analytics": { - "disabled": True, - }, - }, - # Prevent log collection from all devices the ffx daemon sees; only - # collect logs from the target device. - # - # TODO(https://fxbug.dev/118764): Consider re-enabling after - # resolution of the issue causing a reboot of the target device. - "proactive_log": { - "enabled": False, - }, - } - - if self.ip: - config["discovery"] = { - "mdns": { - "enabled": False, - }, - } - - # ffx looks for the private key in several default locations. For - # testbeds which have the private key in another location, set it now. 
- if self.ssh_private_key_path: - config["ssh"]["priv"] = self.ssh_private_key_path - - config_path = os.path.join(target_dir, "ffx_config.json") - with open(config_path, "w", encoding="utf-8") as f: - json.dump(config, f, ensure_ascii=False, indent=4) - - env = { - "user": config_path, - "build": None, - "global": None, - } - self._env_config_path = os.path.join(target_dir, "ffx_env.json") - with open(self._env_config_path, "w", encoding="utf-8") as f: - json.dump(env, f, ensure_ascii=False, indent=4) - - # The ffx daemon will started automatically when needed. There is no - # need to start it manually here. - - def verify_reachable(self, timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT) -> None: - """Verify the target is reachable via RCS and various services. - - Blocks until the device allows for an RCS connection. If the device - isn't reachable within a short time, logs a warning before waiting - longer. - - Verifies the RCS connection by fetching information from the device, - which exercises several debug and informational FIDL services. - - When called for the first time, the versions will be checked for - compatibility. - - Args: - timeout_sec: Seconds to wait for reachability check - - Raises: - FFXError: when an unknown error occurs - FFXTimeout: when the target is unreachable - """ - cmd = "target wait" - if self.ip: - # `target add` does what `target wait` does but adds an entry - # to ensure connections can happen without mDNS. - # TODO(https://fxbug.dev/105530): Update manual target parsing in - # ffx. 
- cmd = f"target add {self.ip}" - - timeout = time.perf_counter() + timeout_sec - while True: - try: - self.run(cmd, timeout_sec=5, skip_reachability_check=True) - break - except FFXError as e: - if "took too long connecting to ascendd socket" in e.stderr: - err = e - else: - raise e - except FFXTimeout as e: - err = e - - if time.perf_counter() > timeout: - raise FFXTimeout( - f"Waited over {timeout_sec}s for ffx to become reachable" - ) from err - - # Use a shorter timeout than default because device information - # gathering can hang for a long time if the device is not actually - # connectable. - try: - result = self.run( - "target show --json", timeout_sec=15, skip_reachability_check=True - ) - except Exception as e: - self.log.error( - f'Failed to reach target device. Try running "{self.binary_path}' - + ' doctor" to diagnose issues.' - ) - raise e - - self._has_been_reachable = True - - if not self._has_logged_version: - self._has_logged_version = True - self.compare_version(result) - - def compare_version(self, target_show_result: subprocess.CompletedProcess) -> None: - """Compares the version of Fuchsia with the version of ffx. - - Args: - target_show_result: Result of the target show command with JSON - output mode enabled - """ - result_json = json.loads(target_show_result.stdout) - build_info = next(filter(lambda s: s.get("label") == "build", result_json)) - version_info = next( - filter(lambda s: s.get("label") == "version", build_info["child"]) - ) - device_version = version_info.get("value") - ffx_version = self.run("version").stdout.decode("utf-8") - - self.log.info(f"Device version: {device_version}, ffx version: {ffx_version}") - if device_version != ffx_version: - self.log.warning( - "ffx versions that differ from device versions may" - + " have compatibility issues. It is recommended to" - + " use versions within 6 weeks of each other." - )
diff --git a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py deleted file mode 100644 index 30af9a8..0000000 --- a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py +++ /dev/null
@@ -1,90 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import http - -import antlion.controllers.fuchsia_lib.base_lib as base_lib - -HW_PWR_STATE_CONTROL_TIMEOUT = 5 - - -class FuchsiaHardwarePowerStatecontrolLib(base_lib.BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "hardware_power_statecontrol") - - def send_command(self, test_cmd, test_args, response_timeout=30): - """Wrap send_command to allow disconnects after sending the request.""" - try: - response = super().send_command(test_cmd, test_args, response_timeout) - except ( - TimeoutError, - http.client.RemoteDisconnected, - base_lib.DeviceOffline, - ) as e: - logging.warn(f"Error while sending power command: {e}") - return - return response - - def suspendReboot(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT): - """Call Suspend Reboot. - - Returns: - None if success. - """ - test_cmd = "hardware_power_statecontrol_facade.SuspendReboot" - test_args = {} - return self.send_command(test_cmd, test_args, response_timeout=timeout) - - def suspendRebootBootloader(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT): - """Call Suspend Reboot Bootloader - - Returns: - None if success. 
- """ - test_cmd = "hardware_power_statecontrol_facade.SuspendRebootBootloader" - test_args = {} - return self.send_command(test_cmd, test_args, response_timeout=timeout) - - def suspendPoweroff(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT): - """Call Suspend Poweroff - - Returns: - None if success. - """ - test_cmd = "hardware_power_statecontrol_facade.SuspendPoweroff" - test_args = {} - return self.send_command(test_cmd, test_args, response_timeout=timeout) - - def suspendMexec(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT): - """Call Suspend Mexec - - Returns: - None if success. - """ - test_cmd = "hardware_power_statecontrol_facade.SuspendMexec" - test_args = {} - return self.send_command(test_cmd, test_args, response_timeout=timeout) - - def suspendRam(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT): - """Call Suspend Ram - - Returns: - None if success. - """ - test_cmd = "hardware_power_statecontrol_facade.SuspendRam" - test_args = {} - return self.send_command(test_cmd, test_args, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py deleted file mode 100644 index 0ff858c..0000000 --- a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py +++ /dev/null
@@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import logger -from antlion import signals - - -class NetstackControllerError(signals.ControllerError): - pass - - -class NetstackController: - """Contains methods related to netstack, to be used in FuchsiaDevice object""" - - def __init__(self, fuchsia_device): - self.device = fuchsia_device - self.log = logger.create_tagged_trace_logger( - "NetstackController for FuchsiaDevice | %s" % self.device.ip - ) - - def list_interfaces(self): - """Retrieve netstack interfaces from netstack facade - - Returns: - List of dicts, one for each interface, containing interface - information - """ - response = self.device.sl4f.netstack_lib.netstackListInterfaces() - if response.get("error"): - raise NetstackControllerError( - "Failed to get network interfaces list: %s" % response["error"] - ) - return response["result"]
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py deleted file mode 100644 index 922b167..0000000 --- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py +++ /dev/null
@@ -1,189 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from antlion import logger -from antlion import signals -from antlion import utils - -TIME_TO_SLEEP_BETWEEN_RETRIES = 1 -TIME_TO_WAIT_FOR_COUNTRY_CODE = 10 - - -class WlanControllerError(signals.ControllerError): - pass - - -class WlanController: - """Contains methods related to wlan core, to be used in FuchsiaDevice object""" - - def __init__(self, fuchsia_device): - self.device = fuchsia_device - self.log = logger.create_tagged_trace_logger( - "WlanController for FuchsiaDevice | %s" % self.device.ip - ) - - # TODO(70501): Wrap wlan_lib functions and setup from FuchsiaDevice here - # (similar to how WlanPolicyController does it) to prevent FuchsiaDevice - # from growing too large. - def _configure_wlan(self): - pass - - def _deconfigure_wlan(self): - pass - - def update_wlan_interfaces(self): - """Retrieves WLAN interfaces from device and sets the FuchsiaDevice - attributes. 
- """ - wlan_interfaces = self.get_interfaces_by_role() - self.device.wlan_client_interfaces = wlan_interfaces["client"] - self.device.wlan_ap_interfaces = wlan_interfaces["ap"] - - # Set test interfaces to value from config, else the first found - # interface, else None - self.device.wlan_client_test_interface_name = self.device.conf_data.get( - "wlan_client_test_interface", - next(iter(self.device.wlan_client_interfaces), None), - ) - - self.device.wlan_ap_test_interface_name = self.device.conf_data.get( - "wlan_ap_test_interface", next(iter(self.device.wlan_ap_interfaces), None) - ) - - def get_interfaces_by_role(self): - """Retrieves WLAN interface information, supplimented by netstack info. - - Returns: - Dict with keys 'client' and 'ap', each of which contain WLAN - interfaces. - """ - - # Retrieve WLAN interface IDs - response = self.device.sl4f.wlan_lib.wlanGetIfaceIdList() - if response.get("error"): - raise WlanControllerError( - "Failed to get WLAN iface ids: %s" % response["error"] - ) - - wlan_iface_ids = response.get("result", []) - if len(wlan_iface_ids) < 1: - return {"client": {}, "ap": {}} - - # Use IDs to get WLAN interface info and mac addresses - wlan_ifaces_by_mac = {} - for id in wlan_iface_ids: - response = self.device.sl4f.wlan_lib.wlanQueryInterface(id) - if response.get("error"): - raise WlanControllerError( - "Failed to query wlan iface id %s: %s" % (id, response["error"]) - ) - - mac = response["result"].get("sta_addr", None) - if mac is None: - # Fallback to older field name to maintain backwards - # compatibility with older versions of SL4F's - # QueryIfaceResponse. See https://fxrev.dev/562146. - mac = response["result"].get("mac_addr") - - wlan_ifaces_by_mac[utils.mac_address_list_to_str(mac)] = response["result"] - - # Use mac addresses to query the interfaces from the netstack view, - # which allows us to supplement the interface information with the name, - # netstack_id, etc. 
- - # TODO(fxb/75909): This tedium is necessary to get the interface name - # because only netstack has that information. The bug linked here is - # to reconcile some of the information between the two perspectives, at - # which point we can eliminate step. - net_ifaces = self.device.netstack_controller.list_interfaces() - wlan_ifaces_by_role = {"client": {}, "ap": {}} - for iface in net_ifaces: - try: - # Some interfaces might not have a MAC - iface_mac = utils.mac_address_list_to_str(iface["mac"]) - except Exception as e: - self.log.debug(f"Error {e} getting MAC for iface {iface}") - continue - if iface_mac in wlan_ifaces_by_mac: - wlan_ifaces_by_mac[iface_mac]["netstack_id"] = iface["id"] - - # Add to return dict, mapped by role then name. - wlan_ifaces_by_role[wlan_ifaces_by_mac[iface_mac]["role"].lower()][ - iface["name"] - ] = wlan_ifaces_by_mac[iface_mac] - - return wlan_ifaces_by_role - - def set_country_code(self, country_code): - """Sets country code through the regulatory region service and waits - for the code to be applied to WLAN PHY. - - Args: - country_code: string, the 2 character country code to set - - Raises: - EnvironmentError - failure to get/set regulatory region - ConnectionError - failure to query PHYs - """ - self.log.info("Setting DUT country code to %s" % country_code) - country_code_response = self.device.sl4f.regulatory_region_lib.setRegion( - country_code - ) - if country_code_response.get("error"): - raise EnvironmentError( - "Failed to set country code (%s) on DUT. Error: %s" - % (country_code, country_code_response["error"]) - ) - - self.log.info( - "Verifying DUT country code was correctly set to %s." % country_code - ) - phy_ids_response = self.device.sl4f.wlan_lib.wlanPhyIdList() - if phy_ids_response.get("error"): - raise ConnectionError( - "Failed to get phy ids from DUT. 
Error: %s" - % (country_code, phy_ids_response["error"]) - ) - - end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE - while time.time() < end_time: - for id in phy_ids_response["result"]: - get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(id) - if get_country_response.get("error"): - raise ConnectionError( - "Failed to query PHY ID (%s) for country. Error: %s" - % (id, get_country_response["error"]) - ) - - set_code = "".join( - [chr(ascii_char) for ascii_char in get_country_response["result"]] - ) - if set_code != country_code: - self.log.debug( - "PHY (id: %s) has incorrect country code set. " - "Expected: %s, Got: %s" % (id, country_code, set_code) - ) - break - else: - self.log.info("All PHYs have expected country code (%s)" % country_code) - break - time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES) - else: - raise EnvironmentError( - "Failed to set DUT country code to %s." % country_code - )
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py deleted file mode 100644 index 5ef126b..0000000 --- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py +++ /dev/null
@@ -1,616 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from antlion import logger -from antlion import signals - -from antlion.controllers.fuchsia_lib.sl4f import SL4F -from antlion.controllers.fuchsia_lib.ssh import SSHProvider - -SAVED_NETWORKS = "saved_networks" -CLIENT_STATE = "client_connections_state" -CONNECTIONS_ENABLED = "ConnectionsEnabled" -CONNECTIONS_DISABLED = "ConnectionsDisabled" - -STATE_CONNECTED = "Connected" -STATE_CONNECTING = "Connecting" -STATE_DISCONNECTED = "Disconnected" -STATE_CONNECTION_STOPPED = "ConnectionStopped" - -SESSION_MANAGER_TIMEOUT_SEC = 10 -FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30 -DEFAULT_GET_UPDATE_TIMEOUT = 60 - - -class WlanPolicyControllerError(signals.ControllerError): - pass - - -class WlanPolicyController: - """Contains methods related to the wlan policy layer, to be used in the - FuchsiaDevice object. - - Attributes: - sl4f: sl4f module for communicating to the WLAN policy controller. - ssh: transport to fuchsia device to stop component processes. - """ - - def __init__(self, sl4f: SL4F, ssh: SSHProvider): - """ - Args: - sl4f: sl4f module for communicating to the WLAN policy controller. - ssh: transport to fuchsia device to stop component processes. 
- """ - self.preserved_networks_and_client_state = None - self.policy_configured = False - self.sl4f = sl4f - self.ssh = ssh - self.log = logger.create_tagged_trace_logger( - f"WlanPolicyController | {self.ssh.config.host_name}" - ) - - def configure_wlan( - self, - preserve_saved_networks: bool, - timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT, - ) -> None: - """Sets up wlan policy layer. - - Args: - preserve_saved_networks: whether to clear existing saved - networks and client state, to be restored at test close. - timeout_sec: time to wait for device to configure WLAN. - """ - - # We need to stop session manager to free control of - # fuchsia.wlan.policy.ClientController, which can only be used by a - # single caller at a time. SL4F needs the ClientController to trigger - # WLAN policy state changes. On eng builds the session_manager can be - # restarted after being stopped during reboot so we attempt killing the - # session manager process for 10 seconds. - # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl - if "cast_agent.cm" in self.ssh.run("ps").stdout: - end_time_session_manager_sec = time.time() + SESSION_MANAGER_TIMEOUT_SEC - while time.time() < end_time_session_manager_sec: - self.ssh.stop_component("session_manager", is_cfv2_component=True) - - # Acquire control of policy layer - end_time_config_sec = time.time() + timeout_sec - controller_errors = [] - while time.time() < end_time_config_sec: - # Create a client controller - response = self.sl4f.wlan_policy_lib.wlanCreateClientController() - if response.get("error"): - controller_errors.append(response["error"]) - self.log.debug(response["error"]) - time.sleep(1) - continue - break - else: - self.log.warning( - "Failed to create and use a WLAN policy client controller. Errors: [" - + "; ".join(controller_errors) - + "]" - ) - raise WlanPolicyControllerError( - "Failed to create and use a WLAN policy client controller." 
- ) - - self.log.info("ACTS tests now have control of the WLAN policy layer.") - - if preserve_saved_networks and not self.preserved_networks_and_client_state: - self.preserved_networks_and_client_state = ( - self.remove_and_preserve_networks_and_client_state() - ) - if not self.start_client_connections(): - raise WlanPolicyControllerError( - "Failed to start client connections during configuration." - ) - - self.policy_configured = True - - def _deconfigure_wlan(self): - if not self.stop_client_connections(): - raise WlanPolicyControllerError( - "Failed to stop client connections during deconfiguration." - ) - self.policy_configured = False - - def clean_up(self) -> None: - if self.preserved_networks_and_client_state: - # It is possible for policy to have been configured before, but - # deconfigured before test end. In this case, in must be setup - # before restoring networks - if not self.policy_configured: - self.configure_wlan() - self.restore_preserved_networks_and_client_state() - - def start_client_connections(self): - """Allow device to connect to networks via policy layer (including - autoconnecting to saved networks). - - Returns: - True, if successful. False otherwise.""" - start_response = self.sl4f.wlan_policy_lib.wlanStartClientConnections() - if start_response.get("error"): - self.log.error( - "Failed to start client connections. Err: %s" % start_response["error"] - ) - return False - return True - - def stop_client_connections(self): - """Prevent device from connecting and autoconnecting to networks via the - policy layer. - - Returns: - True, if successful. False otherwise.""" - stop_response = self.sl4f.wlan_policy_lib.wlanStopClientConnections() - if stop_response.get("error"): - self.log.error( - "Failed to stop client connections. Err: %s" % stop_response["error"] - ) - return False - return True - - def save_and_connect(self, ssid, security, password=None, timeout=30): - """Saves and connects to the network. 
This is the policy version of - connect and check_connect_response because the policy layer - requires a saved network and the policy connect does not return - success or failure - - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - password: string, the credential of the network if applicable - timeout: int, time in seconds to wait for connection - - Returns: - True, if successful. False otherwise. - """ - # Save network and check response - if not self.save_network(ssid, security, password=password): - return False - # Make connect call and check response - self.sl4f.wlan_policy_lib.wlanSetNewListener() - if not self.send_connect_command(ssid, security): - return False - return self.wait_for_connect(ssid, security, timeout=timeout) - - def save_and_wait_for_autoconnect(self, ssid, security, password=None, timeout=30): - """Saves a network and waits, expecting an autoconnection to the newly - saved network. This differes from save_and_connect, as it doesn't - expressly trigger a connection first. There are cases in which an - autoconnect won't occur after a save (like if the device is connected - already), so this should be used with caution to test very specific - situations. - - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - password: string, the credential of the network if applicable - timeout: int, time in seconds to wait for connection - - Returns: - True, if successful. False otherwise. - """ - if not self.save_network(ssid, security, password=password): - return False - return self.wait_for_connect(ssid, security, timeout=timeout) - - def remove_and_wait_for_disconnect( - self, ssid, security_type, password=None, state=None, status=None, timeout=30 - ): - """Removes a single network and waits for a disconnect. It is not - guaranteed the device will stay disconnected, as it may autoconnect - to a different saved network. 
- - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - password: string, the credential of the network if applicable - state: string, The connection state we are expecting, ie "Disconnected" or - "Failed" - status: string, The disconnect status we expect, it "ConnectionStopped" or - "ConnectionFailed" - timeout: int, time in seconds to wait for connection - - Returns: - True, if successful. False otherwise. - """ - self.sl4f.wlan_policy_lib.wlanSetNewListener() - if not self.remove_network(ssid, security_type, password=password): - return False - return self.wait_for_disconnect( - ssid, security_type, state=state, status=status, timeout=timeout - ) - - def remove_all_networks_and_wait_for_no_connections( - self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT - ) -> bool: - """Removes all networks and waits until device is not connected to any - networks. This should be used as the policy version of disconnect. - - Args: - timeout_sec: The time to wait to see no connections. - - Returns: - True, if successful. False otherwise. - """ - self.sl4f.wlan_policy_lib.wlanSetNewListener() - if not self.remove_all_networks(): - self.log.error( - "Failed to remove all networks. Cannot continue to " - "wait_for_no_connections." - ) - return False - return self.wait_for_no_connections(timeout_sec=timeout_sec) - - def save_network(self, ssid, security_type, password=None): - """Save a network via the policy layer. - - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - password: string, the credential of the network if applicable - - Returns: - True, if successful. False otherwise. 
- """ - save_response = self.sl4f.wlan_policy_lib.wlanSaveNetwork( - ssid, security_type, target_pwd=password - ) - if save_response.get("error"): - self.log.error( - "Failed to save network %s with error: %s" - % (ssid, save_response["error"]) - ) - return False - return True - - def remove_network(self, ssid, security_type, password=None): - """Remove a saved network via the policy layer. - - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - password: string, the credential of the network if applicable - - Returns: - True, if successful. False otherwise. - """ - remove_response = self.sl4f.wlan_policy_lib.wlanRemoveNetwork( - ssid, security_type, target_pwd=password - ) - if remove_response.get("error"): - self.log.error( - "Failed to remove network %s with error: %s" - % (ssid, remove_response["error"]) - ) - return False - return True - - def remove_all_networks(self): - """Removes all saved networks from device. - - Returns: - True, if successful. False otherwise. - """ - remove_all_response = self.sl4f.wlan_policy_lib.wlanRemoveAllNetworks() - if remove_all_response.get("error"): - self.log.error( - "Error occurred removing all networks: %s" - % remove_all_response["error"] - ) - return False - return True - - def get_saved_networks(self): - """Retrieves saved networks from device. - - Returns: - list of saved networks - - Raises: - WlanPolicyControllerError, if retrieval fails. - """ - saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks() - if saved_networks_response.get("error"): - raise WlanPolicyControllerError( - "Failed to retrieve saved networks: %s" - % saved_networks_response["error"] - ) - return saved_networks_response["result"] - - def send_connect_command(self, ssid, security_type): - """Sends a connect command to a network that is already saved. This does - not wait to guarantee the connection is successful (for that, use - save_and_connect). 
- - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - password: string, the credential of the network if applicable - - Returns: - True, if command send successfully. False otherwise. - """ - connect_response = self.sl4f.wlan_policy_lib.wlanConnect(ssid, security_type) - if connect_response.get("error"): - self.log.error( - "Error occurred when sending policy connect command: %s" - % connect_response["error"] - ) - return False - return True - - def wait_for_connect(self, ssid, security_type, timeout=30): - """Wait until the device has connected to the specified network. - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - timeout: int, seconds to wait for a update showing connection - Returns: - True if we see a connect to the network, False otherwise. - """ - security_type = str(security_type) - # Wait until we've connected. - end_time = time.time() + timeout - while time.time() < end_time: - time_left = max(1, int(end_time - time.time())) - - try: - update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left) - except TimeoutError: - self.log.error( - "Timed out waiting for response from device " - 'while waiting for network with SSID "%s" to ' - "connect. Device took too long to connect or " - "the request timed out for another reason." % ssid - ) - self.sl4f.wlan_policy_lib.wlanSetNewListener() - return False - if update.get("error"): - # This can occur for many reasons, so it is not necessarily a - # failure. - self.log.debug( - "Error occurred getting status update: %s" % update["error"] - ) - continue - - for network in update["result"]["networks"]: - if ( - network["id"]["ssid"] == ssid - or network["id"]["type_"].lower() == security_type.lower() - ): - if "state" not in network: - raise WlanPolicyControllerError( - "WLAN status missing state field." 
- ) - elif network["state"].lower() == STATE_CONNECTED.lower(): - return True - # Wait a bit before requesting another status update - time.sleep(1) - # Stopped getting updates because out timeout - self.log.error( - 'Timed out waiting for network with SSID "%s" to ' "connect" % ssid - ) - return False - - def wait_for_disconnect( - self, ssid, security_type, state=None, status=None, timeout=30 - ): - """Wait for a disconnect of the specified network on the given device. This - will check that the correct connection state and disconnect status are - given in update. If we do not see a disconnect after some time, - return false. - - Args: - ssid: string, the network name - security: string, security type of network (see sl4f.wlan_policy_lib) - state: string, The connection state we are expecting, ie "Disconnected" or - "Failed" - status: string, The disconnect status we expect, it "ConnectionStopped" or - "ConnectionFailed" - timeout: int, seconds to wait before giving up - - Returns: True if we saw a disconnect as specified, or False otherwise. - """ - if not state: - state = STATE_DISCONNECTED - if not status: - status = STATE_CONNECTION_STOPPED - - end_time = time.time() + timeout - while time.time() < end_time: - time_left = max(1, int(end_time - time.time())) - try: - update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left) - except TimeoutError: - self.log.error( - "Timed out waiting for response from device " - 'while waiting for network with SSID "%s" to ' - "disconnect. Device took too long to disconnect " - "or the request timed out for another reason." % ssid - ) - self.sl4f.wlan_policy_lib.wlanSetNewListener() - return False - - if update.get("error"): - # This can occur for many reasons, so it is not necessarily a - # failure. - self.log.debug( - "Error occurred getting status update: %s" % update["error"] - ) - continue - # Update should include network, either connected to or recently disconnected. 
- if len(update["result"]["networks"]) == 0: - raise WlanPolicyControllerError("WLAN state update is missing network.") - - for network in update["result"]["networks"]: - if ( - network["id"]["ssid"] == ssid - or network["id"]["type_"].lower() == security_type.lower() - ): - if "state" not in network or "status" not in network: - raise WlanPolicyControllerError( - "Client state summary's network is missing fields" - ) - # If still connected, we will wait for another update and check again - elif network["state"].lower() == STATE_CONNECTED.lower(): - continue - elif network["state"].lower() == STATE_CONNECTING.lower(): - self.log.error( - 'Update is "Connecting", but device should already be ' - "connected; expected disconnect" - ) - return False - # Check that the network state and disconnect status are expected, ie - # that it isn't ConnectionFailed when we expect ConnectionStopped - elif ( - network["state"].lower() != state.lower() - or network["status"].lower() != status.lower() - ): - self.log.error( - "Connection failed: a network failure occurred that is unrelated" - "to remove network or incorrect status update. \nExpected state: " - "%s, Status: %s,\nActual update: %s" - % (state, status, network) - ) - return False - else: - return True - # Wait a bit before requesting another status update - time.sleep(1) - # Stopped getting updates because out timeout - self.log.error( - 'Timed out waiting for network with SSID "%s" to ' "connect" % ssid - ) - return False - - def wait_for_no_connections( - self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT - ) -> bool: - """Waits to see that there are no existing connections the device. This - is the simplest way to watch for disconnections when only a single - network is saved/present. - - Args: - timeout_sec: The time to wait to see no connections. - - Returns: - True, if successful. False, if still connected after timeout. 
- """ - # If there are already no existing connections when this function is called, - # then an update won't be generated by the device, and we'll time out. - # Force an update by getting a new listener. - self.sl4f.wlan_policy_lib.wlanSetNewListener() - end_time = time.time() + timeout_sec - while time.time() < end_time: - time_left = max(1, int(end_time - time.time())) - try: - update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left) - except TimeoutError: - self.log.info( - "Timed out getting status update while waiting for all" - " connections to end." - ) - self.sl4f.wlan_policy_lib.wlanSetNewListener() - return False - - if update["error"] != None: - self.log.info("Failed to get status update") - return False - # If any network is connected or being connected to, wait for them - # to disconnect. - if any( - network["state"].lower() - in {STATE_CONNECTED.lower(), STATE_CONNECTING.lower()} - for network in update["result"]["networks"] - ): - continue - else: - return True - return False - - def remove_and_preserve_networks_and_client_state(self): - """Preserves networks already saved on devices before removing them to - setup up for a clean test environment. Records the state of client - connections before tests. - - Raises: - WlanPolicyControllerError, if the network removal is unsuccessful - """ - # Save preexisting saved networks - preserved_networks_and_state = {} - saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks() - if saved_networks_response.get("error"): - raise WlanPolicyControllerError( - "Failed to get preexisting saved networks: %s" - % saved_networks_response["error"] - ) - if saved_networks_response.get("result") != None: - preserved_networks_and_state[SAVED_NETWORKS] = saved_networks_response[ - "result" - ] - - # Remove preexisting saved networks - if not self.remove_all_networks(): - raise WlanPolicyControllerError( - "Failed to clear networks and disconnect at FuchsiaDevice creation." 
- ) - - self.sl4f.wlan_policy_lib.wlanSetNewListener() - update_response = self.sl4f.wlan_policy_lib.wlanGetUpdate() - update_result = update_response.get("result", {}) - if update_result.get("state"): - preserved_networks_and_state[CLIENT_STATE] = update_result["state"] - else: - self.log.warn( - "Failed to get update; test will not start or " - "stop client connections at the end of the test." - ) - - self.log.info("Saved networks cleared and preserved.") - return preserved_networks_and_state - - def restore_preserved_networks_and_client_state(self): - """Restore saved networks and client state onto device if they have - been preserved. - """ - if not self.remove_all_networks(): - self.log.warn("Failed to remove saved networks before restore.") - restore_success = True - for network in self.preserved_networks_and_client_state[SAVED_NETWORKS]: - if not self.save_network( - network["ssid"], network["security_type"], network["credential_value"] - ): - self.log.warn("Failed to restore network (%s)." % network["ssid"]) - restore_success = False - starting_state = self.preserved_networks_and_client_state[CLIENT_STATE] - if starting_state == CONNECTIONS_ENABLED: - state_restored = self.start_client_connections() - else: - state_restored = self.stop_client_connections() - if not state_restored: - self.log.warn("Failed to restore client connections state.") - restore_success = False - if restore_success: - self.log.info("Preserved networks and client state restored.") - self.preserved_networks_and_client_state = None - return restore_success
diff --git a/src/antlion/controllers/fuchsia_lib/location/__init__.py b/src/antlion/controllers/fuchsia_lib/location/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/fuchsia_lib/location/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py deleted file mode 100644 index 54d9e44..0000000 --- a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py +++ /dev/null
@@ -1,38 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - - -class FuchsiaRegulatoryRegionLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "location_regulatory_region") - - # TODO(fxb/46727): Provide an analagous call to check the region - # configured into the driver. - def setRegion(self, region_code): - """Set regulatory region. - - Args: - region_code: 2-byte ASCII string. - - Returns: - Dictionary, None if success, error if error. - """ - test_cmd = "location_regulatory_region_facade.set_region" - test_args = {"region": region_code} - - return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/logging_lib.py b/src/antlion/controllers/fuchsia_lib/logging_lib.py deleted file mode 100644 index 83825c4..0000000 --- a/src/antlion/controllers/fuchsia_lib/logging_lib.py +++ /dev/null
@@ -1,68 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime - -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - - -class FuchsiaLoggingLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "logging") - - def logE(self, message): - """Log a message of level Error directly to the syslog. - - Args: - message: The message to log. - - Returns: - Dictionary, None if success, error if error. - """ - test_cmd = "logging_facade.LogErr" - test_args = { - "message": "[%s] %s" % (datetime.datetime.now(), message), - } - - return self.send_command(test_cmd, test_args) - - def logI(self, message): - """Log a message of level Info directly to the syslog. - - Args: - message: The message to log. - - Returns: - Dictionary, None if success, error if error. - """ - test_cmd = "logging_facade.LogInfo" - test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)} - - return self.send_command(test_cmd, test_args) - - def logW(self, message): - """Log a message of level Warning directly to the syslog. - - Args: - message: The message to log. - - Returns: - Dictionary, None if success, error if error. - """ - test_cmd = "logging_facade.LogWarn" - test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)} - - return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/__init__.py b/src/antlion/controllers/fuchsia_lib/netstack/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/fuchsia_lib/netstack/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py deleted file mode 100644 index 481e9bd..0000000 --- a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py +++ /dev/null
@@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - - -class FuchsiaNetstackLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "netstack") - - def netstackListInterfaces(self): - """ListInterfaces command - - Returns: - List of interface paths - """ - test_cmd = "netstack_facade.ListInterfaces" - test_args = {} - - return self.send_command(test_cmd, test_args) - - def enableInterface(self, id): - """Enable Interface - - Args: - id: The interface ID. - - Returns: - Dictionary, None if success, error if error. - """ - test_cmd = "netstack_facade.EnableInterface" - test_args = {"identifier": id} - - return self.send_command(test_cmd, test_args) - - def disableInterface(self, id): - """Disable Interface - - Args: - id: The interface ID. - - Returns: - Dictionary, None if success, error if error. - """ - test_cmd = "netstack_facade.DisableInterface" - test_args = {"identifier": id} - - return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/package_server.py b/src/antlion/controllers/fuchsia_lib/package_server.py deleted file mode 100644 index d497e96..0000000 --- a/src/antlion/controllers/fuchsia_lib/package_server.py +++ /dev/null
@@ -1,252 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import os -import shutil -import socket -import subprocess -import tarfile -import tempfile - -from dataclasses import dataclass -from datetime import datetime -from typing import TextIO, List, Optional - -from antlion import context -from antlion import logger -from antlion import signals -from antlion import utils - -from antlion.controllers.fuchsia_lib.ssh import SSHError, SSHProvider -from antlion.net import wait_for_port -from antlion.tracelogger import TraceLogger - -DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com" -PM_SERVE_STOP_TIMEOUT_SEC = 5 - - -class PackageServerError(signals.TestAbortClass): - pass - - -def random_port() -> int: - s = socket.socket() - s.bind(("", 0)) - return s.getsockname()[1] - - -@dataclass -class Route: - """Represent a route in the routing table.""" - - preferred_source: Optional[str] - - -def find_routes_to(dest_ip) -> List[Route]: - """Find the routes used to reach a destination. - - Look through the routing table for the routes that would be used without - sending any packets. This is especially helpful for when the device is - currently unreachable. - - Only natively supported on Linux. MacOS has iproute2mac, but it doesn't - support JSON formatted output. - - TODO(http://b/238924195): Add support for MacOS. 
- - Args: - dest_ip: IP address of the destination - - Throws: - CalledProcessError: if the ip command returns a non-zero exit code - JSONDecodeError: if the ip command doesn't return JSON - - Returns: - Routes with destination to dest_ip. - """ - resp = subprocess.run( - f"ip -json route get {dest_ip}".split(), capture_output=True, check=True - ) - routes = json.loads(resp.stdout) - return [Route(r.get("prefsrc")) for r in routes] - - -def find_host_ip(device_ip: str) -> str: - """Find the host's source IP used to reach a device. - - Not all host interfaces can talk to a given device. This limitation can - either be physical through hardware or virtual through routing tables. - Look through the routing table without sending any packets then return the - preferred source IP address. - - Args: - device_ip: IP address of the device - - Raises: - PackageServerError: if there are multiple or no routes to device_ip, or - if the route doesn't contain "prefsrc" - - Returns: - The host IP used to reach device_ip. - """ - routes = find_routes_to(device_ip) - if len(routes) != 1: - raise PackageServerError( - f"Expected only one route to {device_ip}, got {routes}" - ) - - route = routes[0] - if not route.preferred_source: - raise PackageServerError(f'Route does not contain "prefsrc": {route}') - return route.preferred_source - - -class PackageServer: - """Package manager for Fuchsia; an interface to the "pm" CLI tool.""" - - def __init__(self, packages_archive_path: str) -> None: - """ - Args: - packages_archive_path: Path to an archive containing the pm binary - and amber-files. 
- """ - self.log: TraceLogger = logger.create_tagged_trace_logger("pm") - - self._server_log: Optional[TextIO] = None - self._server_proc: Optional[subprocess.Popen] = None - self._log_path: Optional[str] = None - - self._tmp_dir = tempfile.mkdtemp(prefix="packages-") - tar = tarfile.open(packages_archive_path, "r:gz") - tar.extractall(self._tmp_dir) - - self._binary_path = os.path.join(self._tmp_dir, "pm") - self._packages_path = os.path.join(self._tmp_dir, "amber-files") - self._port = random_port() - - self._assert_repo_has_not_expired() - - def clean_up(self) -> None: - if self._server_proc: - self.stop_server() - if self._tmp_dir: - shutil.rmtree(self._tmp_dir) - - def _assert_repo_has_not_expired(self) -> None: - """Abort if the repository metadata has expired. - - Raises: - TestAbortClass: when the timestamp.json file has expired - """ - with open(f"{self._packages_path}/repository/timestamp.json", "r") as f: - data = json.load(f) - expiresAtRaw = data["signed"]["expires"] - expiresAt = datetime.strptime(expiresAtRaw, "%Y-%m-%dT%H:%M:%SZ") - if expiresAt <= datetime.now(): - raise signals.TestAbortClass( - f"{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}" - ) - - def start(self) -> None: - """Start the package server. - - Does not check for errors; view the log file for any errors. 
- """ - if self._server_proc: - self.log.warn( - "Skipping to start the server since it has already been started" - ) - return - - pm_command = f"{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}" - - root_dir = context.get_current_context().get_full_output_path() - epoch = utils.get_current_epoch_time() - time_stamp = logger.normalize_log_line_timestamp( - logger.epoch_to_log_line_timestamp(epoch) - ) - self._log_path = os.path.join(root_dir, f"pm_server.{time_stamp}.log") - - self._server_log = open(self._log_path, "a+") - self._server_proc = subprocess.Popen( - pm_command.split(), - preexec_fn=os.setpgrp, - stdout=self._server_log, - stderr=subprocess.STDOUT, - ) - try: - wait_for_port("127.0.0.1", self._port) - except TimeoutError as e: - if self._server_log: - self._server_log.close() - if self._log_path: - with open(self._log_path, "r") as f: - logs = f.read() - raise TimeoutError( - f"pm serve failed to expose port {self._port}. Logs:\n{logs}" - ) from e - - self.log.info(f"Serving packages on port {self._port}") - - def configure_device( - self, ssh: SSHProvider, repo_name=DEFAULT_FUCHSIA_REPO_NAME - ) -> None: - """Configure the device to use this package server. - - Args: - ssh: Device SSH transport channel - repo_name: Name of the repo to alias this package server - """ - # Remove any existing repositories that may be stale. - try: - ssh.run(f"pkgctl repo rm fuchsia-pkg://{repo_name}") - except SSHError as e: - if "NOT_FOUND" not in e.result.stderr: - raise e - - # Configure the device with the new repository. 
- host_ip = find_host_ip(ssh.config.host_name) - repo_url = f"http://{host_ip}:{self._port}" - ssh.run(f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json") - self.log.info( - f'Added repo "{repo_name}" as {repo_url} on device {ssh.config.host_name}' - ) - - def stop_server(self) -> None: - """Stop the package server.""" - if not self._server_proc: - self.log.warn( - "Skipping to stop the server since it hasn't been started yet" - ) - return - - self._server_proc.terminate() - try: - self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC) - except subprocess.TimeoutExpired: - self.log.warn( - f"Taking over {PM_SERVE_STOP_TIMEOUT_SEC}s to stop. Killing the server" - ) - self._server_proc.kill() - self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC) - finally: - if self._server_log: - self._server_log.close() - - self._server_proc = None - self._log_path = None - self._server_log = None
diff --git a/src/antlion/controllers/fuchsia_lib/sl4f.py b/src/antlion/controllers/fuchsia_lib/sl4f.py deleted file mode 100644 index e051d7c..0000000 --- a/src/antlion/controllers/fuchsia_lib/sl4f.py +++ /dev/null
@@ -1,130 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import ipaddress -import sys - -from antlion import logger -from antlion.controllers.fuchsia_lib.device_lib import DeviceLib -from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import ( - FuchsiaHardwarePowerStatecontrolLib, -) -from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import ( - FuchsiaRegulatoryRegionLib, -) -from antlion.controllers.fuchsia_lib.logging_lib import FuchsiaLoggingLib -from antlion.controllers.fuchsia_lib.netstack.netstack_lib import FuchsiaNetstackLib -from antlion.controllers.fuchsia_lib.ssh import SSHProvider, SSHError -from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import FuchsiaWlanApPolicyLib -from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import ( - FuchsiaWlanDeprecatedConfigurationLib, -) -from antlion.controllers.fuchsia_lib.wlan_lib import FuchsiaWlanLib -from antlion.controllers.fuchsia_lib.wlan_policy_lib import FuchsiaWlanPolicyLib -from antlion.net import wait_for_port - -DEFAULT_SL4F_PORT = 80 -START_SL4F_V2_CMD = "start_sl4f" - - -class SL4F: - """Module for Fuchsia devices to interact with the SL4F tool. - - Attributes: - ssh: SSHProvider transport to start and stop SL4F. - address: http address for SL4F server including SL4F port. - log: Logger for the device-specific instance of SL4F. 
- """ - - def __init__(self, ssh: SSHProvider, port: int = DEFAULT_SL4F_PORT) -> None: - """ - Args: - ssh: SSHProvider transport to start and stop SL4F. - port: Port for the SL4F server to listen on. - """ - host = ssh.config.host_name - - if sys.version_info < (3, 9): - # TODO(http://b/261746355): Remove this if statement once the - # minimum Python version is 3.9 or newer. - ip = ipaddress.ip_address(host.split("%")[0]) - if ip.version == 4: - self.address = f"http://{ip}:{port}" - elif ip.version == 6: - ip = ssh.config.host_name - self.address = f"http://[{ip}]:{port}" - else: - ip = ipaddress.ip_address(host) - if ip.version == 4: - self.address = f"http://{ip}:{port}" - elif ip.version == 6: - self.address = f"http://[{ip}]:{port}" - - self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address}") - - try: - ssh.stop_component("sl4f") - ssh.run(START_SL4F_V2_CMD).stdout - except SSHError: - # TODO(fxbug.dev/99331) Remove support to run SL4F in CFv1 mode - # once ACTS no longer use images that comes with only CFv1 SL4F. - self.log.warn( - "Running SL4F in CFv1 mode, " - "this is deprecated for images built after 5/9/2022, " - "see https://fxbug.dev/77056 for more info." 
- ) - ssh.stop_component("sl4f") - ssh.start_v1_component("sl4f") - - try: - wait_for_port(host, port) - self.log.info("SL4F server is reachable") - except TimeoutError as e: - raise TimeoutError("SL4F server is unreachable") from e - - self._init_libraries() - - def _init_libraries(self) -> None: - # Grab commands from DeviceLib - self.device_lib = DeviceLib(self.address) - - # Grab commands from FuchsiaHardwarePowerStatecontrolLib - self.hardware_power_statecontrol_lib = FuchsiaHardwarePowerStatecontrolLib( - self.address - ) - - # Grab commands from FuchsiaRegulatoryRegionLib - self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address) - - # Grab commands from FuchsiaLoggingLib - self.logging_lib = FuchsiaLoggingLib(self.address) - - # Grab commands from FuchsiaNetstackLib - self.netstack_lib = FuchsiaNetstackLib(self.address) - - # Grab commands from FuchsiaWlanApPolicyLib - self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address) - - # Grabs command from FuchsiaWlanDeprecatedConfigurationLib - self.wlan_deprecated_configuration_lib = FuchsiaWlanDeprecatedConfigurationLib( - self.address - ) - - # Grab commands from FuchsiaWlanLib - self.wlan_lib = FuchsiaWlanLib(self.address) - - # Grab commands from FuchsiaWlanPolicyLib - self.wlan_policy_lib = FuchsiaWlanPolicyLib(self.address)
diff --git a/src/antlion/controllers/fuchsia_lib/ssh.py b/src/antlion/controllers/fuchsia_lib/ssh.py deleted file mode 100644 index 1d1f421..0000000 --- a/src/antlion/controllers/fuchsia_lib/ssh.py +++ /dev/null
@@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from antlion.capabilities.ssh import SSHError, SSHProvider - -DEFAULT_SSH_USER: str = "fuchsia" -DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519" -# The default package repository for all components. -FUCHSIA_PACKAGE_REPO_NAME = "fuchsia.com" - - -class FuchsiaSSHProvider(SSHProvider): - """Device-specific provider for SSH clients.""" - - def start_v1_component( - self, - component: str, - timeout_sec: int = 5, - repo: str = FUCHSIA_PACKAGE_REPO_NAME, - ) -> None: - """Start a CFv1 component in the background. - - Args: - component: Name of the component without ".cmx". - timeout_sec: Seconds to wait for the process to show up in 'ps'. - repo: Default package repository for all components. - - Raises: - TimeoutError: when the component doesn't launch within timeout_sec - """ - # The "run -d" command will hang when executed without a pseudo-tty - # allocated. 
- self.run( - f"run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx", - force_tty=True, - ) - - timeout = time.perf_counter() + timeout_sec - while True: - ps_cmd = self.run("ps") - if f"{component}.cmx" in ps_cmd.stdout: - return - if time.perf_counter() > timeout: - raise TimeoutError( - f'Failed to start "{component}.cmx" after {timeout_sec}s' - ) - - def stop_component(self, component: str, is_cfv2_component: bool = False) -> None: - """Stop all instances of a CFv1 or CFv2 component. - - Args: - component: Name of the component without suffix("cm" or "cmx"). - is_cfv2_component: Determines the component suffix to use. - """ - suffix = "cm" if is_cfv2_component else "cmx" - - try: - self.run(f"killall {component}.{suffix}") - self.log.info(f"Stopped component: {component}.{suffix}") - except SSHError as e: - if "no tasks found" in e.result.stderr: - self.log.debug(f"Could not find component: {component}.{suffix}") - return - raise e
diff --git a/src/antlion/controllers/fuchsia_lib/utils_lib.py b/src/antlion/controllers/fuchsia_lib/utils_lib.py deleted file mode 100644 index 1e1336a..0000000 --- a/src/antlion/controllers/fuchsia_lib/utils_lib.py +++ /dev/null
@@ -1,239 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import logging -import tarfile -import tempfile -import time - -from antlion import utils -from antlion.libs.proc import job -from antlion.utils import get_fuchsia_mdns_ipv6_address - -MDNS_LOOKUP_RETRY_MAX = 3 -FASTBOOT_TIMEOUT = 30 -FLASH_TIMEOUT_SEC = 60 * 5 # 5 minutes -AFTER_FLASH_BOOT_TIME = 30 -WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC = 360 -PROCESS_CHECK_WAIT_TIME_SEC = 30 - -FUCHSIA_SDK_URL = "gs://fuchsia-sdk/development" -FUCHSIA_RELEASE_TESTING_URL = "gs://fuchsia-release-testing/images" - - -def flash(fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5): - """A function to flash, not pave, a fuchsia_device - - Args: - fuchsia_device: An ACTS fuchsia_device - - Returns: - True if successful. - """ - if not fuchsia_device.authorized_file: - raise ValueError( - "A ssh authorized_file must be present in the " - "ACTS config to flash fuchsia_devices." - ) - # This is the product type from the fx set command. - # Do 'fx list-products' to see options in Fuchsia source tree. - if not fuchsia_device.product_type: - raise ValueError( - "A product type must be specified to flash " "fuchsia_devices." - ) - # This is the board type from the fx set command. - # Do 'fx list-boards' to see options in Fuchsia source tree. 
- if not fuchsia_device.board_type: - raise ValueError("A board type must be specified to flash " "fuchsia_devices.") - if not fuchsia_device.build_number: - fuchsia_device.build_number = "LATEST" - if not fuchsia_device.mdns_name: - raise ValueError( - "Either fuchsia_device mdns_name must be specified or " - "ip must be the mDNS name to be able to flash." - ) - - file_to_download = None - image_archive_path = None - image_path = None - - if not fuchsia_device.specific_image: - product_build = fuchsia_device.product_type - if fuchsia_device.build_type: - product_build = f"{product_build}_{fuchsia_device.build_type}" - if "LATEST" in fuchsia_device.build_number: - sdk_version = "sdk" - if "LATEST_F" in fuchsia_device.build_number: - f_branch = fuchsia_device.build_number.split("LATEST_F", 1)[1] - sdk_version = f"f{f_branch}_sdk" - file_to_download = ( - f"{FUCHSIA_RELEASE_TESTING_URL}/" - f"{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz" - ) - else: - # Must be a fully qualified build number (e.g. 5.20210721.4.1215) - file_to_download = ( - f"{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/" - f"{product_build}.{fuchsia_device.board_type}-release.tgz" - ) - elif "gs://" in fuchsia_device.specific_image: - file_to_download = fuchsia_device.specific_image - elif os.path.isdir(fuchsia_device.specific_image): - image_path = fuchsia_device.specific_image - elif tarfile.is_tarfile(fuchsia_device.specific_image): - image_archive_path = fuchsia_device.specific_image - else: - raise ValueError(f'Invalid specific_image "{fuchsia_device.specific_image}"') - - if image_path: - reboot_to_bootloader( - fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time - ) - logging.info( - f'Flashing {fuchsia_device.mdns_name} with {image_path} using authorized keys "{fuchsia_device.authorized_file}".' 
- ) - run_flash_script(fuchsia_device, image_path) - else: - suffix = fuchsia_device.board_type - with tempfile.TemporaryDirectory(suffix=suffix) as image_path: - if file_to_download: - logging.info(f"Downloading {file_to_download} to {image_path}") - job.run(f"gsutil cp {file_to_download} {image_path}") - image_archive_path = os.path.join( - image_path, os.path.basename(file_to_download) - ) - - if image_archive_path: - # Use tar command instead of tarfile.extractall, as it takes too long. - job.run(f"tar xfvz {image_archive_path} -C {image_path}", timeout=120) - - reboot_to_bootloader( - fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time - ) - - logging.info( - f'Flashing {fuchsia_device.mdns_name} with {image_archive_path} using authorized keys "{fuchsia_device.authorized_file}".' - ) - run_flash_script(fuchsia_device, image_path) - return True - - -def reboot_to_bootloader( - fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5 -): - import psutil - import usbinfo - from antlion.controllers.fuchsia_lib.ssh import SSHError - - if use_ssh: - logging.info("Sending reboot command via SSH to " "get into bootloader.") - # Sending this command will put the device in fastboot - # but it does not guarantee the device will be in fastboot - # after this command. There is no check so if there is an - # expectation of the device being in fastboot, then some - # other check needs to be done. - try: - fuchsia_device.ssh.run( - "dm rb", timeout_sec=fuchsia_reconnect_after_reboot_time - ) - except SSHError as e: - if "closed by remote host" not in e.result.stderr: - raise e - else: - pass - ## Todo: Add elif for SL4F if implemented in SL4F - - time_counter = 0 - while time_counter < FASTBOOT_TIMEOUT: - logging.info( - "Checking to see if fuchsia_device(%s) SN: %s is in " - "fastboot. 
(Attempt #%s Timeout: %s)" - % ( - fuchsia_device.mdns_name, - fuchsia_device.serial_number, - str(time_counter + 1), - FASTBOOT_TIMEOUT, - ) - ) - for usb_device in usbinfo.usbinfo(): - if ( - usb_device["iSerialNumber"] == fuchsia_device.serial_number - and usb_device["iProduct"] == "USB_download_gadget" - ): - logging.info( - "fuchsia_device(%s) SN: %s is in fastboot." - % (fuchsia_device.mdns_name, fuchsia_device.serial_number) - ) - time_counter = FASTBOOT_TIMEOUT - time_counter = time_counter + 1 - if time_counter == FASTBOOT_TIMEOUT: - for fail_usb_device in usbinfo.usbinfo(): - logging.debug(fail_usb_device) - raise TimeoutError( - "fuchsia_device(%s) SN: %s " - "never went into fastboot" - % (fuchsia_device.mdns_name, fuchsia_device.serial_number) - ) - time.sleep(1) - - end_time = time.time() + WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC - # Attempt to wait for existing flashing process to finish - while time.time() < end_time: - flash_process_found = False - for proc in psutil.process_iter(): - if "bash" in proc.name() and "flash.sh" in proc.cmdline(): - logging.info("Waiting for existing flash.sh process to complete.") - time.sleep(PROCESS_CHECK_WAIT_TIME_SEC) - flash_process_found = True - if not flash_process_found: - break - - -def run_flash_script(fuchsia_device, flash_dir): - try: - flash_output = job.run( - f"bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}", - timeout=FLASH_TIMEOUT_SEC, - ) - logging.debug(flash_output.stderr) - except job.TimeoutError as err: - raise TimeoutError(err) - - logging.info( - "Waiting %s seconds for device" - " to come back up after flashing." 
% AFTER_FLASH_BOOT_TIME - ) - time.sleep(AFTER_FLASH_BOOT_TIME) - logging.info("Updating device to new IP addresses.") - mdns_ip = None - for retry_counter in range(MDNS_LOOKUP_RETRY_MAX): - mdns_ip = get_fuchsia_mdns_ipv6_address(fuchsia_device.mdns_name) - if mdns_ip: - break - else: - time.sleep(1) - if mdns_ip and utils.is_valid_ipv6_address(mdns_ip): - logging.info( - "IP for fuchsia_device(%s) changed from %s to %s" - % (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip) - ) - fuchsia_device.ip = mdns_ip - fuchsia_device.address = "http://[{}]:{}".format( - fuchsia_device.ip, fuchsia_device.sl4f_port - ) - else: - raise ValueError("Invalid IP: %s after flashing." % fuchsia_device.mdns_name)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py deleted file mode 100644 index 54486d9..0000000 --- a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py +++ /dev/null
@@ -1,110 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - - -class FuchsiaWlanApPolicyLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "wlan_ap_policy") - - def wlanStartAccessPoint( - self, target_ssid, security_type, target_pwd, connectivity_mode, operating_band - ): - """Start an Access Point. - Args: - target_ssid: the network to attempt a connection to - security_type: the security protocol of the network. Possible inputs: - "none", "wep", "wpa", "wpa2", "wpa3" - target_pwd: (optional) credential being saved with the network. No password - is equivalent to empty string. - connectivity_mode: the connectivity mode to use. Possible inputs: - "local_only", "unrestricted" - operating_band: The operating band to use. Possible inputs: - "any", "only_2_4_ghz", "only_5_ghz" - - Returns: - boolean indicating if the action was successful - """ - - test_cmd = "wlan_ap_policy.start_access_point" - - test_args = { - "target_ssid": target_ssid, - "security_type": security_type.lower(), - "target_pwd": target_pwd, - "connectivity_mode": connectivity_mode, - "operating_band": operating_band, - } - - return self.send_command(test_cmd, test_args) - - def wlanStopAccessPoint(self, target_ssid, security_type, target_pwd=""): - """Stops an active Access Point. 
- Args: - target_ssid: the network to attempt a connection to - security_type: the security protocol of the network - target_pwd: (optional) credential being saved with the network. No password - is equivalent to empty string. - - Returns: - boolean indicating if the action was successful - """ - - test_cmd = "wlan_ap_policy.stop_access_point" - - test_args = { - "target_ssid": target_ssid, - "security_type": security_type.lower(), - "target_pwd": target_pwd, - } - - return self.send_command(test_cmd, test_args) - - def wlanStopAllAccessPoint(self): - """Stops all Access Points - - Returns: - boolean indicating if the actions were successful - """ - - test_cmd = "wlan_ap_policy.stop_all_access_points" - - test_args = {} - - return self.send_command(test_cmd, test_args) - - def wlanSetNewListener(self): - """Sets the update listener stream of the facade to a new stream so that updates will be - reset. Intended to be used between tests so that the behaviour of updates in a test is - independent from previous tests. - """ - test_cmd = "wlan_ap_policy.set_new_update_listener" - - return self.send_command(test_cmd, {}) - - def wlanGetUpdate(self, timeout=30): - """Gets a list of AP state updates. This call will return with an update immediately the - first time the update listener is initialized by setting a new listener or by creating - a client controller before setting a new listener. Subsequent calls will hang until - there is an update. - Returns: - A list of AP state updated. If there is no error, the result is a list with a - structure that matches the FIDL AccessPointState struct given for updates. - """ - test_cmd = "wlan_ap_policy.get_update" - - return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py deleted file mode 100644 index a53698b..0000000 --- a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py +++ /dev/null
@@ -1,35 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import logger -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - - -class FuchsiaWlanDeprecatedConfigurationLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "wlan_deprecated") - - def wlanSuggestAccessPointMacAddress(self, addr): - """Suggests a mac address to soft AP interface, to support - cast legacy behavior. - - Args: - addr: string of mac address to suggest (e.g. '12:34:56:78:9a:bc') - """ - test_cmd = "wlan_deprecated.suggest_ap_mac" - test_args = {"mac": addr} - - return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_lib.py deleted file mode 100644 index 9ed274a..0000000 --- a/src/antlion/controllers/fuchsia_lib/wlan_lib.py +++ /dev/null
@@ -1,173 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from antlion import logger -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - -COMMAND_SCAN = "wlan.scan" -COMMAND_SCAN_FOR_BSS_INFO = "wlan.scan_for_bss_info" -COMMAND_CONNECT = "wlan.connect" -COMMAND_DISCONNECT = "wlan.disconnect" -COMMAND_STATUS = "wlan.status" -COMMAND_GET_IFACE_ID_LIST = "wlan.get_iface_id_list" -COMMAND_GET_PHY_ID_LIST = "wlan.get_phy_id_list" -COMMAND_DESTROY_IFACE = "wlan.destroy_iface" -COMMAND_GET_COUNTRY = "wlan_phy.get_country" -COMMAND_GET_DEV_PATH = "wlan_phy.get_dev_path" -COMMAND_QUERY_IFACE = "wlan.query_iface" - - -class FuchsiaWlanLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "wlan") - - def wlanStartScan(self): - """Starts a wlan scan - - Returns: - scan results - """ - test_cmd = COMMAND_SCAN - - return self.send_command(test_cmd, {}) - - def wlanScanForBSSInfo(self): - """Scans and returns BSS info - - Returns: - A dict mapping each seen SSID to a list of BSS Description IE - blocks, one for each BSS observed in the network - """ - test_cmd = COMMAND_SCAN_FOR_BSS_INFO - - return self.send_command(test_cmd, {}) - - def wlanConnectToNetwork(self, target_ssid, target_bss_desc, target_pwd=None): - """Triggers a network connection - Args: - target_ssid: the network to attempt a connection to - target_pwd: (optional) password for the target network - - 
Returns: - boolean indicating if the connection was successful - """ - test_cmd = COMMAND_CONNECT - test_args = { - "target_ssid": target_ssid, - "target_pwd": target_pwd, - "target_bss_desc": target_bss_desc, - } - - return self.send_command(test_cmd, test_args) - - def wlanDisconnect(self): - """Disconnect any current wifi connections""" - test_cmd = COMMAND_DISCONNECT - - return self.send_command(test_cmd, {}) - - def wlanDestroyIface(self, iface_id): - """Destroy WLAN interface by ID. - Args: - iface_id: the interface id. - - Returns: - Dictionary, service id if success, error if error. - """ - test_cmd = COMMAND_DESTROY_IFACE - test_args = {"identifier": iface_id} - - return self.send_command(test_cmd, test_args) - - def wlanGetIfaceIdList(self): - """Get a list if wlan interface IDs. - - Returns: - Dictionary, service id if success, error if error. - """ - test_cmd = COMMAND_GET_IFACE_ID_LIST - - return self.send_command(test_cmd, {}) - - def wlanPhyIdList(self): - """Get a list if wlan phy IDs. - - Returns: - List of IDs if success, error if error. - """ - test_cmd = COMMAND_GET_PHY_ID_LIST - - return self.send_command(test_cmd, {}) - - def wlanStatus(self, iface_id=None): - """Request connection status - - Args: - iface_id: unsigned 16-bit int, the wlan interface id - (defaults to None) - - Returns: - Client state summary containing WlanClientState and - status of various networks connections - """ - test_cmd = COMMAND_STATUS - test_args = {} - if iface_id: - test_args = {"iface_id": iface_id} - - return self.send_command(test_cmd, test_args) - - def wlanGetCountry(self, phy_id): - """Reads the currently configured country for `phy_id`. - - Args: - phy_id: unsigned 16-bit integer. - - Returns: - Dictionary, String if success, error if error. - """ - test_cmd = COMMAND_GET_COUNTRY - test_args = {"phy_id": phy_id} - - return self.send_command(test_cmd, test_args) - - def wlanGetDevPath(self, phy_id): - """Queries the device path for `phy_id`. 
- - Args: - phy_id: unsigned 16-bit integer. - - Returns: - Dictionary, String if success, error if error. - """ - test_cmd = COMMAND_GET_DEV_PATH - test_args = {"phy_id": phy_id} - - return self.send_command(test_cmd, test_args) - - def wlanQueryInterface(self, iface_id): - """Retrieves interface info for given wlan iface id. - - Args: - iface_id: unsigned 16-bit int, the wlan interface id. - - Returns: - Dictionary, containing interface id, role, phy_id, phy_assigned_id - and mac addr. - """ - test_cmd = COMMAND_QUERY_IFACE - test_args = {"iface_id": iface_id} - - return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py deleted file mode 100644 index 94701d7..0000000 --- a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py +++ /dev/null
@@ -1,182 +0,0 @@ -# Lint as: python3 -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This class provides pipeline betweem python tests and WLAN policy facade. - -from antlion import logger -from antlion.controllers.fuchsia_lib.base_lib import BaseLib - -COMMAND_START_CLIENT_CONNECTIONS = "wlan_policy.start_client_connections" -COMMAND_STOP_CLIENT_CONNECTIONS = "wlan_policy.stop_client_connections" -COMMAND_SCAN_FOR_NETWORKS = "wlan_policy.scan_for_networks" -COMMAND_SAVE_NETWORK = "wlan_policy.save_network" -COMMAND_REMOVE_NETWORK = "wlan_policy.remove_network" -COMMAND_REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks" -COMMAND_GET_SAVED_NETWORKS = "wlan_policy.get_saved_networks" -COMMAND_CONNECT = "wlan_policy.connect" -COMMAND_CREATE_CLIENT_CONTROLLER = "wlan_policy.create_client_controller" -COMMAND_SET_NEW_LISTENER = "wlan_policy.set_new_update_listener" -COMMAND_REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks" -COMMAND_GET_UPDATE = "wlan_policy.get_update" - - -class FuchsiaWlanPolicyLib(BaseLib): - def __init__(self, addr: str) -> None: - super().__init__(addr, "wlan_policy") - - def wlanStartClientConnections(self): - """Enables device to initiate connections to networks""" - - test_cmd = COMMAND_START_CLIENT_CONNECTIONS - - return self.send_command(test_cmd, {}) - - def wlanStopClientConnections(self): - """Disables device for initiating connections to networks""" - - test_cmd 
= COMMAND_STOP_CLIENT_CONNECTIONS - - return self.send_command(test_cmd, {}) - - def wlanScanForNetworks(self): - """Scans for networks that can be connected to - Returns: - A list of network names and security types - """ - - test_cmd = COMMAND_SCAN_FOR_NETWORKS - - return self.send_command(test_cmd, {}) - - def wlanSaveNetwork(self, target_ssid, security_type, target_pwd=None): - """Saveds a network to the device for future connections - Args: - target_ssid: the network to attempt a connection to - security_type: the security protocol of the network - target_pwd: (optional) credential being saved with the network. No password - is equivalent to empty string. - - Returns: - boolean indicating if the connection was successful - """ - if not target_pwd: - target_pwd = "" - test_cmd = COMMAND_SAVE_NETWORK - test_args = { - "target_ssid": target_ssid, - "security_type": str(security_type).lower(), - "target_pwd": target_pwd, - } - - return self.send_command(test_cmd, test_args) - - def wlanRemoveNetwork(self, target_ssid, security_type, target_pwd=None): - """Removes or "forgets" a network from saved networks - Args: - target_ssid: the network to attempt a connection to - security_type: the security protocol of the network - target_pwd: (optional) credential of the network to remove. No password and - empty string are equivalent. - """ - if not target_pwd: - target_pwd = "" - test_cmd = COMMAND_REMOVE_NETWORK - test_args = { - "target_ssid": target_ssid, - "security_type": str(security_type).lower(), - "target_pwd": target_pwd, - } - - return self.send_command(test_cmd, test_args) - - def wlanRemoveAllNetworks(self): - """Removes or "forgets" all networks from saved networks - Returns: - A boolean indicating if the action was successful - """ - - test_cmd = COMMAND_REMOVE_ALL_NETWORKS - - return self.send_command(test_cmd, {}) - - def wlanGetSavedNetworks(self): - """Gets networks saved on device. 
Any PSK of a saved network will be - lower case regardless of how it was saved. - Returns: - A list of saved network names and security protocols - """ - - test_cmd = COMMAND_GET_SAVED_NETWORKS - - return self.send_command(test_cmd, {}) - - def wlanConnect(self, target_ssid, security_type): - """Triggers connection to a network - Args: - target_ssid: the network to attempt a connection to. Must have been previously - saved in order for a successful connection to happen. - security_type: the security protocol of the network - - Returns: - boolean indicating if the connection was successful - """ - - test_cmd = COMMAND_CONNECT - test_args = { - "target_ssid": target_ssid, - "security_type": str(security_type).lower(), - } - - return self.send_command(test_cmd, test_args) - - def wlanCreateClientController(self): - """Initializes the client controller of the facade that is used to make Client Controller - API calls - """ - test_cmd = COMMAND_CREATE_CLIENT_CONTROLLER - - return self.send_command(test_cmd, {}) - - def wlanSetNewListener(self): - """Sets the update listener stream of the facade to a new stream so that updates will be - reset. Intended to be used between tests so that the behaviour of updates in a test is - independent from previous tests. - """ - test_cmd = COMMAND_SET_NEW_LISTENER - - return self.send_command(test_cmd, {}) - - def wlanRemoveAllNetworks(self): - """Deletes all saved networks on the device. Relies directly on the get_saved_networks and - remove_network commands - """ - test_cmd = COMMAND_REMOVE_ALL_NETWORKS - - return self.send_command(test_cmd, {}) - - def wlanGetUpdate(self, timeout=30): - """Gets one client listener update. This call will return with an update immediately the - first time the update listener is initialized by setting a new listener or by creating - a client controller before setting a new listener. Subsequent calls will hang until - there is an update. - Returns: - An update of connection status. 
If there is no error, the result is a dict with a - structure that matches the FIDL ClientStateSummary struct given for updates. - """ - test_cmd = COMMAND_GET_UPDATE - - return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/iperf_client.py b/src/antlion/controllers/iperf_client.py deleted file mode 100644 index 9ad6efc..0000000 --- a/src/antlion/controllers/iperf_client.py +++ /dev/null
@@ -1,324 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import subprocess -import socket -import threading - -from antlion import context -from antlion import utils -from antlion.controllers.adb_lib.error import AdbCommandError -from antlion.controllers.android_device import AndroidDevice -from antlion.controllers.fuchsia_lib.ssh import SSHProvider -from antlion.controllers.iperf_server import _AndroidDeviceBridge -from antlion.controllers.utils_lib.ssh import connection -from antlion.controllers.utils_lib.ssh import settings -from antlion.libs.proc import job - -MOBLY_CONTROLLER_CONFIG_NAME = "IPerfClient" -ACTS_CONTROLLER_REFERENCE_NAME = "iperf_clients" - - -class IPerfError(Exception): - """Raised on execution errors of iPerf.""" - - -def create(configs): - """Factory method for iperf clients. - - The function creates iperf clients based on at least one config. - If configs contain ssh settings or and AndroidDevice, remote iperf clients - will be started on those devices, otherwise, a the client will run on the - local machine. 
- - Args: - configs: config parameters for the iperf server - """ - results = [] - for c in configs: - if type(c) is dict and "AndroidDevice" in c: - results.append( - IPerfClientOverAdb( - c["AndroidDevice"], test_interface=c.get("test_interface") - ) - ) - elif type(c) is dict and "ssh_config" in c: - results.append( - IPerfClientOverSsh( - c["ssh_config"], test_interface=c.get("test_interface") - ) - ) - else: - results.append(IPerfClient()) - return results - - -def get_info(iperf_clients): - """Placeholder for info about iperf clients - - Returns: - None - """ - return None - - -def destroy(_): - # No cleanup needed. - pass - - -class IPerfClientBase(object): - """The Base class for all IPerfClients. - - This base class is responsible for synchronizing the logging to prevent - multiple IPerfClients from writing results to the same file, as well - as providing the interface for IPerfClient objects. - """ - - # Keeps track of the number of IPerfClient logs to prevent file name - # collisions. - __log_file_counter = 0 - - __log_file_lock = threading.Lock() - - @staticmethod - def _get_full_file_path(tag=""): - """Returns the full file path for the IPerfClient log file. - - Note: If the directory for the file path does not exist, it will be - created. - - Args: - tag: The tag passed in to the server run. - """ - current_context = context.get_current_context() - full_out_dir = os.path.join( - current_context.get_full_output_path(), "iperf_client_files" - ) - - with IPerfClientBase.__log_file_lock: - os.makedirs(full_out_dir, exist_ok=True) - tags = ["IPerfClient", tag, IPerfClientBase.__log_file_counter] - out_file_name = "%s.log" % ( - ",".join([str(x) for x in tags if x != "" and x is not None]) - ) - IPerfClientBase.__log_file_counter += 1 - - return os.path.join(full_out_dir, out_file_name) - - def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None): - """Starts iperf client, and waits for completion. - - Args: - ip: iperf server ip address. 
- iperf_args: A string representing arguments to start iperf - client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J". - tag: A string to further identify iperf results file - timeout: the maximum amount of time the iperf client can run. - iperf_binary: Location of iperf3 binary. If none, it is assumed the - the binary is in the path. - - Returns: - full_out_path: iperf result path. - """ - raise NotImplementedError("start() must be implemented.") - - -class IPerfClient(IPerfClientBase): - """Class that handles iperf3 client operations.""" - - def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None): - """Starts iperf client, and waits for completion. - - Args: - ip: iperf server ip address. - iperf_args: A string representing arguments to start iperf - client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J". - tag: tag to further identify iperf results file - timeout: unused. - iperf_binary: Location of iperf3 binary. If none, it is assumed the - the binary is in the path. - - Returns: - full_out_path: iperf result path. - """ - if not iperf_binary: - logging.debug( - "No iperf3 binary specified. " "Assuming iperf3 is in the path." 
- ) - iperf_binary = "iperf3" - else: - logging.debug("Using iperf3 binary located at %s" % iperf_binary) - iperf_cmd = [str(iperf_binary), "-c", ip] + iperf_args.split(" ") - full_out_path = self._get_full_file_path(tag) - - with open(full_out_path, "w") as out_file: - subprocess.call(iperf_cmd, stdout=out_file) - - return full_out_path - - -class IPerfClientOverSsh(IPerfClientBase): - """Class that handles iperf3 client operations on remote machines.""" - - def __init__( - self, - ssh_config: str, - test_interface: str = None, - ssh_provider: SSHProvider = None, - ): - self._ssh_provider = ssh_provider - if not self._ssh_provider: - self._ssh_settings = settings.from_config(ssh_config) - if not ( - utils.is_valid_ipv4_address(self._ssh_settings.hostname) - or utils.is_valid_ipv6_address(self._ssh_settings.hostname) - ): - mdns_ip = utils.get_fuchsia_mdns_ipv6_address( - self._ssh_settings.hostname - ) - if mdns_ip: - self._ssh_settings.hostname = mdns_ip - self._ssh_session = None - self.start_ssh() - - self.test_interface = test_interface - - def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None): - """Starts iperf client, and waits for completion. - - Args: - ip: iperf server ip address. - iperf_args: A string representing arguments to start iperf - client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J". - tag: tag to further identify iperf results file - timeout: the maximum amount of time to allow the iperf client to run - iperf_binary: Location of iperf3 binary. If none, it is assumed the - the binary is in the path. - - Returns: - full_out_path: iperf result path. - """ - if not iperf_binary: - logging.debug( - "No iperf3 binary specified. " "Assuming iperf3 is in the path." 
- ) - iperf_binary = "iperf3" - else: - logging.debug("Using iperf3 binary located at %s" % iperf_binary) - iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args) - full_out_path = self._get_full_file_path(tag) - - try: - self.start_ssh() - if self._ssh_provider: - iperf_process = self._ssh_provider.run(iperf_cmd, timeout_sec=timeout) - else: - iperf_process = self._ssh_session.run(iperf_cmd, timeout=timeout) - iperf_output = iperf_process.stdout - with open(full_out_path, "w") as out_file: - out_file.write(iperf_output) - except socket.timeout: - raise TimeoutError( - "Socket timeout. Timed out waiting for iperf " "client to finish." - ) - except Exception as err: - logging.exception("iperf run failed: {}".format(err)) - - return full_out_path - - def start_ssh(self): - """Starts an ssh session to the iperf client.""" - if self._ssh_provider: - # SSH sessions are created by the provider. - return - if not self._ssh_session: - self._ssh_session = connection.SshConnection(self._ssh_settings) - - def close_ssh(self): - """Closes the ssh session to the iperf client, if one exists, preventing - connection reset errors when rebooting client device. - """ - if self._ssh_session: - self._ssh_session.close() - self._ssh_session = None - - -class IPerfClientOverAdb(IPerfClientBase): - """Class that handles iperf3 operations over ADB devices.""" - - def __init__(self, android_device_or_serial, test_interface=None): - """Creates a new IPerfClientOverAdb object. - - Args: - android_device_or_serial: Either an AndroidDevice object, or the - serial that corresponds to the AndroidDevice. Note that the - serial must be present in an AndroidDevice entry in the ACTS - config. - test_interface: The network interface that will be used to send - traffic to the iperf server. 
- """ - self._android_device_or_serial = android_device_or_serial - self.test_interface = test_interface - - @property - def _android_device(self): - if isinstance(self._android_device_or_serial, AndroidDevice): - return self._android_device_or_serial - else: - return _AndroidDeviceBridge.android_devices()[ - self._android_device_or_serial - ] - - def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None): - """Starts iperf client, and waits for completion. - - Args: - ip: iperf server ip address. - iperf_args: A string representing arguments to start iperf - client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J". - tag: tag to further identify iperf results file - timeout: the maximum amount of time to allow the iperf client to run - iperf_binary: Location of iperf3 binary. If none, it is assumed the - the binary is in the path. - - Returns: - The iperf result file path. - """ - clean_out = "" - try: - if not iperf_binary: - logging.debug( - "No iperf3 binary specified. " "Assuming iperf3 is in the path." - ) - iperf_binary = "iperf3" - else: - logging.debug("Using iperf3 binary located at %s" % iperf_binary) - iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args) - out = self._android_device.adb.shell(str(iperf_cmd), timeout=timeout) - clean_out = out.split("\n") - if "error" in clean_out[0].lower(): - raise IPerfError(clean_out) - except (job.TimeoutError, AdbCommandError): - logging.warning("TimeoutError: Iperf measurement failed.") - - full_out_path = self._get_full_file_path(tag) - with open(full_out_path, "w") as out_file: - out_file.write("\n".join(clean_out)) - - return full_out_path
diff --git a/src/antlion/controllers/iperf_server.py b/src/antlion/controllers/iperf_server.py deleted file mode 100755 index 20dcfbf..0000000 --- a/src/antlion/controllers/iperf_server.py +++ /dev/null
@@ -1,736 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import logging -import math -import os -import shlex -import subprocess -import threading -import time - -from antlion import context -from antlion import logger as acts_logger -from antlion import utils -from antlion.controllers.android_device import AndroidDevice -from antlion.controllers.utils_lib.ssh import connection -from antlion.controllers.utils_lib.ssh import settings -from antlion.event import event_bus -from antlion.event.decorators import subscribe_static -from antlion.event.event import TestClassBeginEvent -from antlion.event.event import TestClassEndEvent -from antlion.libs.proc import job - -MOBLY_CONTROLLER_CONFIG_NAME = "IPerfServer" -ACTS_CONTROLLER_REFERENCE_NAME = "iperf_servers" -KILOBITS = 1024 -MEGABITS = KILOBITS * 1024 -GIGABITS = MEGABITS * 1024 -BITS_IN_BYTE = 8 - - -def create(configs): - """Factory method for iperf servers. - - The function creates iperf servers based on at least one config. - If configs only specify a port number, a regular local IPerfServer object - will be created. 
If configs contains ssh settings or and AndroidDevice, - remote iperf servers will be started on those devices - - Args: - configs: config parameters for the iperf server - """ - results = [] - for c in configs: - if type(c) in (str, int) and str(c).isdigit(): - results.append(IPerfServer(int(c))) - elif type(c) is dict and "AndroidDevice" in c and "port" in c: - results.append(IPerfServerOverAdb(c["AndroidDevice"], c["port"])) - elif type(c) is dict and "ssh_config" in c and "port" in c: - results.append( - IPerfServerOverSsh( - settings.from_config(c["ssh_config"]), - c["port"], - test_interface=c.get("test_interface"), - use_killall=c.get("use_killall"), - ) - ) - else: - raise ValueError( - "Config entry %s in %s is not a valid IPerfServer " - "config." % (repr(c), configs) - ) - return results - - -def get_info(iperf_servers): - """Placeholder for info about iperf servers - - Returns: - None - """ - return None - - -def destroy(iperf_server_list): - for iperf_server in iperf_server_list: - try: - iperf_server.stop() - except Exception: - logging.exception("Unable to properly clean up %s." % iperf_server) - - -class IPerfResult(object): - def __init__(self, result_path, reporting_speed_units="Mbytes"): - """Loads iperf result from file. - - Loads iperf result from JSON formatted server log. File can be accessed - before or after server is stopped. Note that only the first JSON object - will be loaded and this funtion is not intended to be used with files - containing multiple iperf client runs. 
- """ - # if result_path isn't a path, treat it as JSON - self.reporting_speed_units = reporting_speed_units - if not os.path.exists(result_path): - self.result = json.loads(result_path) - else: - try: - with open(result_path, "r") as f: - iperf_output = f.readlines() - if "}\n" in iperf_output: - iperf_output = iperf_output[: iperf_output.index("}\n") + 1] - iperf_string = "".join(iperf_output) - iperf_string = iperf_string.replace("nan", "0") - self.result = json.loads(iperf_string) - except ValueError: - with open(result_path, "r") as f: - # Possibly a result from interrupted iperf run, - # skip first line and try again. - lines = f.readlines()[1:] - self.result = json.loads("".join(lines)) - - def _has_data(self): - """Checks if the iperf result has valid throughput data. - - Returns: - True if the result contains throughput data. False otherwise. - """ - return ("end" in self.result) and ( - "sum_received" in self.result["end"] or "sum" in self.result["end"] - ) - - def _get_reporting_speed(self, network_speed_in_bits_per_second): - """Sets the units for the network speed reporting based on how the - object was initiated. Defaults to Megabytes per second. Currently - supported, bits per second (bits), kilobits per second (kbits), megabits - per second (mbits), gigabits per second (gbits), bytes per second - (bytes), kilobits per second (kbytes), megabits per second (mbytes), - gigabytes per second (gbytes). - - Args: - network_speed_in_bits_per_second: The network speed from iperf in - bits per second. - - Returns: - The value of the throughput in the appropriate units. 
- """ - speed_divisor = 1 - if self.reporting_speed_units[1:].lower() == "bytes": - speed_divisor = speed_divisor * BITS_IN_BYTE - if self.reporting_speed_units[0:1].lower() == "k": - speed_divisor = speed_divisor * KILOBITS - if self.reporting_speed_units[0:1].lower() == "m": - speed_divisor = speed_divisor * MEGABITS - if self.reporting_speed_units[0:1].lower() == "g": - speed_divisor = speed_divisor * GIGABITS - return network_speed_in_bits_per_second / speed_divisor - - def get_json(self): - """Returns the raw json output from iPerf.""" - return self.result - - @property - def error(self): - return self.result.get("error", None) - - @property - def avg_rate(self): - """Average UDP rate in MB/s over the entire run. - - This is the average UDP rate observed at the terminal the iperf result - is pulled from. According to iperf3 documentation this is calculated - based on bytes sent and thus is not a good representation of the - quality of the link. If the result is not from a success run, this - property is None. - """ - if not self._has_data() or "sum" not in self.result["end"]: - return None - bps = self.result["end"]["sum"]["bits_per_second"] - return self._get_reporting_speed(bps) - - @property - def avg_receive_rate(self): - """Average receiving rate in MB/s over the entire run. - - This data may not exist if iperf was interrupted. If the result is not - from a success run, this property is None. - """ - if not self._has_data() or "sum_received" not in self.result["end"]: - return None - bps = self.result["end"]["sum_received"]["bits_per_second"] - return self._get_reporting_speed(bps) - - @property - def avg_send_rate(self): - """Average sending rate in MB/s over the entire run. - - This data may not exist if iperf was interrupted. If the result is not - from a success run, this property is None. 
- """ - if not self._has_data() or "sum_sent" not in self.result["end"]: - return None - bps = self.result["end"]["sum_sent"]["bits_per_second"] - return self._get_reporting_speed(bps) - - @property - def instantaneous_rates(self): - """Instantaneous received rate in MB/s over entire run. - - This data may not exist if iperf was interrupted. If the result is not - from a success run, this property is None. - """ - if not self._has_data(): - return None - intervals = [ - self._get_reporting_speed(interval["sum"]["bits_per_second"]) - for interval in self.result["intervals"] - ] - return intervals - - @property - def std_deviation(self): - """Standard deviation of rates in MB/s over entire run. - - This data may not exist if iperf was interrupted. If the result is not - from a success run, this property is None. - """ - return self.get_std_deviation(0) - - def get_std_deviation(self, iperf_ignored_interval): - """Standard deviation of rates in MB/s over entire run. - - This data may not exist if iperf was interrupted. If the result is not - from a success run, this property is None. A configurable number of - beginning (and the single last) intervals are ignored in the - calculation as they are inaccurate (e.g. the last is from a very small - interval) - - Args: - iperf_ignored_interval: number of iperf interval to ignored in - calculating standard deviation - - Returns: - The standard deviation. - """ - if not self._has_data(): - return None - instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:-1] - avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates) - sqd_deviations = [(rate - avg_rate) ** 2 for rate in instantaneous_rates] - std_dev = math.sqrt(math.fsum(sqd_deviations) / (len(sqd_deviations) - 1)) - return std_dev - - -class IPerfServerBase(object): - # Keeps track of the number of IPerfServer logs to prevent file name - # collisions. 
- __log_file_counter = 0 - - __log_file_lock = threading.Lock() - - def __init__(self, port): - self._port = port - # TODO(markdr): We shouldn't be storing the log files in an array like - # this. Nobody should be reading this property either. Instead, the - # IPerfResult should be returned in stop() with all the necessary info. - # See aosp/1012824 for a WIP implementation. - self.log_files = [] - - @property - def port(self): - raise NotImplementedError("port must be specified.") - - @property - def started(self): - raise NotImplementedError("started must be specified.") - - def start(self, extra_args="", tag=""): - """Starts an iperf3 server. - - Args: - extra_args: A string representing extra arguments to start iperf - server with. - tag: Appended to log file name to identify logs from different - iperf runs. - """ - raise NotImplementedError("start() must be specified.") - - def stop(self): - """Stops the iperf server. - - Returns: - The name of the log file generated from the terminated session. - """ - raise NotImplementedError("stop() must be specified.") - - def _get_full_file_path(self, tag=None): - """Returns the full file path for the IPerfServer log file. - - Note: If the directory for the file path does not exist, it will be - created. - - Args: - tag: The tag passed in to the server run. - """ - out_dir = self.log_path - - with IPerfServerBase.__log_file_lock: - tags = [tag, IPerfServerBase.__log_file_counter] - out_file_name = "IPerfServer,%s.log" % ( - ",".join([str(x) for x in tags if x != "" and x is not None]) - ) - IPerfServerBase.__log_file_counter += 1 - - file_path = os.path.join(out_dir, out_file_name) - self.log_files.append(file_path) - return file_path - - @property - def log_path(self): - current_context = context.get_current_context() - full_out_dir = os.path.join( - current_context.get_full_output_path(), "IPerfServer%s" % self.port - ) - - # Ensure the directory exists. 
- os.makedirs(full_out_dir, exist_ok=True) - - return full_out_dir - - -def _get_port_from_ss_output(ss_output, pid): - pid = str(pid) - lines = ss_output.split("\n") - for line in lines: - if pid in line: - # Expected format: - # tcp LISTEN 0 5 *:<PORT> *:* users:(("cmd",pid=<PID>,fd=3)) - return line.split()[4].split(":")[-1] - else: - raise ProcessLookupError("Could not find started iperf3 process.") - - -class IPerfServer(IPerfServerBase): - """Class that handles iperf server commands on localhost.""" - - def __init__(self, port=5201): - super().__init__(port) - self._hinted_port = port - self._current_log_file = None - self._iperf_process = None - self._last_opened_file = None - - @property - def port(self): - return self._port - - @property - def started(self): - return self._iperf_process is not None - - def start(self, extra_args="", tag=""): - """Starts iperf server on local machine. - - Args: - extra_args: A string representing extra arguments to start iperf - server with. - tag: Appended to log file name to identify logs from different - iperf runs. - """ - if self._iperf_process is not None: - return - - self._current_log_file = self._get_full_file_path(tag) - - # Run an iperf3 server on the hinted port with JSON output. - command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"] - - command.extend(shlex.split(extra_args)) - - if self._last_opened_file: - self._last_opened_file.close() - self._last_opened_file = open(self._current_log_file, "w") - self._iperf_process = subprocess.Popen( - command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL - ) - for attempts_left in reversed(range(3)): - try: - self._port = int( - _get_port_from_ss_output( - job.run("ss -l -p -n | grep iperf").stdout, - self._iperf_process.pid, - ) - ) - break - except ProcessLookupError: - if attempts_left == 0: - raise - logging.debug("iperf3 process not started yet.") - time.sleep(0.01) - - def stop(self): - """Stops the iperf server. 
- - Returns: - The name of the log file generated from the terminated session. - """ - if self._iperf_process is None: - return - - if self._last_opened_file: - self._last_opened_file.close() - self._last_opened_file = None - - self._iperf_process.terminate() - self._iperf_process = None - - return self._current_log_file - - def __del__(self): - self.stop() - - -class IPerfServerOverSsh(IPerfServerBase): - """Class that handles iperf3 operations on remote machines.""" - - def __init__(self, ssh_settings, port, test_interface=None, use_killall=False): - super().__init__(port) - self.ssh_settings = ssh_settings - self.log = acts_logger.create_tagged_trace_logger( - f"IPerfServer | {self.ssh_settings.hostname}" - ) - self._ssh_session = None - self.start_ssh() - - self._iperf_pid = None - self._current_tag = None - self.hostname = self.ssh_settings.hostname - self._use_killall = str(use_killall).lower() == "true" - try: - # A test interface can only be found if an ip address is specified. - # A fully qualified hostname will return None for the - # test_interface. - self.test_interface = ( - test_interface - if test_interface - else utils.get_interface_based_on_ip(self._ssh_session, self.hostname) - ) - except Exception as e: - self.log.warning(e) - self.test_interface = None - - @property - def port(self): - return self._port - - @property - def started(self): - return self._iperf_pid is not None - - def _get_remote_log_path(self): - return "/tmp/iperf_server_port%s.log" % self.port - - def get_interface_ip_addresses(self, interface): - """Gets all of the ip addresses, ipv4 and ipv6, associated with a - particular interface name. - - Args: - interface: The interface name on the device, ie eth0 - - Returns: - A list of dictionaries of the various IP addresses. See - utils.get_interface_ip_addresses. 
- """ - if not self._ssh_session: - self.start_ssh() - - return utils.get_interface_ip_addresses(self._ssh_session, interface) - - def renew_test_interface_ip_address(self): - """Renews the test interface's IPv4 address. - - Necessary for changing DHCP scopes during a test. - """ - if not self._ssh_session: - self.start_ssh() - utils.renew_linux_ip_address(self._ssh_session, self.test_interface) - - def get_addr(self, addr_type="ipv4_private", timeout_sec=None): - """Wait until a type of IP address on the test interface is available - then return it. - """ - if not self._ssh_session: - self.start_ssh() - return utils.get_addr( - self._ssh_session, self.test_interface, addr_type, timeout_sec - ) - - def _cleanup_iperf_port(self): - """Checks and kills zombie iperf servers occupying intended port.""" - iperf_check_cmd = ( - "netstat -tulpn | grep LISTEN | grep iperf3" " | grep :{}" - ).format(self.port) - iperf_check = self._ssh_session.run(iperf_check_cmd, ignore_status=True) - iperf_check = iperf_check.stdout - if iperf_check: - logging.debug("Killing zombie server on port {}".format(self.port)) - iperf_pid = iperf_check.split(" ")[-1].split("/")[0] - self._ssh_session.run("kill -9 {}".format(str(iperf_pid))) - - def start(self, extra_args="", tag="", iperf_binary=None): - """Starts iperf server on specified machine and port. - - Args: - extra_args: A string representing extra arguments to start iperf - server with. - tag: Appended to log file name to identify logs from different - iperf runs. - iperf_binary: Location of iperf3 binary. If none, it is assumed the - the binary is in the path. - """ - if self.started: - return - - if not self._ssh_session: - self.start_ssh() - self._cleanup_iperf_port() - if not iperf_binary: - logging.debug( - "No iperf3 binary specified. " "Assuming iperf3 is in the path." 
- ) - iperf_binary = "iperf3" - else: - logging.debug("Using iperf3 binary located at %s" % iperf_binary) - iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port) - - cmd = "{cmd} {extra_flags} > {log_file}".format( - cmd=iperf_command, - extra_flags=extra_args, - log_file=self._get_remote_log_path(), - ) - - job_result = self._ssh_session.run_async(cmd) - self._iperf_pid = job_result.stdout - self._current_tag = tag - - def stop(self): - """Stops the iperf server. - - Returns: - The name of the log file generated from the terminated session. - """ - if not self.started: - return - - if self._use_killall: - self._ssh_session.run("killall iperf3", ignore_status=True) - else: - self._ssh_session.run_async("kill -9 {}".format(str(self._iperf_pid))) - - iperf_result = self._ssh_session.run( - "cat {}".format(self._get_remote_log_path()) - ) - - log_file = self._get_full_file_path(self._current_tag) - with open(log_file, "w") as f: - f.write(iperf_result.stdout) - - self._ssh_session.run_async("rm {}".format(self._get_remote_log_path())) - self._iperf_pid = None - return log_file - - def start_ssh(self): - """Starts an ssh session to the iperf server.""" - if not self._ssh_session: - self._ssh_session = connection.SshConnection(self.ssh_settings) - - def close_ssh(self): - """Closes the ssh session to the iperf server, if one exists, preventing - connection reset errors when rebooting server device. - """ - if self.started: - self.stop() - if self._ssh_session: - self._ssh_session.close() - self._ssh_session = None - - -# TODO(markdr): Remove this after automagic controller creation has been -# removed. 
class _AndroidDeviceBridge(object):
    """A helper class for connecting serial numbers to AndroidDevices."""

    # The currently-running test class, captured from lifecycle events.
    _test_class = None

    @staticmethod
    @subscribe_static(TestClassBeginEvent)
    def on_test_begin(event):
        """Records the test class when a test class begins."""
        _AndroidDeviceBridge._test_class = event.test_class

    @staticmethod
    @subscribe_static(TestClassEndEvent)
    def on_test_end(_):
        """Clears the recorded test class when a test class ends."""
        _AndroidDeviceBridge._test_class = None

    @staticmethod
    def android_devices():
        """A dict of serial -> AndroidDevice, where AndroidDevice is a device
        found in the current TestClass's controllers.
        """
        test_class = _AndroidDeviceBridge._test_class
        if not test_class:
            return {}
        return {device.serial: device for device in test_class.android_devices}


event_bus.register_subscription(_AndroidDeviceBridge.on_test_begin.subscription)
event_bus.register_subscription(_AndroidDeviceBridge.on_test_end.subscription)


class IPerfServerOverAdb(IPerfServerBase):
    """Class that handles iperf3 operations over ADB devices."""

    def __init__(self, android_device_or_serial, port):
        """Creates a new IPerfServerOverAdb object.

        Args:
            android_device_or_serial: Either an AndroidDevice object, or the
                serial that corresponds to the AndroidDevice. Note that the
                serial must be present in an AndroidDevice entry in the ACTS
                config.
            port: The port number to open the iperf server on.
        """
        super().__init__(port)
        self._android_device_or_serial = android_device_or_serial

        self._iperf_process = None
        self._current_tag = ""

    @property
    def port(self):
        return self._port

    @property
    def started(self):
        return self._iperf_process is not None

    @property
    def _android_device(self):
        # Accept either a live AndroidDevice or a serial resolvable through
        # the bridge.
        device_or_serial = self._android_device_or_serial
        if isinstance(device_or_serial, AndroidDevice):
            return device_or_serial
        return _AndroidDeviceBridge.android_devices()[device_or_serial]

    def _get_device_log_path(self):
        """Returns the on-device path of the server's log file."""
        return "~/data/iperf_server_port%s.log" % self.port

    def start(self, extra_args="", tag="", iperf_binary=None):
        """Starts iperf server on an ADB device.

        Args:
            extra_args: A string representing extra arguments to start iperf
                server with.
            tag: Appended to log file name to identify logs from different
                iperf runs.
            iperf_binary: Location of iperf3 binary. If none, it is assumed
                the binary is in the path.
        """
        if self._iperf_process is not None:
            return

        if not iperf_binary:
            logging.debug(
                "No iperf3 binary specified. " "Assuming iperf3 is in the path."
            )
            iperf_binary = "iperf3"
        else:
            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
        server_command = f"{iperf_binary} -s -J -p {self.port}"

        shell_command = (
            f"{server_command} {extra_args} > {self._get_device_log_path()} 2>&1"
        )
        self._iperf_process = self._android_device.adb.shell_nb(shell_command)

        # Poll until the iperf3 process appears on the device so we can track
        # its on-device pid for cleanup.
        self._iperf_process_adb_pid = ""
        while not self._iperf_process_adb_pid:
            self._iperf_process_adb_pid = self._android_device.adb.shell(
                "pgrep iperf3 -n"
            )

        self._current_tag = tag

    def stop(self):
        """Stops the iperf server.

        Returns:
            The name of the log file generated from the terminated session.
        """
        if self._iperf_process is None:
            return

        job.run(f"kill -9 {self._iperf_process.pid}")

        # TODO(markdr): update with definitive kill method
        # Keep killing until pgrep no longer lists the on-device process.
        adb_pid = self._iperf_process_adb_pid
        while self._android_device.adb.shell("pgrep iperf3").find(adb_pid) != -1:
            self._android_device.adb.shell(f"kill -9 {adb_pid}")

        server_log = self._android_device.adb.shell(
            f"cat {self._get_device_log_path()}"
        )

        log_file = self._get_full_file_path(self._current_tag)
        with open(log_file, "w") as f:
            f.write(server_log)

        self._android_device.adb.shell(f"rm {self._get_device_log_path()}")

        self._iperf_process = None
        return log_file
diff --git a/src/antlion/controllers/openwrt_ap.py b/src/antlion/controllers/openwrt_ap.py deleted file mode 100644 index dc99ef2..0000000 --- a/src/antlion/controllers/openwrt_ap.py +++ /dev/null
@@ -1,719 +0,0 @@ -"""Controller for Open WRT access point.""" - -import random -import re -import time - -from antlion import logger -from antlion import signals -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.openwrt_lib import network_settings -from antlion.controllers.openwrt_lib import wireless_config -from antlion.controllers.openwrt_lib import wireless_settings_applier -from antlion.controllers.openwrt_lib.openwrt_constants import ( - OpenWrtModelMap as modelmap, -) -from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting -from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD -from antlion.controllers.utils_lib.ssh import connection -from antlion.controllers.utils_lib.ssh import settings -import yaml - -MOBLY_CONTROLLER_CONFIG_NAME = "OpenWrtAP" -ACTS_CONTROLLER_REFERENCE_NAME = "access_points" -OPEN_SECURITY = "none" -PSK1_SECURITY = "psk" -PSK_SECURITY = "psk2" -WEP_SECURITY = "wep" -ENT_SECURITY = "wpa2" -OWE_SECURITY = "owe" -SAE_SECURITY = "sae" -SAEMIXED_SECURITY = "sae-mixed" -ENABLE_RADIO = "0" -PMF_ENABLED = 2 -WIFI_2G = "wifi2g" -WIFI_5G = "wifi5g" -WAIT_TIME = 20 -DEFAULT_RADIOS = ("radio0", "radio1") - - -def create(configs): - """Creates ap controllers from a json config. - - Creates an ap controller from either a list, or a single element. The element - can either be just the hostname or a dictionary containing the hostname and - username of the AP to connect to over SSH. - - Args: - configs: The json configs that represent this controller. - - Returns: - AccessPoint object - - Example: - Below is the config file entry for OpenWrtAP as a list. A testbed can have - 1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH - login information. OpenWrtAP#__init__() uses this to create SSH object. 
- - "OpenWrtAP": [ - { - "ssh_config": { - "user" : "root", - "host" : "192.168.1.1" - } - }, - { - "ssh_config": { - "user" : "root", - "host" : "192.168.1.2" - } - } - ] - """ - return [OpenWrtAP(c) for c in configs] - - -def destroy(aps): - """Destroys a list of AccessPoints. - - Args: - aps: The list of AccessPoints to destroy. - """ - for ap in aps: - ap.close() - ap.close_ssh() - - -def get_info(aps): - """Get information on a list of access points. - - Args: - aps: A list of AccessPoints. - - Returns: - A list of all aps hostname. - """ - return [ap.ssh_settings.hostname for ap in aps] - - -class OpenWrtAP(object): - """An AccessPoint controller. - - Attributes: - ssh: The ssh connection to the AP. - ssh_settings: The ssh settings being used by the ssh connection. - log: Logging object for AccessPoint. - wireless_setting: object holding wireless configuration. - network_setting: Object for network configuration. - model: OpenWrt HW model. - radios: Fit interface for test. - """ - - def __init__(self, config): - """Initialize AP.""" - self.ssh_settings = settings.from_config(config["ssh_config"]) - self.ssh = connection.SshConnection(self.ssh_settings) - self.log = logger.create_logger( - lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg) - ) - self.wireless_setting = None - self.network_setting = network_settings.NetworkSettings( - self.ssh, self.ssh_settings, self.log - ) - self.model = self.get_model_name() - if self.model in modelmap.__dict__: - self.radios = modelmap.__dict__[self.model] - else: - self.radios = DEFAULT_RADIOS - - def configure_ap(self, wifi_configs, channel_2g, channel_5g): - """Configure AP with the required settings. - - Each test class inherits WifiBaseTest. Based on the test, we may need to - configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any - combination. 
We call WifiBaseTest methods get_psk_network(), - get_open_network(), get_wep_network() and get_ent_network() to create - dictionaries which contains this information. 'wifi_configs' is a list of - such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and - 1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to - configure the APs. - - wifi_configs = [ - { - '2g': { - 'SSID': '2g_AkqXWPK4', - 'security': 'psk2', - 'password': 'YgYuXqDO9H', - 'hiddenSSID': False - }, - }, - { - '5g': { - 'SSID': '5g_8IcMR1Sg', - 'security': 'none', - 'hiddenSSID': False - }, - } - ] - - Args: - wifi_configs: list of network settings for 2G and 5G bands. - channel_2g: channel for 2G band. - channel_5g: channel for 5G band. - """ - # generate wifi configs to configure - wireless_configs = self.generate_wireless_configs(wifi_configs) - self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier( - self.ssh, - wireless_configs, - channel_2g, - channel_5g, - self.radios[1], - self.radios[0], - ) - self.wireless_setting.apply_wireless_settings() - - def start_ap(self): - """Starts the AP with the settings in /etc/config/wireless.""" - self.ssh.run("wifi up") - curr_time = time.time() - while time.time() < curr_time + WAIT_TIME: - if self.get_wifi_status(): - return - time.sleep(3) - if not self.get_wifi_status(): - raise ValueError("Failed to turn on WiFi on the AP.") - - def stop_ap(self): - """Stops the AP.""" - self.ssh.run("wifi down") - curr_time = time.time() - while time.time() < curr_time + WAIT_TIME: - if not self.get_wifi_status(): - return - time.sleep(3) - if self.get_wifi_status(): - raise ValueError("Failed to turn off WiFi on the AP.") - - def get_bssids_for_wifi_networks(self): - """Get BSSIDs for wifi networks configured. - - Returns: - Dictionary of SSID - BSSID map for both bands. 
- """ - bssid_map = {"2g": {}, "5g": {}} - for radio in self.radios: - ssid_ifname_map = self.get_ifnames_for_ssids(radio) - if radio == self.radios[0]: - for ssid, ifname in ssid_ifname_map.items(): - bssid_map["5g"][ssid] = self.get_bssid(ifname) - elif radio == self.radios[1]: - for ssid, ifname in ssid_ifname_map.items(): - bssid_map["2g"][ssid] = self.get_bssid(ifname) - return bssid_map - - def get_ifnames_for_ssids(self, radio): - """Get interfaces for wifi networks. - - Args: - radio: 2g or 5g radio get the bssids from. - - Returns: - dictionary of ssid - ifname mappings. - """ - ssid_ifname_map = {} - str_output = self.ssh.run("wifi status %s" % radio).stdout - wifi_status = yaml.load( - str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader - ) - wifi_status = wifi_status[radio] - if wifi_status["up"]: - interfaces = wifi_status["interfaces"] - for config in interfaces: - ssid = config["config"]["ssid"] - ifname = config["ifname"] - ssid_ifname_map[ssid] = ifname - return ssid_ifname_map - - def get_bssid(self, ifname): - """Get MAC address from an interface. - - Args: - ifname: interface name of the corresponding MAC. - - Returns: - BSSID of the interface. - """ - ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout - mac_addr = ifconfig.split("\n")[0].split()[-1] - return mac_addr - - def set_wpa_encryption(self, encryption): - """Set different encryptions to wpa or wpa2. - - Args: - encryption: ccmp, tkip, or ccmp+tkip. - """ - str_output = self.ssh.run("wifi status").stdout - wifi_status = yaml.load( - str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader - ) - - # Counting how many interface are enabled. - total_interface = 0 - for radio in self.radios: - num_interface = len(wifi_status[radio]["interfaces"]) - total_interface += num_interface - - # Iterates every interface to get and set wpa encryption. 
- default_extra_interface = 2 - for i in range(total_interface + default_extra_interface): - origin_encryption = self.ssh.run( - "uci get wireless.@wifi-iface[{}].encryption".format(i) - ).stdout - origin_psk_pattern = re.match(r"psk\b", origin_encryption) - target_psk_pattern = re.match(r"psk\b", encryption) - origin_psk2_pattern = re.match(r"psk2\b", origin_encryption) - target_psk2_pattern = re.match(r"psk2\b", encryption) - - if origin_psk_pattern == target_psk_pattern: - self.ssh.run( - "uci set wireless.@wifi-iface[{}].encryption={}".format( - i, encryption - ) - ) - - if origin_psk2_pattern == target_psk2_pattern: - self.ssh.run( - "uci set wireless.@wifi-iface[{}].encryption={}".format( - i, encryption - ) - ) - - self.ssh.run("uci commit wireless") - self.ssh.run("wifi") - - def set_password(self, pwd_5g=None, pwd_2g=None): - """Set password for individual interface. - - Args: - pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network. - pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network. 
- """ - if pwd_5g: - if len(pwd_5g) < 8 or len(pwd_5g) > 63: - self.log.error("Password must be 8~63 characters long") - # Only accept ascii letters and digits - elif not re.match("^[A-Za-z0-9]*$", pwd_5g): - self.log.error("Password must only contains ascii letters and digits") - else: - self.ssh.run( - "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g) - ) - self.log.info("Set 5G password to :{}".format(pwd_5g)) - - if pwd_2g: - if len(pwd_2g) < 8 or len(pwd_2g) > 63: - self.log.error("Password must be 8~63 characters long") - # Only accept ascii letters and digits - elif not re.match("^[A-Za-z0-9]*$", pwd_2g): - self.log.error("Password must only contains ascii letters and digits") - else: - self.ssh.run( - "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g) - ) - self.log.info("Set 2G password to :{}".format(pwd_2g)) - - self.ssh.run("uci commit wireless") - self.ssh.run("wifi") - - def set_ssid(self, ssid_5g=None, ssid_2g=None): - """Set SSID for individual interface. - - Args: - ssid_5g: 8 ~ 63 chars for 5g network. - ssid_2g: 8 ~ 63 chars for 2g network. - """ - if ssid_5g: - if len(ssid_5g) < 8 or len(ssid_5g) > 63: - self.log.error("SSID must be 8~63 characters long") - # Only accept ascii letters and digits - else: - self.ssh.run( - "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g) - ) - self.log.info("Set 5G SSID to :{}".format(ssid_5g)) - - if ssid_2g: - if len(ssid_2g) < 8 or len(ssid_2g) > 63: - self.log.error("SSID must be 8~63 characters long") - # Only accept ascii letters and digits - else: - self.ssh.run( - "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g) - ) - self.log.info("Set 2G SSID to :{}".format(ssid_2g)) - - self.ssh.run("uci commit wireless") - self.ssh.run("wifi") - - def generate_mobility_domain(self): - """Generate 4-character hexadecimal ID. - - Returns: - String; a 4-character hexadecimal ID. 
- """ - md = "{:04x}".format(random.getrandbits(16)) - self.log.info("Mobility Domain ID: {}".format(md)) - return md - - def enable_80211r(self, iface, md): - """Enable 802.11r for one single radio. - - Args: - iface: index number of wifi-iface. - 2: radio1 - 3: radio0 - md: mobility domain. a 4-character hexadecimal ID. - Raises: - TestSkip if 2g or 5g radio is not up or 802.11r is not enabled. - """ - str_output = self.ssh.run("wifi status").stdout - wifi_status = yaml.load( - str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader - ) - # Check if the radio is up. - if iface == OpenWrtWifiSetting.IFACE_2G: - if wifi_status[self.radios[1]]["up"]: - self.log.info("2g network is ENABLED") - else: - raise signals.TestSkip("2g network is NOT ENABLED") - elif iface == OpenWrtWifiSetting.IFACE_5G: - if wifi_status[self.radios[0]]["up"]: - self.log.info("5g network is ENABLED") - else: - raise signals.TestSkip("5g network is NOT ENABLED") - - # Setup 802.11r. - self.ssh.run("uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface)) - self.ssh.run( - "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'".format(iface) - ) - self.ssh.run( - "uci set wireless.@wifi-iface[{}].mobility_domain='{}'".format(iface, md) - ) - self.ssh.run("uci commit wireless") - self.ssh.run("wifi") - - # Check if 802.11r is enabled. - result = self.ssh.run( - "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface) - ).stdout - if result == "1": - self.log.info("802.11r is ENABLED") - else: - raise signals.TestSkip("802.11r is NOT ENABLED") - - def generate_wireless_configs(self, wifi_configs): - """Generate wireless configs to configure. - - Converts wifi_configs from configure_ap() to a list of 'WirelessConfig' - objects. Each object represents a wifi network to configure on the AP. - - Args: - wifi_configs: Network list of different security types and bands. - - Returns: - wireless configuration for openwrt AP. 
- """ - num_2g = 1 - num_5g = 1 - wireless_configs = [] - - for i in range(len(wifi_configs)): - if hostapd_constants.BAND_2G in wifi_configs[i]: - config = wifi_configs[i][hostapd_constants.BAND_2G] - if config["security"] == PSK_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=config["ieee80211w"], - ) - ) - elif config["security"] == PSK1_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=config["ieee80211w"], - ) - ) - elif config["security"] == WEP_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - wep_key=config["wepKeys"][0], - hidden=config["hiddenSSID"], - ) - ) - elif config["security"] == OPEN_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - hidden=config["hiddenSSID"], - ) - ) - elif config["security"] == OWE_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - hidden=config["hiddenSSID"], - ieee80211w=PMF_ENABLED, - ) - ) - elif config["security"] == SAE_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=PMF_ENABLED, - ) - ) - elif config["security"] == SAEMIXED_SECURITY: - wireless_configs.append( - 
wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=config["ieee80211w"], - ) - ) - elif config["security"] == ENT_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_2G, num_2g), - config["SSID"], - config["security"], - hostapd_constants.BAND_2G, - radius_server_ip=config["radius_server_ip"], - radius_server_port=config["radius_server_port"], - radius_server_secret=config["radius_server_secret"], - hidden=config["hiddenSSID"], - ) - ) - num_2g += 1 - if hostapd_constants.BAND_5G in wifi_configs[i]: - config = wifi_configs[i][hostapd_constants.BAND_5G] - if config["security"] == PSK_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=config["ieee80211w"], - ) - ) - elif config["security"] == PSK1_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=config["ieee80211w"], - ) - ) - elif config["security"] == WEP_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - wep_key=config["wepKeys"][0], - hidden=config["hiddenSSID"], - ) - ) - elif config["security"] == OPEN_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - hidden=config["hiddenSSID"], - ) - ) - elif config["security"] == OWE_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % 
(WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - hidden=config["hiddenSSID"], - ieee80211w=PMF_ENABLED, - ) - ) - elif config["security"] == SAE_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=PMF_ENABLED, - ) - ) - elif config["security"] == SAEMIXED_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - password=config["password"], - hidden=config["hiddenSSID"], - ieee80211w=config["ieee80211w"], - ) - ) - elif config["security"] == ENT_SECURITY: - wireless_configs.append( - wireless_config.WirelessConfig( - "%s%s" % (WIFI_5G, num_5g), - config["SSID"], - config["security"], - hostapd_constants.BAND_5G, - radius_server_ip=config["radius_server_ip"], - radius_server_port=config["radius_server_port"], - radius_server_secret=config["radius_server_secret"], - hidden=config["hiddenSSID"], - ) - ) - num_5g += 1 - - return wireless_configs - - def get_wifi_network(self, security=None, band=None): - """Return first match wifi interface's config. - - Args: - security: psk2 or none - band: '2g' or '5g' - - Returns: - A dict contains match wifi interface's config. - """ - - for wifi_iface in self.wireless_setting.wireless_configs: - match_list = [] - wifi_network = wifi_iface.__dict__ - if security: - match_list.append(security == wifi_network["security"]) - if band: - match_list.append(band == wifi_network["band"]) - - if all(match_list): - wifi_network["SSID"] = wifi_network["ssid"] - if not wifi_network["password"]: - del wifi_network["password"] - return wifi_network - return None - - def get_wifi_status(self): - """Check if radios are up. Default are 2G and 5G bands. - - Returns: - True if both radios are up. 
False if not. - """ - status = True - for radio in self.radios: - try: - str_output = self.ssh.run("wifi status %s" % radio).stdout - wifi_status = yaml.load( - str_output.replace("\t", "").replace("\n", ""), - Loader=yaml.SafeLoader, - ) - status = wifi_status[radio]["up"] and status - except: - self.log.info("Failed to make ssh connection to the OpenWrt") - return False - return status - - def verify_wifi_status(self, timeout=20): - """Ensure wifi interfaces are ready. - - Args: - timeout: An integer that is the number of times to try - wait for interface ready. - Returns: - True if both radios are up. False if not. - """ - start_time = time.time() - end_time = start_time + timeout - while time.time() < end_time: - if self.get_wifi_status(): - return True - time.sleep(1) - return False - - def get_model_name(self): - """Get Openwrt model name. - - Returns: - A string include device brand and model. e.g. NETGEAR_R8000 - """ - out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n") - for line in out: - if "board_name" in line: - model = line.split()[1].strip('",').split(",") - return "_".join(map(lambda i: i.upper(), model)) - self.log.info("Failed to retrieve OpenWrt model information.") - return None - - def close(self): - """Reset wireless and network settings to default and stop AP.""" - if self.network_setting.config: - self.network_setting.cleanup_network_settings() - if self.wireless_setting: - self.wireless_setting.cleanup_wireless_settings() - - def close_ssh(self): - """Close SSH connection to AP.""" - self.ssh.close() - - def reboot(self): - """Reboot Openwrt.""" - self.ssh.run("reboot")
diff --git a/src/antlion/controllers/openwrt_lib/OWNERS b/src/antlion/controllers/openwrt_lib/OWNERS deleted file mode 100644 index 6ddb5ea..0000000 --- a/src/antlion/controllers/openwrt_lib/OWNERS +++ /dev/null
@@ -1,4 +0,0 @@ -jerrypcchen@google.com -gmoturu@google.com -martschneider@google.com -sishichen@google.com
diff --git a/src/antlion/controllers/openwrt_lib/__init__.py b/src/antlion/controllers/openwrt_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/openwrt_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/openwrt_lib/network_const.py b/src/antlion/controllers/openwrt_lib/network_const.py deleted file mode 100644 index 3b05b83..0000000 --- a/src/antlion/controllers/openwrt_lib/network_const.py +++ /dev/null
@@ -1,289 +0,0 @@ -LOCALHOST = "192.168.1.1" - -# params for ipsec.conf -IPSEC_CONF = { - "config setup": { - "charondebug": "chd 2,ike 2,knl 2,net 2,esp 2,dmn 2," - "mgr 2,lib 1,cfg 2,enc 1".__repr__(), - "uniqueids": "never", - }, - "conn %default": {"ike": "aes128-sha-modp1024", "esp": "aes128-sha1"}, -} - -IPSEC_L2TP_PSK = { - "conn L2TP_PSK": { - "keyexchange": "ikev1", - "type": "transport", - "left": LOCALHOST, - "leftprotoport": "17/1701", - "leftauth": "psk", - "right": "%any", - "rightprotoport": "17/%any", - "rightsubnet": "0.0.0.0/0", - "rightauth": "psk", - "auto": "add", - } -} - -IPSEC_L2TP_RSA = { - "conn L2TP_RSA": { - "keyexchange": "ikev1", - "type": "transport", - "left": LOCALHOST, - "leftprotoport": "17/1701", - "leftauth": "pubkey", - "leftcert": "serverCert.der", - "right": "%any", - "rightprotoport": "17/%any", - "rightsubnet": "0.0.0.0/0", - "rightauth": "pubkey", - "auto": "add", - } -} - -IPSEC_HYBRID_RSA = { - "conn HYBRID_RSA": { - "keyexchange": "ikev1", - "left": LOCALHOST, - "leftsubnet": "0.0.0.0/0", - "leftauth": "pubkey", - "leftcert": "serverCert.der", - "leftsendcert": "always", - "right": "%any", - "rightsubnet": "0.0.0.0/0", - "rightauth": "pubkey", - "rightauth2": "xauth", - "xauth": "server", - "auto": "add", - } -} - -IPSEC_XAUTH_PSK = { - "conn XAUTH_PSK": { - "keyexchange": "ikev1", - "left": LOCALHOST, - "leftsubnet": "0.0.0.0/0", - "leftauth": "psk", - "right": "%any", - "rightsubnet": "0.0.0.0/0", - "rightauth": "psk", - "rightauth2": "xauth", - "auto": "add", - } -} - -IPSEC_XAUTH_RSA = { - "conn XAUTH_RSA": { - "keyexchange": "ikev1", - "left": LOCALHOST, - "leftsubnet": "0.0.0.0/0", - "leftcert": "serverCert.der", - "leftsendcert": "always", - "right": "%any", - "rightsubnet": "0.0.0.0/0", - "rightauth": "xauth", - "xauth": "server", - "auto": "add", - } -} - -IPSEC_IKEV2_MSCHAPV2 = { - "conn IKEV2_MSCHAPV2": { - "keyexchange": "ikev2", - "left": LOCALHOST, - "leftid": LOCALHOST, - "leftcert": "serverCert.der", - 
"leftsubnet": "0.0.0.0/0", - "leftauth": "pubkey", - "leftsendcert": "always", - "right": "%any", - "rightid": "vpntest", - "rightauth": "eap-mschapv2", - "auto": "add", - } -} - -IPSEC_IKEV2_PSK = { - "conn IKEV2_PSK": { - "keyexchange": "ikev2", - "left": LOCALHOST, - "leftid": LOCALHOST, - "leftauth": "psk", - "leftsubnet": "0.0.0.0/0", - "right": "%any", - "rightid": "vpntest", - "rightauth": "psk", - "auto": "add", - } -} - -IPSEC_IKEV2_RSA = { - "conn IKEV2_RSA": { - "keyexchange": "ikev2", - "left": LOCALHOST, - "leftid": LOCALHOST, - "leftcert": "serverCert.der", - "leftsubnet": "0.0.0.0/0", - "leftauth": "pubkey", - "leftsendcert": "always", - "right": "%any", - "rightid": "vpntest@%s" % LOCALHOST, - "rightauth": "pubkey", - "rightcert": "clientCert.pem", - "auto": "add", - } -} - -IPSEC_IKEV2_MSCHAPV2_HOSTNAME = { - "conn IKEV2_MSCHAPV2_HOSTNAME": { - "keyexchange": "ikev2", - "left": LOCALHOST, - "leftid": "strongswan-vpn-server.android-iperf.com", - "leftcert": "serverCert.der", - "leftsubnet": "0.0.0.0/0", - "leftauth": "pubkey", - "leftsendcert": "always", - "right": "%any", - "rightid": "vpntest", - "rightauth": "eap-mschapv2", - "auto": "add", - } -} - -IPSEC_IKEV2_PSK_HOSTNAME = { - "conn IKEV2_PSK_HOSTNAME": { - "keyexchange": "ikev2", - "left": LOCALHOST, - "leftid": "strongswan-vpn-server.android-iperf.com", - "leftauth": "psk", - "leftsubnet": "0.0.0.0/0", - "right": "%any", - "rightid": "vpntest", - "rightauth": "psk", - "auto": "add", - } -} - -IPSEC_IKEV2_RSA_HOSTNAME = { - "conn IKEV2_RSA_HOSTNAME": { - "keyexchange": "ikev2", - "left": LOCALHOST, - "leftid": "strongswan-vpn-server.android-iperf.com", - "leftcert": "serverCert.der", - "leftsubnet": "0.0.0.0/0", - "leftauth": "pubkey", - "leftsendcert": "always", - "right": "%any", - "rightid": "vpntest@strongswan-vpn-server.android-iperf.com", - "rightauth": "pubkey", - "rightcert": "clientCert.pem", - "auto": "add", - } -} - -# parmas for lx2tpd - -XL2TPD_CONF_GLOBAL = ( - "[global]", - 
"ipsec saref = no", - "debug tunnel = no", - "debug avp = no", - "debug network = no", - "debug state = no", - "access control = no", - "rand source = dev", - "port = 1701", -) - -XL2TPD_CONF_INS = ( - "[lns default]", - "require authentication = yes", - "pass peer = yes", - "ppp debug = no", - "length bit = yes", - "refuse pap = yes", - "refuse chap = yes", -) - -XL2TPD_OPTION = ( - "require-mschap-v2", - "refuse-mschap", - "ms-dns 8.8.8.8", - "ms-dns 8.8.4.4", - "asyncmap 0", - "auth", - "crtscts", - "idle 1800", - "mtu 1410", - "mru 1410", - "connect-delay 5000", - "lock", - "hide-password", - "local", - "debug", - "modem", - "proxyarp", - "lcp-echo-interval 30", - "lcp-echo-failure 4", - "nomppe", -) - -# iptable rules for vpn_pptp -FIREWALL_RULES_FOR_PPTP = ( - "iptables -A input_rule -i ppp+ -j ACCEPT", - "iptables -A output_rule -o ppp+ -j ACCEPT", - "iptables -A forwarding_rule -i ppp+ -j ACCEPT", -) - -# iptable rules for vpn_l2tp -FIREWALL_RULES_FOR_L2TP = ( - "iptables -I INPUT -m policy --dir in --pol ipsec --proto esp -j ACCEPT", - "iptables -I FORWARD -m policy --dir in --pol ipsec --proto esp -j ACCEPT", - "iptables -I FORWARD -m policy --dir out --pol ipsec --proto esp -j ACCEPT", - "iptables -I OUTPUT -m policy --dir out --pol ipsec --proto esp -j ACCEPT", - "iptables -t nat -I POSTROUTING -m policy --pol ipsec --dir out -j ACCEPT", - "iptables -A INPUT -p esp -j ACCEPT", - "iptables -A INPUT -i eth0.2 -p udp --dport 500 -j ACCEPT", - "iptables -A INPUT -i eth0.2 -p tcp --dport 500 -j ACCEPT", - "iptables -A INPUT -i eth0.2 -p udp --dport 4500 -j ACCEPT", - "iptables -A INPUT -p udp --dport 500 -j ACCEPT", - "iptables -A INPUT -p udp --dport 4500 -j ACCEPT", - "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT", -) - -FIREWALL_RULES_DISABLE_DNS_RESPONSE = ( - "iptables -I OUTPUT -p udp --sport 53 -j DROP", - "iptables -I OUTPUT -p tcp --sport 53 -j DROP", - "ip6tables -I OUTPUT -p udp --sport 53 -j DROP", - 
"ip6tables -I OUTPUT -p tcp --sport 53 -j DROP", -) - - -# Object for vpn profile -class VpnL2tp(object): - """Profile for vpn l2tp type. - - Attributes: - hostname: vpn server domain name - address: vpn server address - username: vpn user account - password: vpn user password - psk_secret: psk for ipsec - name: vpn server name for register in OpenWrt - """ - - def __init__( - self, - vpn_server_hostname, - vpn_server_address, - vpn_username, - vpn_password, - psk_secret, - server_name, - ): - self.name = server_name - self.hostname = vpn_server_hostname - self.address = vpn_server_address - self.username = vpn_username - self.password = vpn_password - self.psk_secret = psk_secret
diff --git a/src/antlion/controllers/openwrt_lib/network_settings.py b/src/antlion/controllers/openwrt_lib/network_settings.py deleted file mode 100644 index 5d14360..0000000 --- a/src/antlion/controllers/openwrt_lib/network_settings.py +++ /dev/null
@@ -1,1149 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import time - -from antlion import signals -from antlion import utils -from antlion.controllers.openwrt_lib import network_const - -SERVICE_DNSMASQ = "dnsmasq" -SERVICE_STUNNEL = "stunnel" -SERVICE_NETWORK = "network" -SERVICE_PPTPD = "pptpd" -SERVICE_FIREWALL = "firewall" -SERVICE_IPSEC = "ipsec" -SERVICE_XL2TPD = "xl2tpd" -SERVICE_ODHCPD = "odhcpd" -SERVICE_OPENNDS = "opennds" -SERVICE_UHTTPD = "uhttpd" -PPTP_PACKAGE = "pptpd kmod-nf-nathelper-extra" -L2TP_PACKAGE = "strongswan-full openssl-util xl2tpd" -NAT6_PACKAGE = "ip6tables kmod-ipt-nat6" -CAPTIVE_PORTAL_PACKAGE = "opennds php7-cli php7-mod-openssl php7-cgi php7" -MDNS_PACKAGE = "avahi-utils avahi-daemon-service-http avahi-daemon-service-ssh libavahi-client avahi-dbus-daemon" -STUNNEL_CONFIG_PATH = "/etc/stunnel/DoTServer.conf" -HISTORY_CONFIG_PATH = "/etc/dirty_configs" -PPTPD_OPTION_PATH = "/etc/ppp/options.pptpd" -XL2TPD_CONFIG_PATH = "/etc/xl2tpd/xl2tpd.conf" -XL2TPD_OPTION_CONFIG_PATH = "/etc/ppp/options.xl2tpd" -FIREWALL_CUSTOM_OPTION_PATH = "/etc/firewall.user" -PPP_CHAP_SECRET_PATH = "/etc/ppp/chap-secrets" -IKEV2_VPN_CERT_KEYS_PATH = "/var/ikev2_cert.sh" -TCPDUMP_DIR = "/tmp/tcpdump/" -LOCALHOST = "192.168.1.1" -DEFAULT_PACKAGE_INSTALL_TIMEOUT = 200 - - -class NetworkSettings(object): - """Class for network settings. - - Attributes: - ssh: ssh connection object. 
- ssh_settings: ssh settings for AccessPoint. - service_manager: Object manage service configuration. - user: username for ssh. - ip: ip address for AccessPoint. - log: Logging object for AccessPoint. - config: A list to store changes on network settings. - firewall_rules_list: A list of firewall rule name list. - cleanup_map: A dict for compare oppo functions. - l2tp: profile for vpn l2tp server. - """ - - def __init__(self, ssh, ssh_settings, logger): - """Initialize wireless settings. - - Args: - ssh: ssh connection object. - ssh_settings: ssh settings for AccessPoint. - logger: Logging object for AccessPoint. - """ - self.ssh = ssh - self.service_manager = ServiceManager(ssh) - self.ssh_settings = ssh_settings - self.user = self.ssh_settings.username - self.ip = self.ssh_settings.hostname - self.log = logger - self.config = set() - self.firewall_rules_list = [] - self.cleanup_map = { - "setup_dns_server": self.remove_dns_server, - "setup_vpn_pptp_server": self.remove_vpn_pptp_server, - "setup_vpn_l2tp_server": self.remove_vpn_l2tp_server, - "disable_ipv6": self.enable_ipv6, - "setup_ipv6_bridge": self.remove_ipv6_bridge, - "default_dns": self.del_default_dns, - "default_v6_dns": self.del_default_v6_dns, - "ipv6_prefer_option": self.remove_ipv6_prefer_option, - "block_dns_response": self.unblock_dns_response, - "setup_mdns": self.remove_mdns, - "add_dhcp_rapid_commit": self.remove_dhcp_rapid_commit, - "setup_captive_portal": self.remove_cpative_portal, - } - # This map contains cleanup functions to restore the configuration to - # its default state. We write these keys to HISTORY_CONFIG_PATH prior to - # making any changes to that subsystem. - # This makes it easier to recover after an aborted test. - self.update_firewall_rules_list() - self.cleanup_network_settings() - self.clear_tcpdump() - - def cleanup_network_settings(self): - """Reset all changes on Access point.""" - - # Detect if any changes that is not clean up. 
- if self.file_exists(HISTORY_CONFIG_PATH): - out = self.ssh.run("cat %s" % HISTORY_CONFIG_PATH).stdout - if out: - self.config = set(out.split("\n")) - - if self.config: - temp = self.config.copy() - for change in temp: - change_list = change.split() - if len(change_list) > 1: - self.cleanup_map[change_list[0]](*change_list[1:]) - else: - self.cleanup_map[change]() - self.config = set() - - if self.file_exists(HISTORY_CONFIG_PATH): - out = self.ssh.run("cat %s" % HISTORY_CONFIG_PATH).stdout - if not out: - self.ssh.run("rm %s" % HISTORY_CONFIG_PATH) - - def commit_changes(self): - """Apply changes on Access point.""" - self.ssh.run("uci commit") - self.service_manager.restart_services() - self.create_config_file("\n".join(self.config), HISTORY_CONFIG_PATH) - - def package_install(self, package_list): - """Install packages on OpenWrtAP via opkg If not installed. - - Args: - package_list: package list to install. - e.g. "pptpd kmod-mppe kmod-nf-nathelper-extra" - """ - self.ssh.run("opkg update") - for package_name in package_list.split(" "): - if not self._package_installed(package_name): - self.ssh.run( - "opkg install %s" % package_name, - timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT, - ) - self.log.info("Package: %s installed." % package_name) - else: - self.log.info("Package: %s skipped (already installed)." % package_name) - - def package_remove(self, package_list): - """Remove packages on OpenWrtAP via opkg If existed. - - Args: - package_list: package list to remove. - """ - for package_name in package_list.split(" "): - if self._package_installed(package_name): - self.ssh.run("opkg remove %s" % package_name) - self.log.info("Package: %s removed." % package_name) - else: - self.log.info("No exist package %s found." % package_name) - - def _package_installed(self, package_name): - """Check if target package installed on OpenWrtAP. - - Args: - package_name: package name want to check. - - Returns: - True if installed. 
- """ - if self.ssh.run("opkg list-installed %s" % package_name).stdout: - return True - return False - - def file_exists(self, abs_file_path): - """Check if target file exist on specific path on OpenWrt. - - Args: - abs_file_path: Absolute path for the file. - - Returns: - True if Existed. - """ - path, file_name = abs_file_path.rsplit("/", 1) - if self.ssh.run( - "ls %s | grep %s" % (path, file_name), ignore_status=True - ).stdout: - return True - return False - - def path_exists(self, abs_path): - """Check if dir exist on OpenWrt. - - Args: - abs_path: absolutely path for create folder. - """ - try: - self.ssh.run("ls %s" % abs_path) - except: - return False - return True - - def create_folder(self, abs_path): - """If dir not exist, create it. - - Args: - abs_path: absolutely path for create folder. - """ - if not self.path_exists(abs_path): - self.ssh.run("mkdir %s" % abs_path) - else: - self.log.info("%s already existed." % abs_path) - - def count(self, config, key): - """Count in uci config. - - Args: - config: config or section to research - key: keywords to e.g. rule, domain - Returns: - Numbers of the count. - """ - count = self.ssh.run( - "uci show %s | grep =%s" % (config, key), ignore_status=True - ).stdout - return len(count.split("\n")) - - def create_config_file(self, config, file_path): - """Create config file. Overwrite if file already exist. - - Args: - config: A string of content of config. - file_path: Config's abs_path. - """ - self.ssh.run('echo -e "%s" > %s' % (config, file_path)) - - def replace_config_option(self, old_option, new_option, file_path): - """Replace config option if pattern match. - - If find match pattern with old_option, then replace it with new_option. - Else add new_option to the file. - - Args: - old_option: the regexp pattern to replace. - new_option: the option to add. - file_path: Config's abs_path. 
- """ - config = self.ssh.run("cat %s" % file_path).stdout - config, count = re.subn(old_option, new_option, config) - if not count: - config = "\n".join([config, new_option]) - self.create_config_file(config, file_path) - - def remove_config_option(self, option, file_path): - """Remove option from config file. - - Args: - option: Option to remove. Support regular expression. - file_path: Config's abs_path. - Returns: - Boolean for find option to remove. - """ - config = self.ssh.run("cat %s" % file_path).stdout.split("\n") - for line in config: - count = re.subn(option, "", line)[1] - if count > 0: - config.remove(line) - self.create_config_file("\n".join(config), file_path) - return True - self.log.warning("No match option to remove.") - return False - - def setup_dns_server(self, domain_name): - """Setup DNS server on OpenWrtAP. - - Args: - domain_name: Local dns domain name. - """ - self.config.add("setup_dns_server") - self.log.info("Setup DNS server with domain name %s" % domain_name) - self.ssh.run("uci set dhcp.@dnsmasq[0].local='/%s/'" % domain_name) - self.ssh.run("uci set dhcp.@dnsmasq[0].domain='%s'" % domain_name) - self.add_resource_record(domain_name, self.ip) - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - # Check stunnel package is installed - self.package_install("stunnel") - self.service_manager.stop(SERVICE_STUNNEL) - self.service_manager.disable(SERVICE_STUNNEL) - - # Enable stunnel - self.create_stunnel_config() - self.ssh.run("stunnel /etc/stunnel/DoTServer.conf") - - def remove_dns_server(self): - """Remove DNS server on OpenWrtAP.""" - if self.file_exists("/var/run/stunnel.pid"): - self.ssh.run("kill $(cat /var/run/stunnel.pid)") - self.ssh.run("uci set dhcp.@dnsmasq[0].local='/lan/'") - self.ssh.run("uci set dhcp.@dnsmasq[0].domain='lan'") - self.clear_resource_record() - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.config.discard("setup_dns_server") - self.commit_changes() - - def 
add_resource_record(self, domain_name, domain_ip): - """Add resource record. - - Args: - domain_name: A string for domain name. - domain_ip: A string for domain ip. - """ - self.ssh.run("uci add dhcp domain") - self.ssh.run("uci set dhcp.@domain[-1].name='%s'" % domain_name) - self.ssh.run("uci set dhcp.@domain[-1].ip='%s'" % domain_ip) - self.service_manager.need_restart(SERVICE_DNSMASQ) - - def del_resource_record(self): - """Delete the last resource record.""" - self.ssh.run("uci delete dhcp.@domain[-1]") - self.service_manager.need_restart(SERVICE_DNSMASQ) - - def clear_resource_record(self): - """Delete the all resource record.""" - rr = self.ssh.run("uci show dhcp | grep =domain", ignore_status=True).stdout - if rr: - for _ in rr.split("\n"): - self.del_resource_record() - self.service_manager.need_restart(SERVICE_DNSMASQ) - - def create_stunnel_config(self): - """Create config for stunnel service.""" - stunnel_config = [ - "pid = /var/run/stunnel.pid", - "[dns]", - "accept = 853", - "connect = 127.0.0.1:53", - "cert = /etc/stunnel/fullchain.pem", - "key = /etc/stunnel/privkey.pem", - ] - config_string = "\n".join(stunnel_config) - self.create_config_file(config_string, STUNNEL_CONFIG_PATH) - - def setup_vpn_pptp_server(self, local_ip, user, password): - """Setup pptp vpn server on OpenWrt. - - Args: - local_ip: local pptp server ip address. - user: username for pptp user. - password: password for pptp user. 
- """ - # Install pptp service - self.package_install(PPTP_PACKAGE) - - self.config.add("setup_vpn_pptp_server") - # Edit /etc/config/pptpd & /etc/ppp/options.pptpd - self.setup_pptpd(local_ip, user, password) - # Edit /etc/config/firewall & /etc/firewall.user - self.setup_firewall_rules_for_pptp() - # Enable service - self.service_manager.enable(SERVICE_PPTPD) - self.service_manager.need_restart(SERVICE_PPTPD) - self.service_manager.need_restart(SERVICE_FIREWALL) - self.commit_changes() - - def remove_vpn_pptp_server(self): - """Remove pptp vpn server on OpenWrt.""" - # Edit /etc/config/pptpd - self.restore_pptpd() - # Edit /etc/config/firewall & /etc/firewall.user - self.restore_firewall_rules_for_pptp() - # Disable service - self.service_manager.disable(SERVICE_PPTPD) - self.service_manager.need_restart(SERVICE_PPTPD) - self.service_manager.need_restart(SERVICE_FIREWALL) - self.config.discard("setup_vpn_pptp_server") - self.commit_changes() - - self.package_remove(PPTP_PACKAGE) - self.ssh.run("rm /etc/ppp/options.pptpd") - self.ssh.run("rm /etc/config/pptpd") - - def setup_pptpd(self, local_ip, username, password, ms_dns="8.8.8.8"): - """Setup pptpd config for ip addr and account. - - Args: - local_ip: vpn server address - username: pptp vpn username - password: pptp vpn password - ms_dns: DNS server - """ - # Calculate remote ip address - # e.g. 
local_ip = 10.10.10.9 - # remote_ip = 10.10.10.10 -250 - remote_ip = local_ip.split(".") - remote_ip.append(str(int(remote_ip.pop(-1)) + 1)) - remote_ip = ".".join(remote_ip) - # Enable pptp service and set ip addr - self.ssh.run("uci set pptpd.pptpd.enabled=1") - self.ssh.run("uci set pptpd.pptpd.localip='%s'" % local_ip) - self.ssh.run("uci set pptpd.pptpd.remoteip='%s-250'" % remote_ip) - - # Setup pptp service account - self.ssh.run("uci set pptpd.@login[0].username='%s'" % username) - self.ssh.run("uci set pptpd.@login[0].password='%s'" % password) - self.service_manager.need_restart(SERVICE_PPTPD) - - self.replace_config_option( - r"#*ms-dns \d+.\d+.\d+.\d+", "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH - ) - self.replace_config_option("(#no)*proxyarp", "proxyarp", PPTPD_OPTION_PATH) - - def restore_pptpd(self): - """Disable pptpd.""" - self.ssh.run("uci set pptpd.pptpd.enabled=0") - self.remove_config_option(r"\S+ pptp-server \S+ \*", PPP_CHAP_SECRET_PATH) - self.service_manager.need_restart(SERVICE_PPTPD) - - def setup_vpn_l2tp_server( - self, - vpn_server_hostname, - vpn_server_address, - vpn_username, - vpn_password, - psk_secret, - server_name, - country, - org, - ): - """Setup l2tp vpn server on OpenWrt. - - Args: - vpn_server_hostname: vpn server domain name - vpn_server_address: vpn server addr - vpn_username: vpn account - vpn_password: vpn password - psk_secret: psk for ipsec - server_name: vpn server name for register in OpenWrt - country: country code for generate cert keys. - org: Organization name for generate cert keys. 
- """ - self.l2tp = network_const.VpnL2tp( - vpn_server_hostname, - vpn_server_address, - vpn_username, - vpn_password, - psk_secret, - server_name, - ) - - self.package_install(L2TP_PACKAGE) - self.config.add("setup_vpn_l2tp_server") - - # /etc/strongswan.conf: Strongswan configuration file - self.setup_strongswan() - # /etc/ipsec.conf /etc/ipsec.secrets - self.setup_ipsec() - # /etc/xl2tpd/xl2tpd.conf & /etc/ppp/options.xl2tpd - self.setup_xl2tpd() - # /etc/ppp/chap-secrets - self.setup_ppp_secret() - # /etc/config/firewall & /etc/firewall.user - self.setup_firewall_rules_for_l2tp() - # setup vpn server local ip - self.setup_vpn_local_ip() - # generate cert and key for rsa - if self.l2tp.name == "ikev2-server": - self.generate_ikev2_vpn_cert_keys(country, org) - self.add_resource_record(self.l2tp.hostname, LOCALHOST) - else: - self.generate_vpn_cert_keys(country, org) - # restart service - self.service_manager.need_restart(SERVICE_IPSEC) - self.service_manager.need_restart(SERVICE_XL2TPD) - self.service_manager.need_restart(SERVICE_FIREWALL) - self.commit_changes() - - def remove_vpn_l2tp_server(self): - """Remove l2tp vpn server on OpenWrt.""" - self.config.discard("setup_vpn_l2tp_server") - self.restore_firewall_rules_for_l2tp() - self.remove_vpn_local_ip() - if self.l2tp.name == "ikev2-server": - self.clear_resource_record() - self.service_manager.need_restart(SERVICE_IPSEC) - self.service_manager.need_restart(SERVICE_XL2TPD) - self.service_manager.need_restart(SERVICE_FIREWALL) - self.commit_changes() - self.package_remove(L2TP_PACKAGE) - if hasattr(self, "l2tp"): - delattr(self, "l2tp") - - def setup_strongswan(self, dns="8.8.8.8"): - """Setup strongswan config.""" - config = [ - "charon {", - " load_modular = yes", - " plugins {", - " include strongswan.d/charon/*.conf", - " }", - " dns1=%s" % dns, - "}", - ] - self.create_config_file("\n".join(config), "/etc/strongswan.conf") - - def setup_ipsec(self): - """Setup ipsec config.""" - - def 
load_ipsec_config(data, rightsourceip=False): - for i in data.keys(): - config.append(i) - for j in data[i].keys(): - config.append("\t %s=%s" % (j, data[i][j])) - if rightsourceip: - config.append( - "\t rightsourceip=%s.16/26" - % self.l2tp.address.rsplit(".", 1)[0] - ) - config.append("") - - config = [] - load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2, True) - load_ipsec_config(network_const.IPSEC_IKEV2_PSK, True) - load_ipsec_config(network_const.IPSEC_IKEV2_RSA, True) - load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2_HOSTNAME, True) - load_ipsec_config(network_const.IPSEC_IKEV2_PSK_HOSTNAME, True) - load_ipsec_config(network_const.IPSEC_IKEV2_RSA_HOSTNAME, True) - load_ipsec_config(network_const.IPSEC_CONF) - load_ipsec_config(network_const.IPSEC_L2TP_PSK) - load_ipsec_config(network_const.IPSEC_L2TP_RSA) - load_ipsec_config(network_const.IPSEC_HYBRID_RSA, True) - load_ipsec_config(network_const.IPSEC_XAUTH_PSK, True) - load_ipsec_config(network_const.IPSEC_XAUTH_RSA, True) - self.create_config_file("\n".join(config), "/etc/ipsec.conf") - - ipsec_secret = [] - ipsec_secret.append(r": PSK \"%s\"" % self.l2tp.psk_secret) - ipsec_secret.append(r": RSA \"%s\"" % "serverKey.der") - ipsec_secret.append( - r"%s : XAUTH \"%s\"" % (self.l2tp.username, self.l2tp.password) - ) - self.create_config_file("\n".join(ipsec_secret), "/etc/ipsec.secrets") - - def setup_xl2tpd(self, ip_range=20): - """Setup xl2tpd config.""" - net_id, host_id = self.l2tp.address.rsplit(".", 1) - xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL) - xl2tpd_conf.append("auth file = %s" % PPP_CHAP_SECRET_PATH) - xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS) - xl2tpd_conf.append( - "ip range = %s.%s-%s.%s" - % (net_id, host_id, net_id, str(int(host_id) + ip_range)) - ) - xl2tpd_conf.append("local ip = %s" % self.l2tp.address) - xl2tpd_conf.append("name = %s" % self.l2tp.name) - xl2tpd_conf.append("pppoptfile = %s" % XL2TPD_OPTION_CONFIG_PATH) - - 
self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH) - xl2tpd_option = list(network_const.XL2TPD_OPTION) - xl2tpd_option.append("name %s" % self.l2tp.name) - self.create_config_file("\n".join(xl2tpd_option), XL2TPD_OPTION_CONFIG_PATH) - - def setup_ppp_secret(self): - self.replace_config_option( - r"\S+ %s \S+ \*" % self.l2tp.name, - "%s %s %s *" % (self.l2tp.username, self.l2tp.name, self.l2tp.password), - PPP_CHAP_SECRET_PATH, - ) - - def generate_vpn_cert_keys(self, country, org): - """Generate cert and keys for vpn server.""" - rsa = "--type rsa" - lifetime = "--lifetime 365" - size = "--size 4096" - - self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size)) - self.ssh.run( - "ipsec pki --self --ca %s --in caKey.der %s --dn " - '"C=%s, O=%s, CN=%s" --outform der > caCert.der' - % (lifetime, rsa, country, org, self.l2tp.hostname) - ) - self.ssh.run( - "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa) - ) - self.ssh.run( - "ipsec pki --pub --in serverKey.der %s | ipsec pki " - "--issue %s --cacert caCert.der --cakey caKey.der " - '--dn "C=%s, O=%s, CN=%s" --san %s --flag serverAuth' - " --flag ikeIntermediate --outform der > serverCert.der" - % (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST) - ) - self.ssh.run( - "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa) - ) - self.ssh.run( - "ipsec pki --pub --in clientKey.der %s | ipsec pki " - "--issue %s --cacert caCert.der --cakey caKey.der " - '--dn "C=%s, O=%s, CN=%s@%s" --outform der > ' - "clientCert.der" - % (rsa, lifetime, country, org, self.l2tp.username, self.l2tp.hostname) - ) - - self.ssh.run( - "openssl rsa -inform DER -in clientKey.der" - " -out clientKey.pem -outform PEM" - ) - self.ssh.run( - "openssl x509 -inform DER -in clientCert.der" - " -out clientCert.pem -outform PEM" - ) - self.ssh.run( - "openssl x509 -inform DER -in caCert.der" " -out caCert.pem -outform PEM" - ) - self.ssh.run( - "openssl pkcs12 -in 
clientCert.pem -inkey clientKey.pem" - " -certfile caCert.pem -export -out clientPkcs.p12 -passout pass:" - ) - - self.ssh.run("mv caCert.pem /etc/ipsec.d/cacerts/") - self.ssh.run("mv *Cert* /etc/ipsec.d/certs/") - self.ssh.run("mv *Key* /etc/ipsec.d/private/") - if not self.path_exists("/www/downloads/"): - self.ssh.run("mkdir /www/downloads/") - self.ssh.run("mv clientPkcs.p12 /www/downloads/") - self.ssh.run("chmod 664 /www/downloads/clientPkcs.p12") - - def generate_ikev2_vpn_cert_keys(self, country, org): - rsa = "--type rsa" - lifetime = "--lifetime 365" - size = "--size 4096" - - if not self.path_exists("/www/downloads/"): - self.ssh.run("mkdir /www/downloads/") - - ikev2_vpn_cert_keys = [ - "ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size), - "ipsec pki --self --ca %s --in caKey.der %s --dn " - '"C=%s, O=%s, CN=%s" --outform der > caCert.der' - % (lifetime, rsa, country, org, self.l2tp.hostname), - "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa), - "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s " - r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" " - "--san %s --san %s --flag serverAuth --flag ikeIntermediate " - "--outform der > serverCert.der" - % ( - rsa, - lifetime, - country, - org, - self.l2tp.hostname, - LOCALHOST, - self.l2tp.hostname, - ), - "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa), - "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s " - r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" " - r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der " - "> clientCert.der" - % ( - rsa, - lifetime, - country, - org, - self.l2tp.username, - self.l2tp.hostname, - self.l2tp.username, - self.l2tp.username, - LOCALHOST, - self.l2tp.username, - self.l2tp.hostname, - ), - "openssl rsa -inform DER -in clientKey.der " - "-out clientKey.pem -outform PEM", - "openssl x509 -inform DER -in clientCert.der " - "-out clientCert.pem -outform 
PEM", - "openssl x509 -inform DER -in caCert.der " "-out caCert.pem -outform PEM", - "openssl pkcs12 -in clientCert.pem -inkey clientKey.pem " - "-certfile caCert.pem -export -out clientPkcs.p12 -passout pass:", - "mv caCert.pem /etc/ipsec.d/cacerts/", - "mv *Cert* /etc/ipsec.d/certs/", - "mv *Key* /etc/ipsec.d/private/", - "mv clientPkcs.p12 /www/downloads/", - "chmod 664 /www/downloads/clientPkcs.p12", - ] - file_string = "\n".join(ikev2_vpn_cert_keys) - self.create_config_file(file_string, IKEV2_VPN_CERT_KEYS_PATH) - - self.ssh.run("chmod +x %s" % IKEV2_VPN_CERT_KEYS_PATH) - self.ssh.run("%s" % IKEV2_VPN_CERT_KEYS_PATH) - - def update_firewall_rules_list(self): - """Update rule list in /etc/config/firewall.""" - new_rules_list = [] - for i in range(self.count("firewall", "rule")): - rule = self.ssh.run("uci get firewall.@rule[%s].name" % i).stdout - new_rules_list.append(rule) - self.firewall_rules_list = new_rules_list - - def setup_firewall_rules_for_pptp(self): - """Setup firewall for vpn pptp server.""" - self.update_firewall_rules_list() - if "pptpd" not in self.firewall_rules_list: - self.ssh.run("uci add firewall rule") - self.ssh.run("uci set firewall.@rule[-1].name='pptpd'") - self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'") - self.ssh.run("uci set firewall.@rule[-1].proto='tcp'") - self.ssh.run("uci set firewall.@rule[-1].dest_port='1723'") - self.ssh.run("uci set firewall.@rule[-1].family='ipv4'") - self.ssh.run("uci set firewall.@rule[-1].src='wan'") - - if "GRP" not in self.firewall_rules_list: - self.ssh.run("uci add firewall rule") - self.ssh.run("uci set firewall.@rule[-1].name='GRP'") - self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'") - self.ssh.run("uci set firewall.@rule[-1].src='wan'") - self.ssh.run("uci set firewall.@rule[-1].proto='47'") - - iptable_rules = list(network_const.FIREWALL_RULES_FOR_PPTP) - self.add_custom_firewall_rules(iptable_rules) - self.service_manager.need_restart(SERVICE_FIREWALL) - - def 
restore_firewall_rules_for_pptp(self): - """Restore firewall for vpn pptp server.""" - self.update_firewall_rules_list() - if "pptpd" in self.firewall_rules_list: - self.ssh.run( - "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("pptpd") - ) - self.update_firewall_rules_list() - if "GRP" in self.firewall_rules_list: - self.ssh.run( - "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("GRP") - ) - self.remove_custom_firewall_rules() - self.service_manager.need_restart(SERVICE_FIREWALL) - - def setup_firewall_rules_for_l2tp(self): - """Setup firewall for vpn l2tp server.""" - self.update_firewall_rules_list() - if "ipsec esp" not in self.firewall_rules_list: - self.ssh.run("uci add firewall rule") - self.ssh.run("uci set firewall.@rule[-1].name='ipsec esp'") - self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'") - self.ssh.run("uci set firewall.@rule[-1].proto='esp'") - self.ssh.run("uci set firewall.@rule[-1].src='wan'") - - if "ipsec nat-t" not in self.firewall_rules_list: - self.ssh.run("uci add firewall rule") - self.ssh.run("uci set firewall.@rule[-1].name='ipsec nat-t'") - self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'") - self.ssh.run("uci set firewall.@rule[-1].src='wan'") - self.ssh.run("uci set firewall.@rule[-1].proto='udp'") - self.ssh.run("uci set firewall.@rule[-1].dest_port='4500'") - - if "auth header" not in self.firewall_rules_list: - self.ssh.run("uci add firewall rule") - self.ssh.run("uci set firewall.@rule[-1].name='auth header'") - self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'") - self.ssh.run("uci set firewall.@rule[-1].src='wan'") - self.ssh.run("uci set firewall.@rule[-1].proto='ah'") - - net_id = self.l2tp.address.rsplit(".", 1)[0] - iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP) - iptable_rules.append("iptables -A FORWARD -s %s.0/24" " -j ACCEPT" % net_id) - iptable_rules.append( - "iptables -t nat -A POSTROUTING" - " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id - ) - - 
self.add_custom_firewall_rules(iptable_rules) - self.service_manager.need_restart(SERVICE_FIREWALL) - - def restore_firewall_rules_for_l2tp(self): - """Restore firewall for vpn l2tp server.""" - self.update_firewall_rules_list() - if "ipsec esp" in self.firewall_rules_list: - self.ssh.run( - "uci del firewall.@rule[%s]" - % self.firewall_rules_list.index("ipsec esp") - ) - self.update_firewall_rules_list() - if "ipsec nat-t" in self.firewall_rules_list: - self.ssh.run( - "uci del firewall.@rule[%s]" - % self.firewall_rules_list.index("ipsec nat-t") - ) - self.update_firewall_rules_list() - if "auth header" in self.firewall_rules_list: - self.ssh.run( - "uci del firewall.@rule[%s]" - % self.firewall_rules_list.index("auth header") - ) - self.remove_custom_firewall_rules() - self.service_manager.need_restart(SERVICE_FIREWALL) - - def add_custom_firewall_rules(self, rules): - """Backup current custom rules and replace with arguments. - - Args: - rules: A list of iptable rules to apply. - """ - backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup" - if not self.file_exists(backup_file_path): - self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH, backup_file_path)) - for rule in rules: - self.ssh.run("echo %s >> %s" % (rule, FIREWALL_CUSTOM_OPTION_PATH)) - - def remove_custom_firewall_rules(self): - """Clean up and recover custom firewall rules.""" - backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup" - if self.file_exists(backup_file_path): - self.ssh.run("mv %s %s" % (backup_file_path, FIREWALL_CUSTOM_OPTION_PATH)) - else: - self.log.debug("Did not find %s" % backup_file_path) - self.ssh.run("echo " " > %s" % FIREWALL_CUSTOM_OPTION_PATH) - - def disable_pptp_service(self): - """Disable pptp service.""" - self.package_remove(PPTP_PACKAGE) - - def setup_vpn_local_ip(self): - """Setup VPN Server local ip on OpenWrt for client ping verify.""" - self.ssh.run("uci set network.lan2=interface") - self.ssh.run("uci set network.lan2.type=bridge") - 
self.ssh.run("uci set network.lan2.ifname=eth1.2") - self.ssh.run("uci set network.lan2.proto=static") - self.ssh.run('uci set network.lan2.ipaddr="%s"' % self.l2tp.address) - self.ssh.run("uci set network.lan2.netmask=255.255.255.0") - self.ssh.run("uci set network.lan2=interface") - self.service_manager.reload(SERVICE_NETWORK) - self.commit_changes() - - def remove_vpn_local_ip(self): - """Discard vpn local ip on OpenWrt.""" - self.ssh.run("uci delete network.lan2") - self.service_manager.reload(SERVICE_NETWORK) - self.commit_changes() - - def enable_ipv6(self): - """Enable ipv6 on OpenWrt.""" - self.ssh.run("uci set network.lan.ipv6=1") - self.ssh.run("uci set network.wan.ipv6=1") - self.service_manager.enable("odhcpd") - self.service_manager.reload(SERVICE_NETWORK) - self.config.discard("disable_ipv6") - self.commit_changes() - - def disable_ipv6(self): - """Disable ipv6 on OpenWrt.""" - self.config.add("disable_ipv6") - self.ssh.run("uci set network.lan.ipv6=0") - self.ssh.run("uci set network.wan.ipv6=0") - self.service_manager.disable("odhcpd") - self.service_manager.reload(SERVICE_NETWORK) - self.commit_changes() - - def setup_ipv6_bridge(self): - """Setup ipv6 bridge for client have ability to access network.""" - self.config.add("setup_ipv6_bridge") - - self.ssh.run("uci set dhcp.lan.dhcpv6=relay") - self.ssh.run("uci set dhcp.lan.ra=relay") - self.ssh.run("uci set dhcp.lan.ndp=relay") - - self.ssh.run("uci set dhcp.wan6=dhcp") - self.ssh.run("uci set dhcp.wan6.dhcpv6=relay") - self.ssh.run("uci set dhcp.wan6.ra=relay") - self.ssh.run("uci set dhcp.wan6.ndp=relay") - self.ssh.run("uci set dhcp.wan6.master=1") - self.ssh.run("uci set dhcp.wan6.interface=wan6") - - # Enable service - self.service_manager.need_restart(SERVICE_ODHCPD) - self.commit_changes() - - def remove_ipv6_bridge(self): - """Discard ipv6 bridge on OpenWrt.""" - if "setup_ipv6_bridge" in self.config: - self.config.discard("setup_ipv6_bridge") - - self.ssh.run("uci set 
dhcp.lan.dhcpv6=server") - self.ssh.run("uci set dhcp.lan.ra=server") - self.ssh.run("uci delete dhcp.lan.ndp") - - self.ssh.run("uci delete dhcp.wan6") - - self.service_manager.need_restart(SERVICE_ODHCPD) - self.commit_changes() - - def _add_dhcp_option(self, args): - self.ssh.run('uci add_list dhcp.lan.dhcp_option="%s"' % args) - - def _remove_dhcp_option(self, args): - self.ssh.run('uci del_list dhcp.lan.dhcp_option="%s"' % args) - - def add_default_dns(self, addr_list): - """Add default dns server for client. - - Args: - addr_list: dns ip address for Openwrt client. - """ - self._add_dhcp_option("6,%s" % ",".join(addr_list)) - self.config.add("default_dns %s" % addr_list) - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - def del_default_dns(self, addr_list): - """Remove default dns server for client. - - Args: - addr_list: list of dns ip address for Openwrt client. - """ - self._remove_dhcp_option("6,%s" % addr_list) - self.config.discard("default_dns %s" % addr_list) - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - def add_default_v6_dns(self, addr_list): - """Add default v6 dns server for client. - - Args: - addr_list: dns ip address for Openwrt client. - """ - self.ssh.run('uci add_list dhcp.lan.dns="%s"' % addr_list) - self.config.add("default_v6_dns %s" % addr_list) - self.service_manager.need_restart(SERVICE_ODHCPD) - self.commit_changes() - - def del_default_v6_dns(self, addr_list): - """Del default v6 dns server for client. - - Args: - addr_list: dns ip address for Openwrt client. 
- """ - self.ssh.run('uci del_list dhcp.lan.dns="%s"' % addr_list) - self.config.add("default_v6_dns %s" % addr_list) - self.service_manager.need_restart(SERVICE_ODHCPD) - self.commit_changes() - - def add_ipv6_prefer_option(self): - self._add_dhcp_option("108,1800i") - self.config.add("ipv6_prefer_option") - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - def remove_ipv6_prefer_option(self): - self._remove_dhcp_option("108,1800i") - self.config.discard("ipv6_prefer_option") - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - def add_dhcp_rapid_commit(self): - self.create_config_file("dhcp-rapid-commit\n", "/etc/dnsmasq.conf") - self.config.add("add_dhcp_rapid_commit") - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - def remove_dhcp_rapid_commit(self): - self.create_config_file("", "/etc/dnsmasq.conf") - self.config.discard("add_dhcp_rapid_commit") - self.service_manager.need_restart(SERVICE_DNSMASQ) - self.commit_changes() - - def start_tcpdump(self, test_name, args="", interface="br-lan"): - """ "Start tcpdump on OpenWrt. - - Args: - test_name: Test name for create tcpdump file name. - args: Option args for tcpdump. - interface: Interface to logging. - Returns: - tcpdump_file_name: tcpdump file name on OpenWrt. - pid: tcpdump process id. - """ - self.package_install("tcpdump") - if not self.path_exists(TCPDUMP_DIR): - self.ssh.run("mkdir %s" % TCPDUMP_DIR) - tcpdump_file_name = "openwrt_%s_%s.pcap" % ( - test_name, - time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())), - ) - tcpdump_file_path = "".join([TCPDUMP_DIR, tcpdump_file_name]) - cmd = "tcpdump -i %s -s0 %s -w %s" % (interface, args, tcpdump_file_path) - self.ssh.run_async(cmd) - pid = self._get_tcpdump_pid(tcpdump_file_name) - if not pid: - raise signals.TestFailure("Fail to start tcpdump on OpenWrt.") - # Set delay to prevent tcpdump fail to capture target packet. 
- time.sleep(15) - return tcpdump_file_name - - def stop_tcpdump(self, tcpdump_file_name, pull_dir=None): - """Stop tcpdump on OpenWrt and pull the pcap file. - - Args: - tcpdump_file_name: tcpdump file name on OpenWrt. - pull_dir: Keep none if no need to pull. - Returns: - tcpdump abs_path on host. - """ - # Set delay to prevent tcpdump fail to capture target packet. - time.sleep(15) - pid = self._get_tcpdump_pid(tcpdump_file_name) - self.ssh.run("kill -9 %s" % pid, ignore_status=True) - if self.path_exists(TCPDUMP_DIR) and pull_dir: - tcpdump_path = "".join([TCPDUMP_DIR, tcpdump_file_name]) - tcpdump_remote_path = "/".join([pull_dir, tcpdump_file_name]) - tcpdump_local_path = "%s@%s:%s" % (self.user, self.ip, tcpdump_path) - utils.exe_cmd("scp %s %s" % (tcpdump_local_path, tcpdump_remote_path)) - - if self._get_tcpdump_pid(tcpdump_file_name): - raise signals.TestFailure("Failed to stop tcpdump on OpenWrt.") - if self.file_exists(tcpdump_path): - self.ssh.run("rm -f %s" % tcpdump_path) - return tcpdump_remote_path if pull_dir else None - - def clear_tcpdump(self): - self.ssh.run("killall tcpdump", ignore_status=True) - if self.ssh.run("pgrep tcpdump", ignore_status=True).stdout: - raise signals.TestFailure("Failed to clean up tcpdump process.") - if self.path_exists(TCPDUMP_DIR): - self.ssh.run("rm -f %s/*" % TCPDUMP_DIR) - - def _get_tcpdump_pid(self, tcpdump_file_name): - """Check tcpdump process on OpenWrt.""" - return self.ssh.run( - "pgrep -f %s" % (tcpdump_file_name), ignore_status=True - ).stdout - - def setup_mdns(self): - self.config.add("setup_mdns") - self.package_install(MDNS_PACKAGE) - self.commit_changes() - - def remove_mdns(self): - self.config.discard("setup_mdns") - self.package_remove(MDNS_PACKAGE) - self.commit_changes() - - def block_dns_response(self): - self.config.add("block_dns_response") - iptable_rules = list(network_const.FIREWALL_RULES_DISABLE_DNS_RESPONSE) - self.add_custom_firewall_rules(iptable_rules) - 
self.service_manager.need_restart(SERVICE_FIREWALL) - self.commit_changes() - - def unblock_dns_response(self): - self.config.discard("block_dns_response") - self.remove_custom_firewall_rules() - self.service_manager.need_restart(SERVICE_FIREWALL) - self.commit_changes() - - def setup_captive_portal(self, fas_fdqn, fas_port=2080): - """Create captive portal with Forwarding Authentication Service. - - Args: - fas_fdqn: String for captive portal page's fdqn add to local dns server. - fas_port: Port for captive portal page. - """ - self.package_install(CAPTIVE_PORTAL_PACKAGE) - self.config.add("setup_captive_portal %s" % fas_port) - self.ssh.run("uci set opennds.@opennds[0].fas_secure_enabled=2") - self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050") - self.ssh.run("uci set opennds.@opennds[0].fasport=%s" % fas_port) - self.ssh.run("uci set opennds.@opennds[0].fasremotefqdn=%s" % fas_fdqn) - self.ssh.run('uci set opennds.@opennds[0].faspath="/nds/fas-aes.php"') - self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890") - self.service_manager.need_restart(SERVICE_OPENNDS) - # Config uhttpd - self.ssh.run("uci set uhttpd.main.interpreter=.php=/usr/bin/php-cgi") - self.ssh.run("uci add_list uhttpd.main.listen_http=0.0.0.0:%s" % fas_port) - self.ssh.run("uci add_list uhttpd.main.listen_http=[::]:%s" % fas_port) - self.service_manager.need_restart(SERVICE_UHTTPD) - # cp fas-aes.php - self.create_folder("/www/nds/") - self.ssh.run("cp /etc/opennds/fas-aes.php /www/nds") - # Add fdqn - self.add_resource_record(fas_fdqn, LOCALHOST) - self.commit_changes() - - def remove_cpative_portal(self, fas_port=2080): - """Remove captive portal. - - Args: - fas_port: Port for captive portal page. 
- """ - # Remove package - self.package_remove(CAPTIVE_PORTAL_PACKAGE) - # Clean up config - self.ssh.run("rm /etc/config/opennds") - # Remove fdqn - self.clear_resource_record() - # Restore uhttpd - self.ssh.run("uci del uhttpd.main.interpreter") - self.ssh.run("uci del_list uhttpd.main.listen_http='0.0.0.0:%s'" % fas_port) - self.ssh.run("uci del_list uhttpd.main.listen_http='[::]:%s'" % fas_port) - self.service_manager.need_restart(SERVICE_UHTTPD) - # Clean web root - self.ssh.run("rm -r /www/nds") - self.config.discard("setup_captive_portal %s" % fas_port) - self.commit_changes() - - -class ServiceManager(object): - """Class for service on OpenWrt. - - Attributes: - ssh: ssh object for the AP. - _need_restart: Record service need to restart. - """ - - def __init__(self, ssh): - self.ssh = ssh - self._need_restart = set() - - def enable(self, service_name): - """Enable service auto start.""" - self.ssh.run("/etc/init.d/%s enable" % service_name) - - def disable(self, service_name): - """Disable service auto start.""" - self.ssh.run("/etc/init.d/%s disable" % service_name) - - def restart(self, service_name): - """Restart the service.""" - self.ssh.run("/etc/init.d/%s restart" % service_name) - - def reload(self, service_name): - """Restart the service.""" - self.ssh.run("/etc/init.d/%s reload" % service_name) - - def restart_services(self): - """Restart all services need to restart.""" - for service in self._need_restart: - if service == SERVICE_NETWORK: - self.reload(service) - self.restart(service) - self._need_restart = set() - - def stop(self, service_name): - """Stop the service.""" - self.ssh.run("/etc/init.d/%s stop" % service_name) - - def need_restart(self, service_name): - self._need_restart.add(service_name)
diff --git a/src/antlion/controllers/openwrt_lib/openwrt_constants.py b/src/antlion/controllers/openwrt_lib/openwrt_constants.py deleted file mode 100644 index 3b591d5..0000000 --- a/src/antlion/controllers/openwrt_lib/openwrt_constants.py +++ /dev/null
@@ -1,38 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -SYSTEM_INFO_CMD = "ubus call system board" - - -class OpenWrtWifiSecurity: - # Used by OpenWrt AP - WPA_PSK_DEFAULT = "psk" - WPA_PSK_CCMP = "psk+ccmp" - WPA_PSK_TKIP = "psk+tkip" - WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp" - WPA2_PSK_DEFAULT = "psk2" - WPA2_PSK_CCMP = "psk2+ccmp" - WPA2_PSK_TKIP = "psk2+tkip" - WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp" - - -class OpenWrtWifiSetting: - IFACE_2G = 2 - IFACE_5G = 3 - - -class OpenWrtModelMap: - NETGEAR_R8000 = ("radio2", "radio1")
diff --git a/src/antlion/controllers/openwrt_lib/wireless_config.py b/src/antlion/controllers/openwrt_lib/wireless_config.py deleted file mode 100644 index 9cdb309..0000000 --- a/src/antlion/controllers/openwrt_lib/wireless_config.py +++ /dev/null
@@ -1,53 +0,0 @@ -"""Class for Wireless config.""" - -NET_IFACE = "lan" - - -class WirelessConfig(object): - """Creates an object to hold wireless config. - - Attributes: - name: name of the wireless config - ssid: SSID of the network. - security: security of the wifi network. - band: band of the wifi network. - iface: network interface of the wifi network. - password: password for psk network. - wep_key: wep keys for wep network. - wep_key_num: key number for wep network. - radius_server_ip: IP address of radius server. - radius_server_port: Port number of radius server. - radius_server_secret: Secret key of radius server. - hidden: Boolean, if the wifi network is hidden. - ieee80211w: PMF bit of the wifi network. - """ - - def __init__( - self, - name, - ssid, - security, - band, - iface=NET_IFACE, - password=None, - wep_key=None, - wep_key_num=1, - radius_server_ip=None, - radius_server_port=None, - radius_server_secret=None, - hidden=False, - ieee80211w=None, - ): - self.name = name - self.ssid = ssid - self.security = security - self.band = band - self.iface = iface - self.password = password - self.wep_key = wep_key - self.wep_key_num = wep_key_num - self.radius_server_ip = radius_server_ip - self.radius_server_port = radius_server_port - self.radius_server_secret = radius_server_secret - self.hidden = hidden - self.ieee80211w = ieee80211w
diff --git a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py deleted file mode 100644 index d899a30..0000000 --- a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py +++ /dev/null
@@ -1,196 +0,0 @@ -"""Class to configure wireless settings.""" - -import time - -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.openwrt_lib.network_settings import SERVICE_DNSMASQ -from antlion.controllers.openwrt_lib.network_settings import ServiceManager - -LEASE_FILE = "/tmp/dhcp.leases" -OPEN_SECURITY = "none" -PSK1_SECURITY = "psk" -PSK_SECURITY = "psk2" -WEP_SECURITY = "wep" -ENT_SECURITY = "wpa2" -OWE_SECURITY = "owe" -SAE_SECURITY = "sae" -SAEMIXED_SECURITY = "sae-mixed" -ENABLE_RADIO = "0" -DISABLE_RADIO = "1" -ENABLE_HIDDEN = "1" -RADIO_2G = "radio1" -RADIO_5G = "radio0" - - -class WirelessSettingsApplier(object): - """Class for wireless settings. - - Attributes: - ssh: ssh object for the AP. - service_manager: Object manage service configuration - wireless_configs: a list of - antlion.controllers.openwrt_lib.wireless_config.WirelessConfig. - channel_2g: channel for 2G band. - channel_5g: channel for 5G band. - """ - - def __init__( - self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G - ): - """Initialize wireless settings. - - Args: - ssh: ssh connection object. - configs: a list of - antlion.controllers.openwrt_lib.wireless_config.WirelessConfig. - channel_2g: channel for 2G band. - channel_5g: channel for 5G band. 
- """ - self.ssh = ssh - self.service_manager = ServiceManager(ssh) - self.wireless_configs = configs - self.channel_2g = channel_2g - self.channel_5g = channel_5g - self.radio_2g = radio_2g - self.radio_5g = radio_5g - - def apply_wireless_settings(self): - """Configure wireless settings from a list of configs.""" - default_2g_iface = "default_" + self.radio_2g - default_5g_iface = "default_" + self.radio_5g - - # set channels for 2G and 5G bands - self.ssh.run( - "uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g) - ) - self.ssh.run( - "uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g) - ) - if self.channel_5g == 165: - self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g) - elif self.channel_5g == 132 or self.channel_5g == 136: - self.ssh.run("iw reg set ZA") - self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g) - - if self.channel_2g == 13: - self.ssh.run("iw reg set AU") - - # disable default OpenWrt SSID - self.ssh.run( - "uci set wireless.%s.disabled='%s'" % (default_2g_iface, DISABLE_RADIO) - ) - self.ssh.run( - "uci set wireless.%s.disabled='%s'" % (default_5g_iface, DISABLE_RADIO) - ) - - # Enable radios - self.ssh.run( - "uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO) - ) - self.ssh.run( - "uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO) - ) - - for config in self.wireless_configs: - # configure open network - if config.security == OPEN_SECURITY: - if config.band == hostapd_constants.BAND_2G: - self.ssh.run( - "uci set wireless.%s.ssid='%s'" - % (default_2g_iface, config.ssid) - ) - self.ssh.run( - "uci set wireless.%s.disabled='%s'" - % (default_2g_iface, ENABLE_RADIO) - ) - if config.hidden: - self.ssh.run( - "uci set wireless.%s.hidden='%s'" - % (default_2g_iface, ENABLE_HIDDEN) - ) - elif config.band == hostapd_constants.BAND_5G: - self.ssh.run( - "uci set wireless.%s.ssid='%s'" - % (default_5g_iface, config.ssid) - ) - self.ssh.run( - "uci set 
wireless.%s.disabled='%s'" - % (default_5g_iface, ENABLE_RADIO) - ) - if config.hidden: - self.ssh.run( - "uci set wireless.%s.hidden='%s'" - % (default_5g_iface, ENABLE_HIDDEN) - ) - continue - - self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name) - if config.band == hostapd_constants.BAND_2G: - self.ssh.run( - "uci set wireless.%s.device='%s'" % (config.name, self.radio_2g) - ) - else: - self.ssh.run( - "uci set wireless.%s.device='%s'" % (config.name, self.radio_5g) - ) - self.ssh.run( - "uci set wireless.%s.network='%s'" % (config.name, config.iface) - ) - self.ssh.run("uci set wireless.%s.mode='ap'" % config.name) - self.ssh.run("uci set wireless.%s.ssid='%s'" % (config.name, config.ssid)) - self.ssh.run( - "uci set wireless.%s.encryption='%s'" % (config.name, config.security) - ) - if ( - config.security == PSK_SECURITY - or config.security == SAE_SECURITY - or config.security == PSK1_SECURITY - or config.security == SAEMIXED_SECURITY - ): - self.ssh.run( - "uci set wireless.%s.key='%s'" % (config.name, config.password) - ) - elif config.security == WEP_SECURITY: - self.ssh.run( - "uci set wireless.%s.key%s='%s'" - % (config.name, config.wep_key_num, config.wep_key) - ) - self.ssh.run( - "uci set wireless.%s.key='%s'" % (config.name, config.wep_key_num) - ) - elif config.security == ENT_SECURITY: - self.ssh.run( - "uci set wireless.%s.auth_secret='%s'" - % (config.name, config.radius_server_secret) - ) - self.ssh.run( - "uci set wireless.%s.auth_server='%s'" - % (config.name, config.radius_server_ip) - ) - self.ssh.run( - "uci set wireless.%s.auth_port='%s'" - % (config.name, config.radius_server_port) - ) - if config.ieee80211w: - self.ssh.run( - "uci set wireless.%s.ieee80211w='%s'" - % (config.name, config.ieee80211w) - ) - if config.hidden: - self.ssh.run( - "uci set wireless.%s.hidden='%s'" % (config.name, ENABLE_HIDDEN) - ) - - self.ssh.run("uci commit wireless") - self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE)) - - def 
cleanup_wireless_settings(self): - """Reset wireless settings to default.""" - self.ssh.run("wifi down") - self.ssh.run("rm -f /etc/config/wireless") - self.ssh.run("wifi config") - if self.channel_5g == 132: - self.ssh.run("iw reg set US") - self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE)) - self.service_manager.restart(SERVICE_DNSMASQ) - time.sleep(9)
diff --git a/src/antlion/controllers/packet_capture.py b/src/antlion/controllers/packet_capture.py deleted file mode 100755 index ce3d8fd..0000000 --- a/src/antlion/controllers/packet_capture.py +++ /dev/null
@@ -1,310 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import threading -import time - -from antlion import logger -from antlion.controllers.ap_lib.hostapd_constants import FREQUENCY_MAP -from antlion.controllers.ap_lib.hostapd_constants import CENTER_CHANNEL_MAP -from antlion.controllers.ap_lib.hostapd_constants import VHT_CHANNEL -from antlion.controllers.utils_lib.ssh import connection -from antlion.controllers.utils_lib.ssh import formatter -from antlion.controllers.utils_lib.ssh import settings -from antlion.libs.proc.process import Process - -from mobly import asserts - -MOBLY_CONTROLLER_CONFIG_NAME = "PacketCapture" -ACTS_CONTROLLER_REFERENCE_NAME = "packet_capture" -BSS = "BSS" -BSSID = "BSSID" -FREQ = "freq" -FREQUENCY = "frequency" -LEVEL = "level" -MON_2G = "mon0" -MON_5G = "mon1" -BAND_IFACE = {"2G": MON_2G, "5G": MON_5G} -SCAN_IFACE = "wlan2" -SCAN_TIMEOUT = 60 -SEP = ":" -SIGNAL = "signal" -SSID = "SSID" - - -def create(configs): - return [PacketCapture(c) for c in configs] - - -def destroy(pcaps): - for pcap in pcaps: - pcap.close() - - -def get_info(pcaps): - return [pcap.ssh_settings.hostname for pcap in pcaps] - - -class PcapProperties(object): - """Class to maintain packet capture properties after starting tcpdump. 
- - Attributes: - proc: Process object of tcpdump - pcap_fname: File name of the tcpdump output file - pcap_file: File object for the tcpdump output file - """ - - def __init__(self, proc, pcap_fname, pcap_file): - """Initialize object.""" - self.proc = proc - self.pcap_fname = pcap_fname - self.pcap_file = pcap_file - - -class PacketCaptureError(Exception): - """Error related to Packet capture.""" - - -class PacketCapture(object): - """Class representing packet capturer. - - An instance of this class creates and configures two interfaces for monitor - mode; 'mon0' for 2G and 'mon1' for 5G and one interface for scanning for - wifi networks; 'wlan2' which is a dual band interface. - - Attributes: - pcap_properties: dict that specifies packet capture properties for a - band. - """ - - def __init__(self, configs): - """Initialize objects. - - Args: - configs: config for the packet capture. - """ - self.ssh_settings = settings.from_config(configs["ssh_config"]) - self.ssh = connection.SshConnection(self.ssh_settings) - self.log = logger.create_logger( - lambda msg: "[%s|%s] %s" - % (MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg) - ) - - self._create_interface(MON_2G, "monitor") - self._create_interface(MON_5G, "monitor") - self.managed_mode = True - result = self.ssh.run("ifconfig -a", ignore_status=True) - if result.stderr or SCAN_IFACE not in result.stdout: - self.managed_mode = False - if self.managed_mode: - self._create_interface(SCAN_IFACE, "managed") - - self.pcap_properties = dict() - self._pcap_stop_lock = threading.Lock() - - def _create_interface(self, iface, mode): - """Create interface of monitor/managed mode. - - Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode. 
- """ - if mode == "monitor": - self.ssh.run("ifconfig wlan%s down" % iface[-1], ignore_status=True) - self.ssh.run("iw dev %s del" % iface, ignore_status=True) - self.ssh.run( - "iw phy%s interface add %s type %s" % (iface[-1], iface, mode), - ignore_status=True, - ) - self.ssh.run("ip link set %s up" % iface, ignore_status=True) - result = self.ssh.run("iw dev %s info" % iface, ignore_status=True) - if result.stderr or iface not in result.stdout: - raise PacketCaptureError("Failed to configure interface %s" % iface) - - def _cleanup_interface(self, iface): - """Clean up monitor mode interfaces.""" - self.ssh.run("iw dev %s del" % iface, ignore_status=True) - result = self.ssh.run("iw dev %s info" % iface, ignore_status=True) - if not result.stderr or "No such device" not in result.stderr: - raise PacketCaptureError("Failed to cleanup monitor mode for %s" % iface) - - def _parse_scan_results(self, scan_result): - """Parses the scan dump output and returns list of dictionaries. - - Args: - scan_result: scan dump output from scan on mon interface. - - Returns: - Dictionary of found network in the scan. - The attributes returned are - a.) SSID - SSID of the network. - b.) LEVEL - signal level. - c.) FREQUENCY - WiFi band the network is on. - d.) BSSID - BSSID of the network. - """ - scan_networks = [] - network = {} - for line in scan_result.splitlines(): - if SEP not in line: - continue - if BSS in line: - network[BSSID] = line.split("(")[0].split()[-1] - field, value = line.lstrip().rstrip().split(SEP)[0:2] - value = value.lstrip() - if SIGNAL in line: - network[LEVEL] = int(float(value.split()[0])) - elif FREQ in line: - network[FREQUENCY] = int(value) - elif SSID in line: - network[SSID] = value - scan_networks.append(network) - network = {} - return scan_networks - - def get_wifi_scan_results(self): - """Starts a wifi scan on wlan2 interface. - - Returns: - List of dictionaries each representing a found network. 
- """ - if not self.managed_mode: - raise PacketCaptureError("Managed mode not setup") - result = self.ssh.run("iw dev %s scan" % SCAN_IFACE) - if result.stderr: - raise PacketCaptureError("Failed to get scan dump") - if not result.stdout: - return [] - return self._parse_scan_results(result.stdout) - - def start_scan_and_find_network(self, ssid): - """Start a wifi scan on wlan2 interface and find network. - - Args: - ssid: SSID of the network. - - Returns: - True/False if the network if found or not. - """ - curr_time = time.time() - while time.time() < curr_time + SCAN_TIMEOUT: - found_networks = self.get_wifi_scan_results() - for network in found_networks: - if network[SSID] == ssid: - return True - time.sleep(3) # sleep before next scan - return False - - def configure_monitor_mode(self, band, channel, bandwidth=20): - """Configure monitor mode. - - Args: - band: band to configure monitor mode for. - channel: channel to set for the interface. - bandwidth : bandwidth for VHT channel as 40,80,160 - - Returns: - True if configure successful. - False if not successful. - """ - - band = band.upper() - if band not in BAND_IFACE: - self.log.error("Invalid band. 
Must be 2g/2G or 5g/5G") - return False - - iface = BAND_IFACE[band] - if bandwidth == 20: - self.ssh.run( - "iw dev %s set channel %s" % (iface, channel), ignore_status=True - ) - else: - center_freq = None - for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]: - if channel in range(i, j + 1): - center_freq = (FREQUENCY_MAP[i] + FREQUENCY_MAP[j]) / 2 - break - asserts.assert_true(center_freq, "No match channel in VHT channel list.") - self.ssh.run( - "iw dev %s set freq %s %s %s" - % (iface, FREQUENCY_MAP[channel], bandwidth, center_freq), - ignore_status=True, - ) - - result = self.ssh.run("iw dev %s info" % iface, ignore_status=True) - if result.stderr or "channel %s" % channel not in result.stdout: - self.log.error("Failed to configure monitor mode for %s" % band) - return False - return True - - def start_packet_capture(self, band, log_path, pcap_fname): - """Start packet capture for band. - - band = 2G starts tcpdump on 'mon0' interface. - band = 5G starts tcpdump on 'mon1' interface. - - Args: - band: '2g' or '2G' and '5g' or '5G'. - log_path: test log path to save the pcap file. - pcap_fname: name of the pcap file. - - Returns: - pcap_proc: Process object of the tcpdump. 
- """ - band = band.upper() - if band not in BAND_IFACE.keys() or band in self.pcap_properties: - self.log.error("Invalid band or packet capture already running") - return None - - pcap_name = "%s_%s.pcap" % (pcap_fname, band) - pcap_fname = os.path.join(log_path, pcap_name) - pcap_file = open(pcap_fname, "w+b") - - tcpdump_cmd = "tcpdump -i %s -w - -U 2>/dev/null" % (BAND_IFACE[band]) - cmd = formatter.SshFormatter().format_command( - tcpdump_cmd, None, self.ssh_settings, extra_flags={"-q": None} - ) - pcap_proc = Process(cmd) - pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg), binary=True) - pcap_proc.start() - - self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname, pcap_file) - return pcap_proc - - def stop_packet_capture(self, proc): - """Stop the packet capture. - - Args: - proc: Process object of tcpdump to kill. - """ - for key, val in self.pcap_properties.items(): - if val.proc is proc: - break - else: - self.log.error("Failed to stop tcpdump. Invalid process.") - return - - proc.stop() - with self._pcap_stop_lock: - self.pcap_properties[key].pcap_file.close() - del self.pcap_properties[key] - - def close(self): - """Cleanup. - - Cleans up all the monitor mode interfaces and closes ssh connections. - """ - self._cleanup_interface(MON_2G) - self._cleanup_interface(MON_5G) - self.ssh.close()
diff --git a/src/antlion/controllers/packet_sender.py b/src/antlion/controllers/packet_sender.py deleted file mode 100644 index da22e79..0000000 --- a/src/antlion/controllers/packet_sender.py +++ /dev/null
@@ -1,941 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Collection of utility functions to generate and send custom packets. - -""" -import logging -import multiprocessing -import socket -import time - -import antlion.signals - -# http://www.secdev.org/projects/scapy/ -# On ubuntu, sudo pip3 install scapy -import scapy.all as scapy - -MOBLY_CONTROLLER_CONFIG_NAME = "PacketSender" -ACTS_CONTROLLER_REFERENCE_NAME = "packet_senders" - -GET_FROM_LOCAL_INTERFACE = "get_local" -MAC_BROADCAST = "ff:ff:ff:ff:ff:ff" -IPV4_BROADCAST = "255.255.255.255" -ARP_DST = "00:00:00:00:00:00" -RA_MAC = "33:33:00:00:00:01" -RA_IP = "ff02::1" -RA_PREFIX = "d00d::" -RA_PREFIX_LEN = 64 -DHCP_OFFER_OP = 2 -DHCP_OFFER_SRC_PORT = 67 -DHCP_OFFER_DST_PORT = 68 -DHCP_TRANS_ID = 0x01020304 -DNS_LEN = 3 -PING6_DATA = "BEST PING6 EVER" -PING4_TYPE = 8 -MDNS_TTL = 255 -MDNS_QTYPE = "PTR" -MDNS_UDP_PORT = 5353 -MDNS_V4_IP_DST = "224.0.0.251" -MDNS_V4_MAC_DST = "01:00:5E:00:00:FB" -MDNS_RECURSIVE = 1 -MDNS_V6_IP_DST = "FF02::FB" -MDNS_V6_MAC_DST = "33:33:00:00:00:FB" -ETH_TYPE_IP = 2048 -SAP_SPANNING_TREE = 0x42 -SNAP_OUI = 12 -SNAP_SSAP = 170 -SNAP_DSAP = 170 -SNAP_CTRL = 3 -LLC_XID_CONTROL = 191 -PAD_LEN_BYTES = 128 - - -def create(configs): - """Creates PacketSender controllers from a json config. 
- - Args: - The json configs that represent this controller - - Returns: - A new PacketSender - """ - return [PacketSender(c) for c in configs] - - -def destroy(objs): - """Destroys a list of PacketSenders and stops sending (if active). - - Args: - objs: A list of PacketSenders - """ - for pkt_sender in objs: - pkt_sender.stop_sending(True) - return - - -def get_info(objs): - """Get information on a list of packet senders. - - Args: - objs: A list of PacketSenders - - Returns: - Network interface name that is being used by each packet sender - """ - return [pkt_sender.interface for pkt_sender in objs] - - -class ThreadSendPacket(multiprocessing.Process): - """Creates a thread that keeps sending the same packet until a stop signal. - - Attributes: - stop_signal: signal to stop the thread execution - packet: desired packet to keep sending - interval: interval between consecutive packets (s) - interface: network interface name (e.g., 'eth0') - log: object used for logging - """ - - def __init__(self, signal, packet, interval, interface, log): - multiprocessing.Process.__init__(self) - self.stop_signal = signal - self.packet = packet - self.interval = interval - self.interface = interface - self.log = log - - def run(self): - self.log.info("Packet Sending Started.") - while True: - if self.stop_signal.is_set(): - # Poison pill means shutdown - self.log.info("Packet Sending Stopped.") - break - - try: - scapy.sendp(self.packet, iface=self.interface, verbose=0) - time.sleep(self.interval) - except Exception: - self.log.exception("Exception when trying to send packet") - return - - return - - -class PacketSenderError(antlion.signals.ControllerError): - """Raises exceptions encountered in packet sender lib.""" - - -class PacketSender(object): - """Send any custom packet over a desired interface. 
- - Attributes: - log: class logging object - thread_active: indicates whether or not the send thread is active - thread_send: thread object for the concurrent packet transmissions - stop_signal: event to stop the thread - interface: network interface name (e.g., 'eth0') - """ - - def __init__(self, ifname): - """Initiallize the PacketGenerator class. - - Args: - ifname: network interface name that will be used packet generator - """ - self.log = logging.getLogger() - self.packet = None - self.thread_active = False - self.thread_send = None - self.stop_signal = multiprocessing.Event() - self.interface = ifname - - def send_ntimes(self, packet, ntimes, interval): - """Sends a packet ntimes at a given interval. - - Args: - packet: custom built packet from Layer 2 up to Application layer - ntimes: number of packets to send - interval: interval between consecutive packet transmissions (s) - """ - if packet is None: - raise PacketSenderError( - "There is no packet to send. Create a packet first." - ) - - for _ in range(ntimes): - try: - scapy.sendp(packet, iface=self.interface, verbose=0) - time.sleep(interval) - except socket.error as excpt: - self.log.exception("Caught socket exception : %s" % excpt) - return - - def send_receive_ntimes(self, packet, ntimes, interval): - """Sends a packet and receives the reply ntimes at a given interval. - - Args: - packet: custom built packet from Layer 2 up to Application layer - ntimes: number of packets to send - interval: interval between consecutive packet transmissions and - the corresponding reply (s) - """ - if packet is None: - raise PacketSenderError( - "There is no packet to send. Create a packet first." 
- ) - - for _ in range(ntimes): - try: - scapy.srp1(packet, iface=self.interface, timeout=interval, verbose=0) - time.sleep(interval) - except socket.error as excpt: - self.log.exception("Caught socket exception : %s" % excpt) - return - - def start_sending(self, packet, interval): - """Sends packets in parallel with the main process. - - Creates a thread and keeps sending the same packet at a given interval - until a stop signal is received - - Args: - packet: custom built packet from Layer 2 up to Application layer - interval: interval between consecutive packets (s) - """ - if packet is None: - raise PacketSenderError( - "There is no packet to send. Create a packet first." - ) - - if self.thread_active: - raise PacketSenderError( - ( - "There is already an active thread. Stop it" - "before starting another transmission." - ) - ) - - self.thread_send = ThreadSendPacket( - self.stop_signal, packet, interval, self.interface, self.log - ) - self.thread_send.start() - self.thread_active = True - - def stop_sending(self, ignore_status=False): - """Stops the concurrent thread that is continuously sending packets.""" - if not self.thread_active: - if ignore_status: - return - else: - raise PacketSenderError( - "Error: There is no acive thread running to stop." - ) - - # Stop thread - self.stop_signal.set() - self.thread_send.join() - - # Just as precaution - if self.thread_send.is_alive(): - self.thread_send.terminate() - self.log.warning("Packet Sending forced to terminate") - - self.stop_signal.clear() - self.thread_send = None - self.thread_active = False - - -class ArpGenerator(object): - """Creates a custom ARP packet - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - src_ipv4: IPv4 address (Layer 3) of the source node - dst_ipv4: IPv4 address (Layer 3) of the destination node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. 
- - Args: - config_params: a dictionary with all the necessary packet fields. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.dst_ipv4 = config_params["dst_ipv4"] - if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv4 = scapy.get_if_addr(interf) - else: - self.src_ipv4 = config_params["src_ipv4"] - - def generate( - self, - op="who-has", - ip_dst=None, - ip_src=None, - hwsrc=None, - hwdst=None, - eth_dst=None, - ): - """Generates a custom ARP packet. - - Args: - op: ARP type (request or reply) - ip_dst: ARP ipv4 destination (Optional) - ip_src: ARP ipv4 source address (Optional) - hwsrc: ARP hardware source address (Optional) - hwdst: ARP hardware destination address (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - # Create IP layer - hw_src = hwsrc if hwsrc is not None else self.src_mac - hw_dst = hwdst if hwdst is not None else ARP_DST - ipv4_dst = ip_dst if ip_dst is not None else self.dst_ipv4 - ipv4_src = ip_src if ip_src is not None else self.src_ipv4 - ip4 = scapy.ARP(op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src) - - # Create Ethernet layer - mac_dst = eth_dst if eth_dst is not None else MAC_BROADCAST - ethernet = scapy.Ether(src=self.src_mac, dst=mac_dst) - - self.packet = ethernet / ip4 - return self.packet - - -class DhcpOfferGenerator(object): - """Creates a custom DHCP offer packet - - Attributes: - packet: desired built custom packet - subnet_mask: local network subnet mask - src_mac: MAC address (Layer 2) of the source node - dst_mac: MAC address (Layer 
2) of the destination node - src_ipv4: IPv4 address (Layer 3) of the source node - dst_ipv4: IPv4 address (Layer 3) of the destination node - gw_ipv4: IPv4 address (Layer 3) of the Gateway - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - self.subnet_mask = config_params["subnet_mask"] - self.dst_mac = config_params["dst_mac"] - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.dst_ipv4 = config_params["dst_ipv4"] - if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv4 = scapy.get_if_addr(interf) - else: - self.src_ipv4 = config_params["src_ipv4"] - - self.gw_ipv4 = config_params["gw_ipv4"] - - def generate(self, cha_mac=None, dst_ip=None): - """Generates a DHCP offer packet. 
- - Args: - cha_mac: hardware target address for DHCP offer (Optional) - dst_ip: ipv4 address of target host for renewal (Optional) - """ - - # Create DHCP layer - dhcp = scapy.DHCP( - options=[ - ("message-type", "offer"), - ("subnet_mask", self.subnet_mask), - ("server_id", self.src_ipv4), - ("end"), - ] - ) - - # Overwrite standard DHCP fields - sta_hw = cha_mac if cha_mac is not None else self.dst_mac - sta_ip = dst_ip if dst_ip is not None else self.dst_ipv4 - - # Create Boot - bootp = scapy.BOOTP( - op=DHCP_OFFER_OP, - yiaddr=sta_ip, - siaddr=self.src_ipv4, - giaddr=self.gw_ipv4, - chaddr=scapy.mac2str(sta_hw), - xid=DHCP_TRANS_ID, - ) - - # Create UDP - udp = scapy.UDP(sport=DHCP_OFFER_SRC_PORT, dport=DHCP_OFFER_DST_PORT) - - # Create IP layer - ip4 = scapy.IP(src=self.src_ipv4, dst=IPV4_BROADCAST) - - # Create Ethernet layer - ethernet = scapy.Ether(dst=MAC_BROADCAST, src=self.src_mac) - - self.packet = ethernet / ip4 / udp / bootp / dhcp - return self.packet - - -class NsGenerator(object): - """Creates a custom Neighbor Solicitation (NS) packet - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc) - src_ipv6: IPv6 address (Layer 3) of the source node - dst_ipv6: IPv6 address (Layer 3) of the destination node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... 
- The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.dst_ipv6 = config_params["dst_ipv6"] - self.src_ipv6_type = config_params["src_ipv6_type"] - if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type) - else: - self.src_ipv6 = config_params["src_ipv6"] - - def generate(self, ip_dst=None, eth_dst=None): - """Generates a Neighbor Solicitation (NS) packet (ICMP over IPv6). - - Args: - ip_dst: NS ipv6 destination (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - # Compute IP addresses - target_ip6 = ip_dst if ip_dst is not None else self.dst_ipv6 - ndst_ip = socket.inet_pton(socket.AF_INET6, target_ip6) - nnode_mcast = scapy.in6_getnsma(ndst_ip) - node_mcast = socket.inet_ntop(socket.AF_INET6, nnode_mcast) - # Compute MAC addresses - hw_dst = eth_dst if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast) - - # Create IPv6 layer - base = scapy.IPv6(dst=node_mcast, src=self.src_ipv6) - neighbor_solicitation = scapy.ICMPv6ND_NS(tgt=target_ip6) - src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac) - ip6 = base / neighbor_solicitation / src_ll_addr - - # Create Ethernet layer - ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst) - - self.packet = ethernet / ip6 - return self.packet - - -class RaGenerator(object): - """Creates a custom Router Advertisement (RA) packet - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc) - src_ipv6: IPv6 address (Layer 3) of the source node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network 
and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.src_ipv6_type = config_params["src_ipv6_type"] - if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type) - else: - self.src_ipv6 = config_params["src_ipv6"] - - def generate( - self, lifetime, enableDNS=False, dns_lifetime=0, ip_dst=None, eth_dst=None - ): - """Generates a Router Advertisement (RA) packet (ICMP over IPv6). - - Args: - lifetime: RA lifetime - enableDNS: Add RDNSS option to RA (Optional) - dns_lifetime: Set DNS server lifetime (Optional) - ip_dst: IPv6 destination address (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - # Overwrite standard fields if desired - ip6_dst = ip_dst if ip_dst is not None else RA_IP - hw_dst = eth_dst if eth_dst is not None else RA_MAC - - # Create IPv6 layer - base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6) - router_solicitation = scapy.ICMPv6ND_RA(routerlifetime=lifetime) - src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac) - prefix = scapy.ICMPv6NDOptPrefixInfo(prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX) - if enableDNS: - rndss = scapy.ICMPv6NDOptRDNSS( - lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN - ) - ip6 = base / router_solicitation / src_ll_addr / prefix / rndss - else: - ip6 = base / router_solicitation / src_ll_addr / prefix - - # Create Ethernet layer - ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst) - - self.packet = ethernet / 
ip6 - return self.packet - - -class Ping6Generator(object): - """Creates a custom Ping v6 packet (i.e., ICMP over IPv6) - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - dst_mac: MAC address (Layer 2) of the destination node - src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc) - src_ipv6: IPv6 address (Layer 3) of the source node - dst_ipv6: IPv6 address (Layer 3) of the destination node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - self.dst_mac = config_params["dst_mac"] - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.dst_ipv6 = config_params["dst_ipv6"] - self.src_ipv6_type = config_params["src_ipv6_type"] - if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type) - else: - self.src_ipv6 = config_params["src_ipv6"] - - def generate(self, ip_dst=None, eth_dst=None): - """Generates a Ping6 packet (i.e., Echo Request) - - Args: - ip_dst: IPv6 destination address (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - # Overwrite standard fields if desired - ip6_dst = ip_dst if ip_dst is not None else self.dst_ipv6 - hw_dst = eth_dst if eth_dst is not None else self.dst_mac - - # Create IPv6 layer - base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6) - echo_request = scapy.ICMPv6EchoRequest(data=PING6_DATA) - - ip6 = base / echo_request 
- - # Create Ethernet layer - ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst) - - self.packet = ethernet / ip6 - return self.packet - - -class Ping4Generator(object): - """Creates a custom Ping v4 packet (i.e., ICMP over IPv4) - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - dst_mac: MAC address (Layer 2) of the destination node - src_ipv4: IPv4 address (Layer 3) of the source node - dst_ipv4: IPv4 address (Layer 3) of the destination node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - self.dst_mac = config_params["dst_mac"] - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.dst_ipv4 = config_params["dst_ipv4"] - if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv4 = scapy.get_if_addr(interf) - else: - self.src_ipv4 = config_params["src_ipv4"] - - def generate(self, ip_dst=None, eth_dst=None): - """Generates a Ping4 packet (i.e., Echo Request) - - Args: - ip_dst: IP destination address (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - - # Overwrite standard fields if desired - sta_ip = ip_dst if ip_dst is not None else self.dst_ipv4 - sta_hw = eth_dst if eth_dst is not None else self.dst_mac - - # Create IPv6 layer - base = scapy.IP(src=self.src_ipv4, dst=sta_ip) - echo_request = scapy.ICMP(type=PING4_TYPE) - - ip4 = base / echo_request - - # Create Ethernet layer - ethernet = 
scapy.Ether(src=self.src_mac, dst=sta_hw) - - self.packet = ethernet / ip4 - return self.packet - - -class Mdns6Generator(object): - """Creates a custom mDNS IPv6 packet - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc) - src_ipv6: IPv6 address (Layer 3) of the source node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - self.src_ipv6_type = config_params["src_ipv6_type"] - if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type) - else: - self.src_ipv6 = config_params["src_ipv6"] - - def generate(self, ip_dst=None, eth_dst=None): - """Generates a mDNS v6 packet for multicast DNS config - - Args: - ip_dst: IPv6 destination address (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - - # Overwrite standard fields if desired - sta_ip = ip_dst if ip_dst is not None else MDNS_V6_IP_DST - sta_hw = eth_dst if eth_dst is not None else MDNS_V6_MAC_DST - - # Create mDNS layer - qdServer = scapy.DNSQR(qname=self.src_ipv6, qtype=MDNS_QTYPE) - mDNS = scapy.DNS(rd=MDNS_RECURSIVE, qd=qdServer) - - # Create UDP - udp = scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT) - - # Create IP layer - ip6 = scapy.IPv6(src=self.src_ipv6, dst=sta_ip) - - # Create 
Ethernet layer - ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw) - - self.packet = ethernet / ip6 / udp / mDNS - return self.packet - - -class Mdns4Generator(object): - """Creates a custom mDNS v4 packet - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - src_ipv4: IPv4 address (Layer 3) of the source node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE: - self.src_ipv4 = scapy.get_if_addr(interf) - else: - self.src_ipv4 = config_params["src_ipv4"] - - def generate(self, ip_dst=None, eth_dst=None): - """Generates a mDNS v4 packet for multicast DNS config - - Args: - ip_dst: IP destination address (Optional) - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - - # Overwrite standard fields if desired - sta_ip = ip_dst if ip_dst is not None else MDNS_V4_IP_DST - sta_hw = eth_dst if eth_dst is not None else MDNS_V4_MAC_DST - - # Create mDNS layer - qdServer = scapy.DNSQR(qname=self.src_ipv4, qtype=MDNS_QTYPE) - mDNS = scapy.DNS(rd=MDNS_RECURSIVE, qd=qdServer) - - # Create UDP - udp = scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT) - - # Create IP layer - ip4 = scapy.IP(src=self.src_ipv4, dst=sta_ip, ttl=255) - - # Create Ethernet layer - ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw) - - self.packet = ethernet / ip4 
/ udp / mDNS - return self.packet - - -class Dot3Generator(object): - """Creates a custom 802.3 Ethernet Frame - - Attributes: - packet: desired built custom packet - src_mac: MAC address (Layer 2) of the source node - src_ipv4: IPv4 address (Layer 3) of the source node - """ - - def __init__(self, **config_params): - """Initialize the class with the required network and packet params. - - Args: - config_params: contains all the necessary packet parameters. - Some fields can be generated automatically. For example: - {'subnet_mask': '255.255.255.0', - 'dst_ipv4': '192.168.1.3', - 'src_ipv4: 'get_local', ... - The key can also be 'get_local' which means the code will read - and use the local interface parameters - """ - interf = config_params["interf"] - self.packet = None - self.dst_mac = config_params["dst_mac"] - if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE: - self.src_mac = scapy.get_if_hwaddr(interf) - else: - self.src_mac = config_params["src_mac"] - - def _build_ether(self, eth_dst=None): - """Creates the basic frame for 802.3 - - Args: - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - # Overwrite standard fields if desired - sta_hw = eth_dst if eth_dst is not None else self.dst_mac - # Create Ethernet layer - dot3_base = scapy.Dot3(src=self.src_mac, dst=sta_hw) - - return dot3_base - - def _pad_frame(self, frame): - """Pads the frame with default length and values - - Args: - frame: Ethernet (layer 2) to be padded - """ - frame.len = PAD_LEN_BYTES - pad = scapy.Padding() - pad.load = "\x00" * PAD_LEN_BYTES - return frame / pad - - def generate(self, eth_dst=None): - """Generates the basic 802.3 frame and adds padding - - Args: - eth_dst: Ethernet (layer 2) destination address (Optional) - """ - # Create 802.3 Base - ethernet = self._build_ether(eth_dst) - - self.packet = self._pad_frame(ethernet) - return self.packet - - def generate_llc(self, eth_dst=None, dsap=2, ssap=3, ctrl=LLC_XID_CONTROL): - """Generates the 802.3 frame 
with LLC and adds padding - - Args: - eth_dst: Ethernet (layer 2) destination address (Optional) - dsap: Destination Service Access Point (Optional) - ssap: Source Service Access Point (Optional) - ctrl: Control (Optional) - """ - # Create 802.3 Base - ethernet = self._build_ether(eth_dst) - - # Create LLC layer - llc = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl) - - # Append and create packet - self.packet = self._pad_frame(ethernet / llc) - return self.packet - - def generate_snap( - self, - eth_dst=None, - dsap=SNAP_DSAP, - ssap=SNAP_SSAP, - ctrl=SNAP_CTRL, - oui=SNAP_OUI, - code=ETH_TYPE_IP, - ): - """Generates the 802.3 frame with LLC and SNAP and adds padding - - Args: - eth_dst: Ethernet (layer 2) destination address (Optional) - dsap: Destination Service Access Point (Optional) - ssap: Source Service Access Point (Optional) - ctrl: Control (Optional) - oid: Protocol Id or Org Code (Optional) - code: EtherType (Optional) - """ - # Create 802.3 Base - ethernet = self._build_ether(eth_dst) - - # Create 802.2 LLC header - llc = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl) - - # Create 802.3 SNAP header - snap = scapy.SNAP(OUI=oui, code=code) - - # Append and create packet - self.packet = self._pad_frame(ethernet / llc / snap) - return self.packet - - -def get_if_addr6(intf, address_type): - """Returns the Ipv6 address from a given local interface. - - Returns the desired IPv6 address from the interface 'intf' in human - readable form. The address type is indicated by the IPv6 constants like - IPV6_ADDR_LINKLOCAL, IPV6_ADDR_GLOBAL, etc. If no address is found, - None is returned. - - Args: - intf: desired interface name - address_type: addrees typle like LINKLOCAL or GLOBAL - - Returns: - Ipv6 address of the specified interface in human readable format - """ - for if_list in scapy.in6_getifaddr(): - if if_list[2] == intf and if_list[1] == address_type: - return if_list[0] - - return None
diff --git a/src/antlion/controllers/pdu.py b/src/antlion/controllers/pdu.py deleted file mode 100644 index 412742e..0000000 --- a/src/antlion/controllers/pdu.py +++ /dev/null
@@ -1,215 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import importlib -import logging - -from antlion import tracelogger - -MOBLY_CONTROLLER_CONFIG_NAME = "PduDevice" -ACTS_CONTROLLER_REFERENCE_NAME = "pdu_devices" - - -def create(configs): - """Creates a PduDevice for each config in configs. - - Args: - configs: List of configs from PduDevice field. - Fields: - device: a string "<brand>.<model>" that corresponds to module - in pdu_lib/ - host: a string of the device ip address - username (optional): a string of the username for device sign-in - password (optional): a string of the password for device sign-in - Return: - A list of PduDevice objects. - """ - if configs: - pdus = [] - for config in configs: - device = config.get("device") - if not device: - raise PduError("Config must provide a device") - - host = config.get("host") - if not device: - raise PduError("Config must provide a host ip address") - username = config.get("username") - password = config.get("password") - pdu = _create_device(device, host, username, password) - pdus.append(pdu) - return pdus - - -def destroy(pdu_list): - """Ensure any connections to devices are closed. - - Args: - pdu_list: A list of PduDevice objects. - """ - for pdu in pdu_list: - pdu.close() - - -def get_info(pdu_list): - """Retrieves info from a list of PduDevice objects. - - Args: - pdu_list: A list of PduDevice objects. 
- Return: - A list containing a dictionary for each PduDevice, with keys: - 'host': a string of the device ip address - 'username': a string of the username - 'password': a string of the password - """ - info = [] - for pdu in pdu_list: - info.append( - {"host": pdu.host, "username": pdu.username, "password": pdu.password} - ) - return info - - -def _create_device(device, host, username, password): - """Factory method that returns an instance of PduDevice implementation - based on the device string. - """ - module_name = "antlion.controllers.pdu_lib." + device - module = importlib.import_module(module_name) - return module.PduDevice(host, username, password) - - -def get_pdu_port_for_device(device_pdu_config, pdus): - """Retrieves the pdu object and port of that PDU powering a given device. - This is especially necessary when there are multilpe devices on a single PDU - or multiple PDUs registered. - - Args: - device_pdu_config: a dict, representing the config of the device. - pdus: a list of registered PduDevice objects. - - Returns: - A tuple: (PduObject for the device, string port number on that PDU). - - Raises: - ValueError, if there is no PDU matching the given host in the config. - - Example ACTS config: - ... - "testbed": [ - ... - "FuchsiaDevice": [ - { - "ip": "<device_ip>", - "ssh_config": "/path/to/sshconfig", - "PduDevice": { - "host": "192.168.42.185", - "port": 2 - } - } - ], - "AccessPoint": [ - { - "ssh_config": { - ... - }, - "PduDevice": { - "host": "192.168.42.185", - "port" 1 - } - } - ], - "PduDevice": [ - { - "device": "synaccess.np02b", - "host": "192.168.42.185" - } - ] - ], - ... - """ - pdu_ip = device_pdu_config["host"] - port = device_pdu_config["port"] - for pdu in pdus: - if pdu.host == pdu_ip: - return pdu, port - raise ValueError("No PduDevice with host: %s" % pdu_ip) - - -class PduDevice(object): - """An object that defines the basic Pdu functionality and abstracts - the actual hardware. - - This is a pure abstract class. 
Implementations should be of the same - class name (eg. class PduDevice(pdu.PduDevice)) and exist in - pdu_lib/<brand>/<device_name>.py. PduDevice objects should not be - instantiated by users directly. - """ - - def __init__(self, host, username, password): - if type(self) is PduDevice: - raise NotImplementedError("Base class: cannot be instantiated directly") - self.host = host - self.username = username - self.password = password - self.log = tracelogger.TraceLogger(logging.getLogger()) - - def on_all(self): - """Turns on all outlets on the device.""" - raise NotImplementedError("Base class: cannot be called directly") - - def off_all(self): - """Turns off all outlets on the device.""" - raise NotImplementedError("Base class: cannot be called directly") - - def on(self, outlet): - """Turns on specific outlet on the device. - Args: - outlet: a string of the outlet to turn on. - """ - raise NotImplementedError("Base class: cannot be called directly") - - def off(self, outlet): - """Turns off specific outlet on the device. - Args: - outlet: a string of the outlet to turn off. - """ - raise NotImplementedError("Base class: cannot be called directly") - - def reboot(self, outlet): - """Toggles a specific outlet on the device to off, then to on. - Args: - outlet: a string of the outlet to reboot. - """ - raise NotImplementedError("Base class: cannot be called directly") - - def status(self): - """Retrieves the status of the outlets on the device. - - Return: - A dictionary matching outlet string to: - True: if outlet is On - False: if outlet is Off - """ - raise NotImplementedError("Base class: cannot be called directly") - - def close(self): - """Closes connection to the device.""" - raise NotImplementedError("Base class: cannot be called directly") - - -class PduError(Exception): - """An exception for use within PduDevice implementations"""
diff --git a/src/antlion/controllers/pdu_lib/__init__.py b/src/antlion/controllers/pdu_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/pdu_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/__init__.py b/src/antlion/controllers/pdu_lib/digital_loggers/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/pdu_lib/digital_loggers/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py deleted file mode 100644 index 1154f95..0000000 --- a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py +++ /dev/null
@@ -1,154 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import signals -from antlion.controllers import pdu - -# Create an optional dependency for dlipower since it has a transitive -# dependency on beautifulsoup4. This library is difficult to maintain as a -# third_party dependency in Fuchsia since it is hosted on launchpad. -# -# TODO(b/246999212): Explore alternatives to the dlipower package -try: - import dlipower - - HAS_IMPORT_DLIPOWER = True -except ImportError: - HAS_IMPORT_DLIPOWER = False - - -class PduDevice(pdu.PduDevice): - """Implementation of pure abstract PduDevice object for the Digital Loggers - WebPowerSwitch PDUs. - - This controller supports the following Digital Loggers PDUs: - - Pro (VII) - - WebPowerSwitch V - - WebPowerSwitch IV - - WebPowerSwitch III - - WebPowerSwitch II - - Ethernet Power Controller III - """ - - def __init__(self, host, username, password): - """ - Note: This may require allowing plaintext password sign in on the - power switch, which can be configure in the device's control panel. 
- """ - super(PduDevice, self).__init__(host, username, password) - - if not HAS_IMPORT_DLIPOWER: - raise signals.ControllerError( - "Digital Loggers PDUs are not supported with current installed " - "packages; install the dlipower package to add support" - ) - - self.power_switch = dlipower.PowerSwitch( - hostname=host, userid=username, password=password - ) - # Connection is made at command execution, this verifies the device - # can be reached before continuing. - if not self.power_switch.statuslist(): - raise pdu.PduError( - "Failed to connect get WebPowerSwitch status. Incorrect host, " - "userid, or password?" - ) - else: - self.log.info("Connected to WebPowerSwitch (%s)." % host) - - def on_all(self): - """Turn on power to all outlets.""" - for outlet in self.power_switch: - outlet.on() - self._verify_state(outlet.name, "ON") - - def off_all(self): - """Turn off power to all outlets.""" - for outlet in self.power_switch: - outlet.off() - self._verify_state(outlet.name, "OFF") - - def on(self, outlet): - """Turn on power to given outlet - - Args: - outlet: string or int, the outlet name/number - """ - self.power_switch.command_on_outlets("on", str(outlet)) - self._verify_state(outlet, "ON") - - def off(self, outlet): - """Turn off power to given outlet - - Args: - outlet: string or int, the outlet name/number - """ - self.power_switch.command_on_outlets("off", str(outlet)) - self._verify_state(outlet, "OFF") - - def reboot(self, outlet): - """Cycle the given outlet to OFF and back ON. - - Args: - outlet: string or int, the outlet name/number - """ - self.power_switch.command_on_outlets("cycle", str(outlet)) - self._verify_state(outlet, "ON") - - def status(self): - """Return the status of the switch outlets. 
- - Return: - a dict mapping outlet string numbers to: - True if outlet is ON - False if outlet is OFF - """ - status_list = self.power_switch.statuslist() - return {str(outlet): state == "ON" for outlet, _, state in status_list} - - def close(self): - # Since there isn't a long-running connection, close is not applicable. - pass - - def _verify_state(self, outlet, expected_state, timeout=3): - """Verify that the state of a given outlet is at an expected state. - There can be a slight delay in when the device receives the - command and when the state actually changes (especially when powering - on). This function is used to verify the change has occurred before - exiting. - - Args: - outlet: string, the outlet name or number to check state. - expected_state: string, 'ON' or 'OFF' - - Returns if actual state reaches expected state. - - Raises: - PduError: if state has not reached expected state at timeout. - """ - for _ in range(timeout): - actual_state = self.power_switch.status(str(outlet)) - if actual_state == expected_state: - return - else: - self.log.debug( - "Outlet %s not yet in state %s" % (outlet, expected_state) - ) - raise pdu.PduError( - "Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n" - "Expected State: %s\n" - "Actual State: %s" % (outlet, self.host, expected_state, actual_state) - )
diff --git a/src/antlion/controllers/pdu_lib/synaccess/__init__.py b/src/antlion/controllers/pdu_lib/synaccess/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/pdu_lib/synaccess/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/pdu_lib/synaccess/np02b.py b/src/antlion/controllers/pdu_lib/synaccess/np02b.py deleted file mode 100644 index 70624f0..0000000 --- a/src/antlion/controllers/pdu_lib/synaccess/np02b.py +++ /dev/null
@@ -1,187 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.controllers import pdu - -import re -import telnetlib -import time - - -class PduDevice(pdu.PduDevice): - """Implementation of pure abstract PduDevice object for the Synaccess np02b - Pdu. - """ - - def __init__(self, host, username, password): - super(PduDevice, self).__init__(host, username, password) - self.tnhelper = _TNHelperNP02B(host) - - def on_all(self): - """Turns on both outlets on the np02b.""" - self.tnhelper.cmd("ps 1") - self._verify_state({"1": True, "2": True}) - - def off_all(self): - """Turns off both outlets on the np02b.""" - self.tnhelper.cmd("ps 0") - self._verify_state({"1": False, "2": False}) - - def on(self, outlet): - """Turns on specific outlet on the np02b. - - Args: - outlet: string of the outlet to turn on ('1' or '2') - """ - self.tnhelper.cmd("pset %s 1" % outlet) - self._verify_state({outlet: True}) - - def off(self, outlet): - """Turns off a specifc outlet on the np02b. - - Args: - outlet: string of the outlet to turn off ('1' or '2') - """ - self.tnhelper.cmd("pset %s 0" % outlet) - self._verify_state({outlet: False}) - - def reboot(self, outlet): - """Toggles a specific outlet on the np02b to off, then to on. 
- - Args: - outlet: string of the outlet to reboot ('1' or '2') - """ - self.off(outlet) - self._verify_state({outlet: False}) - self.on(outlet) - self._verify_state({outlet: True}) - - def status(self): - """Returns the status of the np02b outlets. - - Return: - a dict mapping outlet strings ('1' and '2') to: - True if outlet is ON - False if outlet is OFF - """ - res = self.tnhelper.cmd("pshow") - status_list = re.findall("(ON|OFF)", res) - status_dict = {} - for i, status in enumerate(status_list): - status_dict[str(i + 1)] = status == "ON" - return status_dict - - def close(self): - """Ensure connection to device is closed. - - In this implementation, this shouldn't be necessary, but could be in - others that open on creation. - """ - self.tnhelper.close() - - def _verify_state(self, expected_state, timeout=3): - """Returns when expected_state is reached on device. - - In order to prevent command functions from exiting until the desired - effect has occurred, this function verifys that the expected_state is a - subset of the desired state. - - Args: - expected_state: a dict representing the expected state of one or - more outlets on the device. Maps outlet strings ('1' and/or '2') - to: - True if outlet is expected to be ON. - False if outlet is expected to be OFF. - timeout (default: 3): time in seconds until raising an exception. - - Return: - True, if expected_state is reached. - - Raises: - PduError if expected_state has not been reached by timeout. - """ - end_time = time.time() + timeout - while time.time() < end_time: - actual_state = self.status() - if expected_state.items() <= actual_state.items(): - return True - time.sleep(0.1) - raise pdu.PduError( - "Timeout while verifying state.\n" - "Expected State: %s\n" - "Actual State: %s" % (expected_state, actual_state) - ) - - -class _TNHelperNP02B(object): - """An internal helper class for Telnet with the Synaccess NP02B Pdu. 
This - helper is specific to the idiosyncrasies of the NP02B and therefore should - not be used with other devices. - """ - - def __init__(self, host): - self._tn = telnetlib.Telnet() - self.host = host - self.tx_cmd_separator = "\n\r" - self.rx_cmd_separator = "\r\n" - self.prompt = ">" - - """ - Executes a command on the device via telnet. - Args: - cmd_str: A string of the command to be run. - Returns: - A string of the response from the valid command (often empty). - """ - - def cmd(self, cmd_str): - # Open session - try: - self._tn.open(self.host, timeout=3) - except: - raise pdu.PduError("Failed to open telnet session to host (%s)" % self.host) - time.sleep(0.1) - - # Read to end of first prompt - cmd_str.strip(self.tx_cmd_separator) - self._tn.read_eager() - time.sleep(0.1) - - # Write command and read all output text - self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator)) - res = self._tn.read_until(utils.ascii_string(self.prompt), 2) - - # Parses out the commands output - if res is None: - raise pdu.PduError("Command failed: %s" % cmd_str) - res = res.decode() - if re.search("Invalid", res): - raise pdu.PduError("Command Invalid: %s" % cmd_str) - res = res.replace(self.prompt, "") - res = res.replace(self.tx_cmd_separator, "") - res = res.replace(self.rx_cmd_separator, "") - res = res.replace(cmd_str, "") - - # Close session - self._tn.close() - - time.sleep(0.5) - - return res - - def close(self): - self._tn.close()
diff --git a/src/antlion/controllers/sl4a_lib/__init__.py b/src/antlion/controllers/sl4a_lib/__init__.py deleted file mode 100644 index 7f1a899..0000000 --- a/src/antlion/controllers/sl4a_lib/__init__.py +++ /dev/null
@@ -1,15 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
diff --git a/src/antlion/controllers/sl4a_lib/error_reporter.py b/src/antlion/controllers/sl4a_lib/error_reporter.py deleted file mode 100644 index e560567..0000000 --- a/src/antlion/controllers/sl4a_lib/error_reporter.py +++ /dev/null
@@ -1,243 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import re -import threading -import time - -from antlion import utils - - -class ErrorLogger(logging.LoggerAdapter): - """A logger for a given error report.""" - - def __init__(self, label): - self.label = label - super(ErrorLogger, self).__init__(logging.getLogger(), {}) - - def process(self, msg, kwargs): - """Transforms a log message to be in a given format.""" - return "[Error Report|%s] %s" % (self.label, msg), kwargs - - -class ErrorReporter(object): - """A class that reports errors and diagnoses possible points of failure. - - Attributes: - max_reports: The maximum number of reports that should be reported. - Defaulted to 1 to prevent multiple reports from reporting at the - same time over one another. - name: The name of the report to be used in the error logs. - """ - - def __init__(self, name, max_reports=1): - """Creates an error report. - - Args: - name: The name of the error report. - max_reports: Sets the maximum number of reports to this value. - """ - self.name = name - self.max_reports = max_reports - self._ticket_number = 0 - self._ticket_lock = threading.Lock() - self._current_request_count = 0 - self._accept_requests = True - - def create_error_report(self, sl4a_manager, sl4a_session, rpc_connection): - """Creates an error report, if possible. - - Returns: - False iff a report cannot be created. 
- """ - if not self._accept_requests: - return False - - self._current_request_count += 1 - - try: - ticket = self._get_report_ticket() - if not ticket: - return False - - report = ErrorLogger("%s|%s" % (self.name, ticket)) - report.info("Creating error report.") - - ( - self.report_on_adb(sl4a_manager.adb, report) - and self.report_device_processes(sl4a_manager.adb, report) - and self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report) - and self.report_sl4a_session(sl4a_manager, sl4a_session, report) - ) - - return True - finally: - self._current_request_count -= 1 - - def report_on_adb(self, adb, report): - """Creates an error report for ADB. Returns false if ADB has failed.""" - adb_uptime = utils.get_command_uptime('"adb .* server"') - if adb_uptime: - report.info( - "The adb daemon has an uptime of %s " "([[dd-]hh:]mm:ss)." % adb_uptime - ) - else: - report.warning( - "The adb daemon (on the host machine) is not " - "running. All forwarded ports have been removed." - ) - return False - - devices_output = adb.devices() - if adb.serial not in devices_output: - report.warning( - "This device cannot be found by ADB. The device may have shut " - "down or disconnected." - ) - return False - elif re.findall(r"%s\s+offline" % adb.serial, devices_output): - report.warning( - "The device is marked as offline in ADB. We are no longer able " - "to access the device." - ) - return False - else: - report.info("The device is online and accessible through ADB calls.") - return True - - def report_device_processes(self, adb, report): - """Creates an error report for the device's required processes. - - Returns: - False iff user-apks cannot be communicated with over tcp. - """ - zygote_uptime = utils.get_device_process_uptime(adb, "zygote") - if zygote_uptime: - report.info( - "Zygote has been running for %s ([[dd-]hh:]mm:ss). If this " - "value is low, the phone may have recently crashed." % zygote_uptime - ) - else: - report.warning( - "Zygote has been killed. 
It is likely the Android Runtime has " - "crashed. Check the bugreport/logcat for more information." - ) - return False - - netd_uptime = utils.get_device_process_uptime(adb, "netd") - if netd_uptime: - report.info( - "Netd has been running for %s ([[dd-]hh:]mm:ss). If this " - "value is low, the phone may have recently crashed." % zygote_uptime - ) - else: - report.warning( - "Netd has been killed. The Android Runtime may have crashed. " - "Check the bugreport/logcat for more information." - ) - return False - - adbd_uptime = utils.get_device_process_uptime(adb, "adbd") - if netd_uptime: - report.info( - "Adbd has been running for %s ([[dd-]hh:]mm:ss). If this " - "value is low, the phone may have recently crashed." % adbd_uptime - ) - else: - report.warning("Adbd is not running.") - return False - return True - - def report_sl4a_state(self, rpc_connection, adb, report): - """Creates an error report for the state of SL4A.""" - report.info("Diagnosing Failure over connection %s." % rpc_connection.ports) - - ports = rpc_connection.ports - forwarded_ports_output = adb.forward("--list") - - expected_output = "%s tcp:%s tcp:%s" % ( - adb.serial, - ports.forwarded_port, - ports.server_port, - ) - if expected_output not in forwarded_ports_output: - formatted_output = re.sub( - "^", " ", forwarded_ports_output, flags=re.MULTILINE - ) - report.warning( - "The forwarded port for the failed RpcConnection is missing.\n" - "Expected:\n %s\nBut found:\n%s" - % (expected_output, formatted_output) - ) - return False - else: - report.info( - "The connection port has been properly forwarded to " "the device." - ) - - sl4a_uptime = utils.get_device_process_uptime( - adb, "com.googlecode.android_scripting" - ) - if sl4a_uptime: - report.info( - "SL4A has been running for %s ([[dd-]hh:]mm:ss). If this " - "value is lower than the test case, it must have been " - "restarted during the test." % sl4a_uptime - ) - else: - report.warning( - "The SL4A scripting service is not running. 
SL4A may have " - "crashed, or have been terminated by the Android Runtime." - ) - return False - return True - - def report_sl4a_session(self, sl4a_manager, session, report): - """Reports the state of an SL4A session.""" - if session.server_port not in sl4a_manager.sl4a_ports_in_use: - report.warning( - "SL4A server port %s not found in set of open " - "ports %s" % (session.server_port, sl4a_manager.sl4a_ports_in_use) - ) - return False - - if session not in sl4a_manager.sessions.values(): - report.warning( - "SL4A session %s over port %s is not managed by " - "the SL4A Manager. This session is already dead." - % (session.uid, session.server_port) - ) - return False - return True - - def finalize_reports(self): - self._accept_requests = False - while self._current_request_count > 0: - # Wait for other threads to finish. - time.sleep(0.1) - - def _get_report_ticket(self): - """Returns the next ticket, or none if all tickets have been used.""" - logging.debug("Getting ticket for SL4A error report.") - with self._ticket_lock: - self._ticket_number += 1 - ticket_number = self._ticket_number - - if ticket_number <= self.max_reports: - return ticket_number - else: - return None
diff --git a/src/antlion/controllers/sl4a_lib/event_dispatcher.py b/src/antlion/controllers/sl4a_lib/event_dispatcher.py deleted file mode 100644 index 503923c..0000000 --- a/src/antlion/controllers/sl4a_lib/event_dispatcher.py +++ /dev/null
@@ -1,491 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from concurrent.futures import ThreadPoolExecutor -import queue -import re -import threading -import time - -from antlion import logger -from antlion.controllers.sl4a_lib import rpc_client - - -class EventDispatcherError(Exception): - """The base class for all EventDispatcher exceptions.""" - - -class IllegalStateError(EventDispatcherError): - """Raise when user tries to put event_dispatcher into an illegal state.""" - - -class DuplicateError(EventDispatcherError): - """Raise when two event handlers have been assigned to an event name.""" - - -class EventDispatcher: - """A class for managing the events for an SL4A Session. - - Attributes: - _serial: The serial of the device. - _rpc_client: The rpc client for that session. - _started: A bool that holds whether or not the event dispatcher is - running. - _executor: The thread pool executor for running event handlers and - polling. - _event_dict: A dictionary of str eventName = Queue<Event> eventQueue - _handlers: A dictionary of str eventName => (lambda, args) handler - _lock: A lock that prevents multiple reads/writes to the event queues. - log: The EventDispatcher's logger. 
- """ - - DEFAULT_TIMEOUT = 60 - - def __init__(self, serial, rpc_client): - self._serial = serial - self._rpc_client = rpc_client - self._started = False - self._executor = None - self._event_dict = {} - self._handlers = {} - self._lock = threading.RLock() - - def _log_formatter(message): - """Defines the formatting used in the logger.""" - return "[E Dispatcher|%s|%s] %s" % ( - self._serial, - self._rpc_client.uid, - message, - ) - - self.log = logger.create_logger(_log_formatter) - - def poll_events(self): - """Continuously polls all types of events from sl4a. - - Events are sorted by name and store in separate queues. - If there are registered handlers, the handlers will be called with - corresponding event immediately upon event discovery, and the event - won't be stored. If exceptions occur, stop the dispatcher and return - """ - while self._started: - try: - # 60000 in ms, timeout in second - event_obj = self._rpc_client.eventWait(60000, timeout=120) - except rpc_client.Sl4aConnectionError as e: - if self._rpc_client.is_alive: - self.log.warning("Closing due to closed session.") - break - else: - self.log.warning("Closing due to error: %s." % e) - self.close() - raise e - if not event_obj: - continue - elif "name" not in event_obj: - self.log.error("Received Malformed event {}".format(event_obj)) - continue - else: - event_name = event_obj["name"] - # if handler registered, process event - if event_name == "EventDispatcherShutdown": - self.log.debug("Received shutdown signal.") - # closeSl4aSession has been called, which closes the event - # dispatcher. Stop execution on this polling thread. 
- return - if event_name in self._handlers: - self.log.debug( - "Using handler %s for event: %r" - % (self._handlers[event_name].__name__, event_obj) - ) - self.handle_subscribed_event(event_obj, event_name) - else: - self.log.debug("Queuing event: %r" % event_obj) - self._lock.acquire() - if event_name in self._event_dict: # otherwise, cache event - self._event_dict[event_name].put(event_obj) - else: - q = queue.Queue() - q.put(event_obj) - self._event_dict[event_name] = q - self._lock.release() - - def register_handler(self, handler, event_name, args): - """Registers an event handler. - - One type of event can only have one event handler associated with it. - - Args: - handler: The event handler function to be registered. - event_name: Name of the event the handler is for. - args: User arguments to be passed to the handler when it's called. - - Raises: - IllegalStateError: Raised if attempts to register a handler after - the dispatcher starts running. - DuplicateError: Raised if attempts to register more than one - handler for one type of event. - """ - if self._started: - raise IllegalStateError( - "Cannot register service after polling is " "started." - ) - self._lock.acquire() - try: - if event_name in self._handlers: - raise DuplicateError( - "A handler for {} already exists".format(event_name) - ) - self._handlers[event_name] = (handler, args) - finally: - self._lock.release() - - def start(self): - """Starts the event dispatcher. - - Initiates executor and start polling events. - - Raises: - IllegalStateError: Can't start a dispatcher again when it's already - running. - """ - if not self._started: - self._started = True - self._executor = ThreadPoolExecutor(max_workers=32) - self._executor.submit(self.poll_events) - else: - raise IllegalStateError("Dispatcher is already started.") - - def close(self): - """Clean up and release resources. - - This function should only be called after a - rpc_client.closeSl4aSession() call. 
- """ - if not self._started: - return - self._started = False - self._executor.shutdown(wait=True) - self.clear_all_events() - - def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT): - """Pop an event from its queue. - - Return and remove the oldest entry of an event. - Block until an event of specified name is available or - times out if timeout is set. - - Args: - event_name: Name of the event to be popped. - timeout: Number of seconds to wait when event is not present. - Never times out if None. - - Returns: - event: The oldest entry of the specified event. None if timed out. - - Raises: - IllegalStateError: Raised if pop is called before the dispatcher - starts polling. - """ - if not self._started: - raise IllegalStateError("Dispatcher needs to be started before popping.") - - e_queue = self.get_event_q(event_name) - - if not e_queue: - raise IllegalStateError( - "Failed to get an event queue for {}".format(event_name) - ) - - try: - # Block for timeout - if timeout: - return e_queue.get(True, timeout) - # Non-blocking poll for event - elif timeout == 0: - return e_queue.get(False) - else: - # Block forever on event wait - return e_queue.get(True) - except queue.Empty: - msg = "Timeout after {}s waiting for event: {}".format(timeout, event_name) - self.log.info(msg) - raise queue.Empty(msg) - - def wait_for_event( - self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs - ): - """Wait for an event that satisfies a predicate to appear. - - Continuously pop events of a particular name and check against the - predicate until an event that satisfies the predicate is popped or - timed out. Note this will remove all the events of the same name that - do not satisfy the predicate in the process. - - Args: - event_name: Name of the event to be popped. - predicate: A function that takes an event and returns True if the - predicate is satisfied, False otherwise. - timeout: Number of seconds to wait. 
- *args: Optional positional args passed to predicate(). - **kwargs: Optional keyword args passed to predicate(). - consume_ignored_events: Whether or not to consume events while - searching for the desired event. Defaults to True if unset. - - Returns: - The event that satisfies the predicate. - - Raises: - queue.Empty: Raised if no event that satisfies the predicate was - found before time out. - """ - deadline = time.time() + timeout - ignored_events = [] - consume_events = kwargs.pop("consume_ignored_events", True) - while True: - event = None - try: - event = self.pop_event(event_name, 1) - if consume_events: - self.log.debug("Consuming event: %r" % event) - else: - self.log.debug("Peeking at event: %r" % event) - ignored_events.append(event) - except queue.Empty: - pass - - if event and predicate(event, *args, **kwargs): - for ignored_event in ignored_events: - self.get_event_q(event_name).put(ignored_event) - self.log.debug( - "Matched event: %r with %s" % (event, predicate.__name__) - ) - return event - - if time.time() > deadline: - for ignored_event in ignored_events: - self.get_event_q(event_name).put(ignored_event) - msg = "Timeout after {}s waiting for event: {}".format( - timeout, event_name - ) - self.log.info(msg) - raise queue.Empty(msg) - - def pop_events(self, regex_pattern, timeout, freq=1): - """Pop events whose names match a regex pattern. - - If such event(s) exist, pop one event from each event queue that - satisfies the condition. Otherwise, wait for an event that satisfies - the condition to occur, with timeout. - - Results are sorted by timestamp in ascending order. - - Args: - regex_pattern: The regular expression pattern that an event name - should match in order to be popped. - timeout: Number of seconds to wait for events in case no event - matching the condition exits when the function is called. - - Returns: - results: Pop events whose names match a regex pattern. - Empty if none exist and the wait timed out. 
- - Raises: - IllegalStateError: Raised if pop is called before the dispatcher - starts polling. - queue.Empty: Raised if no event was found before time out. - """ - if not self._started: - raise IllegalStateError("Dispatcher needs to be started before popping.") - deadline = time.time() + timeout - while True: - # TODO: fix the sleep loop - results = self._match_and_pop(regex_pattern) - if len(results) != 0 or time.time() > deadline: - break - time.sleep(freq) - if len(results) == 0: - msg = "Timeout after {}s waiting for event: {}".format( - timeout, regex_pattern - ) - self.log.error(msg) - raise queue.Empty(msg) - - return sorted(results, key=lambda event: event["time"]) - - def _match_and_pop(self, regex_pattern): - """Pop one event from each of the event queues whose names - match (in a sense of regular expression) regex_pattern. - """ - results = [] - self._lock.acquire() - for name in self._event_dict.keys(): - if re.match(regex_pattern, name): - q = self._event_dict[name] - if q: - try: - results.append(q.get(False)) - except queue.Empty: - pass - self._lock.release() - return results - - def get_event_q(self, event_name): - """Obtain the queue storing events of the specified name. - - If no event of this name has been polled, wait for one to. - - Returns: A queue storing all the events of the specified name. - """ - self._lock.acquire() - if event_name not in self._event_dict or self._event_dict[event_name] is None: - self._event_dict[event_name] = queue.Queue() - self._lock.release() - - event_queue = self._event_dict[event_name] - return event_queue - - def handle_subscribed_event(self, event_obj, event_name): - """Execute the registered handler of an event. - - Retrieve the handler and its arguments, and execute the handler in a - new thread. - - Args: - event_obj: Json object of the event. - event_name: Name of the event to call handler for. 
- """ - handler, args = self._handlers[event_name] - self._executor.submit(handler, event_obj, *args) - - def _handle( - self, event_handler, event_name, user_args, event_timeout, cond, cond_timeout - ): - """Pop an event of specified type and calls its handler on it. If - condition is not None, block until condition is met or timeout. - """ - if cond: - cond.wait(cond_timeout) - event = self.pop_event(event_name, event_timeout) - return event_handler(event, *user_args) - - def handle_event( - self, - event_handler, - event_name, - user_args, - event_timeout=None, - cond=None, - cond_timeout=None, - ): - """Handle events that don't have registered handlers - - In a new thread, poll one event of specified type from its queue and - execute its handler. If no such event exists, the thread waits until - one appears. - - Args: - event_handler: Handler for the event, which should take at least - one argument - the event json object. - event_name: Name of the event to be handled. - user_args: User arguments for the handler; to be passed in after - the event json. - event_timeout: Number of seconds to wait for the event to come. - cond: A condition to wait on before executing the handler. Should - be a threading.Event object. - cond_timeout: Number of seconds to wait before the condition times - out. Never times out if None. - - Returns: - worker: A concurrent.Future object associated with the handler. - If blocking call worker.result() is triggered, the handler - needs to return something to unblock. - """ - worker = self._executor.submit( - self._handle, - event_handler, - event_name, - user_args, - event_timeout, - cond, - cond_timeout, - ) - return worker - - def pop_all(self, event_name): - """Return and remove all stored events of a specified name. - - Pops all events from their queue. May miss the latest ones. - If no event is available, return immediately. - - Args: - event_name: Name of the events to be popped. - - Returns: - results: List of the desired events. 
- - Raises: - IllegalStateError: Raised if pop is called before the dispatcher - starts polling. - """ - if not self._started: - raise IllegalStateError( - ("Dispatcher needs to be started before " "popping.") - ) - results = [] - try: - self._lock.acquire() - while True: - e = self._event_dict[event_name].get(block=False) - results.append(e) - except (queue.Empty, KeyError): - return results - finally: - self._lock.release() - - def clear_events(self, event_name): - """Clear all events of a particular name. - - Args: - event_name: Name of the events to be popped. - """ - self._lock.acquire() - try: - q = self.get_event_q(event_name) - q.queue.clear() - except queue.Empty: - return - finally: - self._lock.release() - - def clear_all_events(self): - """Clear all event queues and their cached events.""" - self._lock.acquire() - self._event_dict.clear() - self._lock.release() - - def is_event_match(self, event, field, value): - return self.is_event_match_for_list(event, field, [value]) - - def is_event_match_for_list(self, event, field, value_list): - try: - value_in_event = event["data"][field] - except KeyError: - return False - for value in value_list: - if value_in_event == value: - return True - return False
diff --git a/src/antlion/controllers/sl4a_lib/rpc_client.py b/src/antlion/controllers/sl4a_lib/rpc_client.py deleted file mode 100644 index cc2cee2..0000000 --- a/src/antlion/controllers/sl4a_lib/rpc_client.py +++ /dev/null
@@ -1,379 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import json -import socket -import threading -import time -from concurrent import futures - -from antlion import error -from antlion import logger - -# The default timeout value when no timeout is set. -SOCKET_TIMEOUT = 60 - -# The Session UID when a UID has not been received yet. -UNKNOWN_UID = -1 - - -class Sl4aException(error.ActsError): - """The base class for all SL4A exceptions.""" - - -class Sl4aStartError(Sl4aException): - """Raised when sl4a is not able to be started.""" - - -class Sl4aApiError(Sl4aException): - """Raised when remote API reports an error. - - This error mirrors the JSON-RPC 2.0 spec for Error Response objects. - - Attributes: - code: The error code returned by SL4A. Not to be confused with - ActsError's error_code. - message: The error message returned by SL4A. - data: The extra data, if any, returned by SL4A. 
- """ - - def __init__(self, message, code=-1, data=None, rpc_name=""): - super().__init__() - self.message = message - self.code = code - if data is None: - self.data = {} - else: - self.data = data - self.rpc_name = rpc_name - - def __str__(self): - if self.data: - return "Error in RPC %s %s:%s:%s" % ( - self.rpc_name, - self.code, - self.message, - self.data, - ) - else: - return "Error in RPC %s %s:%s" % (self.rpc_name, self.code, self.message) - - -class Sl4aConnectionError(Sl4aException): - """An error raised upon failure to connect to SL4A.""" - - -class Sl4aProtocolError(Sl4aException): - """Raised when there an error in exchanging data with server on device.""" - - NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake." - NO_RESPONSE_FROM_SERVER = "No response from server." - MISMATCHED_API_ID = "Mismatched API id." - - -class Sl4aNotInstalledError(Sl4aException): - """An error raised when an Sl4aClient is created without SL4A installed.""" - - -class Sl4aRpcTimeoutError(Sl4aException): - """An error raised when an SL4A RPC has timed out.""" - - -class RpcClient(object): - """An RPC client capable of processing multiple RPCs concurrently. - - Attributes: - _free_connections: A list of all idle RpcConnections. - _working_connections: A list of all working RpcConnections. - _lock: A lock used for accessing critical memory. - max_connections: The maximum number of RpcConnections at a time. - Increasing or decreasing the number of max connections does NOT - modify the thread pool size being used for self.future RPC calls. - _log: The logger for this RpcClient. - """ - - """The default value for the maximum amount of connections for a client.""" - DEFAULT_MAX_CONNECTION = 15 - - class AsyncClient(object): - """An object that allows RPC calls to be called asynchronously. - - Attributes: - _rpc_client: The RpcClient to use when making calls. 
- _executor: The ThreadPoolExecutor used to keep track of workers - """ - - def __init__(self, rpc_client): - self._rpc_client = rpc_client - self._executor = futures.ThreadPoolExecutor( - max_workers=max(rpc_client.max_connections - 2, 1) - ) - - def rpc(self, name, *args, **kwargs): - future = self._executor.submit(name, *args, **kwargs) - return future - - def __getattr__(self, name): - """Wrapper for python magic to turn method calls into RPC calls.""" - - def rpc_call(*args, **kwargs): - future = self._executor.submit( - self._rpc_client.__getattr__(name), *args, **kwargs - ) - return future - - return rpc_call - - def __init__( - self, - uid, - serial, - on_error_callback, - _create_connection_func, - max_connections=None, - ): - """Creates a new RpcClient object. - - Args: - uid: The session uid this client is a part of. - serial: The serial of the Android device. Used for logging. - on_error_callback: A callback for when a connection error is raised. - _create_connection_func: A reference to the function that creates a - new session. - max_connections: The maximum number of connections the RpcClient - can have. 
- """ - self._serial = serial - self.on_error = on_error_callback - self._create_connection_func = _create_connection_func - self._free_connections = [self._create_connection_func(uid)] - - self.uid = self._free_connections[0].uid - self._lock = threading.Lock() - - def _log_formatter(message): - """Formats the message to be logged.""" - return "[RPC Service|%s|%s] %s" % (self._serial, self.uid, message) - - self._log = logger.create_logger(_log_formatter) - - self._working_connections = [] - if max_connections is None: - self.max_connections = RpcClient.DEFAULT_MAX_CONNECTION - else: - self.max_connections = max_connections - - self._async_client = RpcClient.AsyncClient(self) - self.is_alive = True - - def terminate(self): - """Terminates all connections to the SL4A server.""" - if len(self._working_connections) > 0: - self._log.warning( - "%s connections are still active, and waiting on " - "responses.Closing these connections now." - % len(self._working_connections) - ) - connections = self._free_connections + self._working_connections - for connection in connections: - self._log.debug("Closing connection over ports %s" % connection.ports) - connection.close() - self._free_connections = [] - self._working_connections = [] - self.is_alive = False - - def _get_free_connection(self): - """Returns a free connection to be used for an RPC call. - - This function also adds the client to the working set to prevent - multiple users from obtaining the same client. - """ - while True: - if len(self._free_connections) > 0: - with self._lock: - # Check if another thread grabbed the remaining connection. - # while we were waiting for the lock. 
- if len(self._free_connections) == 0: - continue - client = self._free_connections.pop() - self._working_connections.append(client) - return client - - client_count = len(self._free_connections) + len(self._working_connections) - if client_count < self.max_connections: - with self._lock: - client_count = len(self._free_connections) + len( - self._working_connections - ) - if client_count < self.max_connections: - client = self._create_connection_func(self.uid) - self._working_connections.append(client) - return client - time.sleep(0.01) - - def _release_working_connection(self, connection): - """Marks a working client as free. - - Args: - connection: The client to mark as free. - Raises: - A ValueError if the client is not a known working connection. - """ - # We need to keep this code atomic because the client count is based on - # the length of the free and working connection list lengths. - with self._lock: - self._working_connections.remove(connection) - self._free_connections.append(connection) - - def rpc(self, method, *args, timeout=None, retries=3): - """Sends an rpc to sl4a. - - Sends an rpc call to sl4a over this RpcClient's corresponding session. - - Args: - method: str, The name of the method to execute. - args: any, The args to send to sl4a. - timeout: The amount of time to wait for a response. - retries: Misnomer, is actually the number of tries. - - Returns: - The result of the rpc. - - Raises: - Sl4aProtocolError: Something went wrong with the sl4a protocol. - Sl4aApiError: The rpc went through, however executed with errors. 
- """ - connection = self._get_free_connection() - ticket = connection.get_new_ticket() - timed_out = False - if timeout: - connection.set_timeout(timeout) - data = {"id": ticket, "method": method, "params": args} - request = json.dumps(data) - response = "" - try: - for i in range(1, retries + 1): - connection.send_request(request) - - response = connection.get_response() - if not response: - if i < retries: - self._log.warning( - "No response for RPC method %s on iteration %s", method, i - ) - continue - else: - self._log.exception( - "No response for RPC method %s on iteration %s", method, i - ) - self.on_error(connection) - raise Sl4aProtocolError( - Sl4aProtocolError.NO_RESPONSE_FROM_SERVER - ) - else: - break - except BrokenPipeError as e: - if self.is_alive: - self._log.exception( - "The device disconnected during RPC call " - "%s. Please check the logcat for a crash " - "or disconnect.", - method, - ) - self.on_error(connection) - else: - self._log.warning("The connection was killed during cleanup:") - self._log.warning(e) - raise Sl4aConnectionError(e) - except socket.timeout as err: - # If a socket connection has timed out, the socket can no longer be - # used. Close it out and remove the socket from the connection pool. - timed_out = True - self._log.warning( - 'RPC "%s" (id: %s) timed out after %s seconds.', - method, - ticket, - timeout or SOCKET_TIMEOUT, - ) - self._log.debug("Closing timed out connection over %s" % connection.ports) - connection.close() - self._working_connections.remove(connection) - # Re-raise the error as an SL4A Error so end users can process it. 
- raise Sl4aRpcTimeoutError(err) - finally: - if not timed_out: - if timeout: - connection.set_timeout(SOCKET_TIMEOUT) - self._release_working_connection(connection) - result = json.loads(str(response, encoding="utf8")) - - if result["error"]: - error_object = result["error"] - if isinstance(error_object, dict): - # Uses JSON-RPC 2.0 Format - sl4a_api_error = Sl4aApiError( - error_object.get("message", None), - error_object.get("code", -1), - error_object.get("data", {}), - rpc_name=method, - ) - else: - # Fallback on JSON-RPC 1.0 Format - sl4a_api_error = Sl4aApiError(error_object, rpc_name=method) - self._log.warning(sl4a_api_error) - raise sl4a_api_error - if result["id"] != ticket: - self._log.error( - "RPC method %s with mismatched api id %s", method, result["id"] - ) - raise Sl4aProtocolError(Sl4aProtocolError.MISMATCHED_API_ID) - return result["result"] - - @property - def future(self): - """Returns a magic function that returns a future running an RPC call. - - This function effectively allows the idiom: - - >>> rpc_client = RpcClient(...) - >>> # returns after call finishes - >>> rpc_client.someRpcCall() - >>> # Immediately returns a reference to the RPC's future, running - >>> # the lengthy RPC call on another thread. - >>> future = rpc_client.future.someLengthyRpcCall() - >>> rpc_client.doOtherThings() - >>> ... - >>> # Wait for and get the returned value of the lengthy RPC. - >>> # Can specify a timeout as well. - >>> value = future.result() - - The number of concurrent calls to this method is limited to - (max_connections - 2), to prevent future calls from exhausting all free - connections. - """ - return self._async_client - - def __getattr__(self, name): - """Wrapper for python magic to turn method calls into RPC calls.""" - - def rpc_call(*args, **kwargs): - return self.rpc(name, *args, **kwargs) - - if not self.is_alive: - raise Sl4aStartError( - "This SL4A session has already been terminated. You must " - "create a new session to continue." 
- ) - return rpc_call
diff --git a/src/antlion/controllers/sl4a_lib/rpc_connection.py b/src/antlion/controllers/sl4a_lib/rpc_connection.py deleted file mode 100644 index 6b9bf25..0000000 --- a/src/antlion/controllers/sl4a_lib/rpc_connection.py +++ /dev/null
@@ -1,143 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import json -import socket -import threading - -from antlion import logger -from antlion.controllers.sl4a_lib import rpc_client - -# The Session UID when a UID has not been received yet. -UNKNOWN_UID = -1 - - -class Sl4aConnectionCommand(object): - """Commands that can be invoked on the sl4a client. - - INIT: Initializes a new sessions in sl4a. - CONTINUE: Creates a connection. - """ - - INIT = "initiate" - CONTINUE = "continue" - - -class RpcConnection(object): - """A single RPC Connection thread. - - Attributes: - _client_socket: The socket this connection uses. - _socket_file: The file created over the _client_socket. - _ticket_counter: The counter storing the current ticket number. - _ticket_lock: A lock on the ticket counter to prevent ticket collisions. - adb: A reference to the AdbProxy of the AndroidDevice. Used for logging. - log: The logger for this RPC Client. - ports: The Sl4aPorts object that stores the ports this connection uses. - uid: The SL4A session ID. 
- """ - - def __init__(self, adb, ports, client_socket, socket_fd, uid=UNKNOWN_UID): - self._client_socket = client_socket - self._socket_file = socket_fd - self._ticket_counter = 0 - self._ticket_lock = threading.Lock() - self.adb = adb - self.uid = uid - - def _log_formatter(message): - """Defines the formatting used in the logger.""" - return "[SL4A Client|%s|%s|%s] %s" % ( - self.adb.serial, - self.ports.client_port, - self.uid, - message, - ) - - self.log = logger.create_logger(_log_formatter) - - self.ports = ports - self.set_timeout(rpc_client.SOCKET_TIMEOUT) - - def open(self): - if self.uid != UNKNOWN_UID: - start_command = Sl4aConnectionCommand.CONTINUE - else: - start_command = Sl4aConnectionCommand.INIT - - self._initiate_handshake(start_command) - - def _initiate_handshake(self, start_command): - """Establishes a connection with the SL4A server. - - Args: - start_command: The command to send. See Sl4aConnectionCommand. - """ - try: - resp = self._cmd(start_command) - except socket.timeout as e: - self.log.error("Failed to open socket connection: %s", e) - raise - if not resp: - raise rpc_client.Sl4aProtocolError( - rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE - ) - result = json.loads(str(resp, encoding="utf8")) - if result["status"]: - self.uid = result["uid"] - else: - self.log.warning("UID not received for connection %s." % self.ports) - self.uid = UNKNOWN_UID - self.log.debug("Created connection over: %s." % self.ports) - - def _cmd(self, command): - """Sends an session protocol command to SL4A to establish communication. - - Args: - command: The name of the command to execute. - - Returns: - The line that was written back. 
- """ - self.send_request(json.dumps({"cmd": command, "uid": self.uid})) - return self.get_response() - - def get_new_ticket(self): - """Returns a ticket for a new request.""" - with self._ticket_lock: - self._ticket_counter += 1 - ticket = self._ticket_counter - return ticket - - def set_timeout(self, timeout): - """Sets the socket's wait for response timeout.""" - self._client_socket.settimeout(timeout) - - def send_request(self, request): - """Sends a request over the connection.""" - self._socket_file.write(request.encode("utf8") + b"\n") - self._socket_file.flush() - self.log.debug("Sent: " + request) - - def get_response(self): - """Returns the first response sent back to the client.""" - data = self._socket_file.readline() - self.log.debug("Received: " + data.decode("utf8", errors="replace")) - return data - - def close(self): - """Closes the connection gracefully.""" - self._client_socket.close() - self.adb.remove_tcp_forward(self.ports.forwarded_port)
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_manager.py b/src/antlion/controllers/sl4a_lib/sl4a_manager.py deleted file mode 100644 index f3d7047..0000000 --- a/src/antlion/controllers/sl4a_lib/sl4a_manager.py +++ /dev/null
@@ -1,324 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import threading - -import time - -from antlion import logger -from antlion.controllers.sl4a_lib import rpc_client -from antlion.controllers.sl4a_lib import sl4a_session -from antlion.controllers.sl4a_lib import error_reporter - -ATTEMPT_INTERVAL = 0.25 -MAX_WAIT_ON_SERVER_SECONDS = 5 - -SL4A_PKG_NAME = "com.googlecode.android_scripting" - -_SL4A_LAUNCH_SERVER_CMD = ( - "am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER " - "--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s " - "com.googlecode.android_scripting/.service.ScriptingLayerService" -) - -_SL4A_CLOSE_SERVER_CMD = ( - "am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS " - "--ei com.googlecode.android_scripting.extra.PROXY_PORT %s " - "com.googlecode.android_scripting/.service.ScriptingLayerService" -) - -# The command for finding SL4A's server port as root. -_SL4A_ROOT_FIND_PORT_CMD = ( - # Get all open, listening ports, and their process names - "ss -l -p -n | " - # Find all open TCP ports for SL4A - 'grep "tcp.*droid_scripting" | ' - # Shorten all whitespace to a single space character - 'tr -s " " | ' - # Grab the 5th column (which is server:port) - 'cut -d " " -f 5 |' - # Only grab the port - "sed s/.*://g" -) - -# The command for finding SL4A's server port without root. 
-_SL4A_USER_FIND_PORT_CMD = ( - # Get all open, listening ports, and their process names - "ss -l -p -n | " - # Find all open ports exposed to the public. This can produce false - # positives since users cannot read the process associated with the port. - 'grep -e "tcp.*::ffff:127\.0\.0\.1:" | ' - # Shorten all whitespace to a single space character - 'tr -s " " | ' - # Grab the 5th column (which is server:port) - 'cut -d " " -f 5 |' - # Only grab the port - "sed s/.*://g" -) - -# The command that begins the SL4A ScriptingLayerService. -_SL4A_START_SERVICE_CMD = ( - "am startservice " "com.googlecode.android_scripting/.service.ScriptingLayerService" -) - -# Maps device serials to their SL4A Manager. This is done to prevent multiple -# Sl4aManagers from existing for the same device. -_all_sl4a_managers = {} - - -def create_sl4a_manager(adb): - """Creates and returns an SL4AManager for the given device. - - Args: - adb: A reference to the device's AdbProxy. - """ - if adb.serial in _all_sl4a_managers: - _all_sl4a_managers[adb.serial].log.warning( - "Attempted to return multiple SL4AManagers on the same device. " - "Returning pre-existing SL4AManager instead." - ) - return _all_sl4a_managers[adb.serial] - else: - manager = Sl4aManager(adb) - _all_sl4a_managers[adb.serial] = manager - return manager - - -class Sl4aManager(object): - """A manager for SL4A Clients to a given AndroidDevice. - - SL4A is a single APK that can host multiple RPC servers at a time. This - class manages each server connection over ADB, and will gracefully - terminate the apk during cleanup. - - Attributes: - _listen_for_port_lock: A lock for preventing multiple threads from - potentially mixing up requested ports. - _sl4a_ports: A set of all known SL4A server ports in use. - adb: A reference to the AndroidDevice's AdbProxy. - log: The logger for this object. - sessions: A dictionary of session_ids to sessions. 
- """ - - def __init__(self, adb): - self._listen_for_port_lock = threading.Lock() - self._sl4a_ports = set() - self.adb = adb - self.log = logger.create_logger( - lambda msg: "[SL4A Manager|%s] %s" % (adb.serial, msg) - ) - self.sessions = {} - self._started = False - self.error_reporter = error_reporter.ErrorReporter("SL4A %s" % adb.serial) - - @property - def sl4a_ports_in_use(self): - """Returns a list of all server ports used by SL4A servers.""" - return set([session.server_port for session in self.sessions.values()]) - - def diagnose_failure(self, session, connection): - """Diagnoses all potential known reasons SL4A can fail. - - Assumes the failure happened on an RPC call, which verifies the state - of ADB/device.""" - self.error_reporter.create_error_report(self, session, connection) - - def start_sl4a_server(self, device_port, try_interval=ATTEMPT_INTERVAL): - """Opens a server socket connection on SL4A. - - Args: - device_port: The expected port for SL4A to open on. Note that in - many cases, this will be different than the port returned by - this method. - try_interval: The amount of seconds between attempts at finding an - opened port on the AndroidDevice. - - Returns: - The port number on the device the SL4A server is open on. - - Raises: - Sl4aConnectionError if SL4A's opened port cannot be found. - """ - # Launch a server through SL4A. - self.adb.shell(_SL4A_LAUNCH_SERVER_CMD % device_port) - - # There is a chance that the server has not come up yet by the time the - # launch command has finished. Try to read get the listening port again - # after a small amount of time. - time_left = MAX_WAIT_ON_SERVER_SECONDS - while time_left > 0: - port = self._get_open_listening_port() - if port is None: - time.sleep(try_interval) - time_left -= try_interval - else: - return port - - raise rpc_client.Sl4aConnectionError( - "Unable to find a valid open port for a new server connection. " - "Expected port: %s. 
Open ports: %s" % (device_port, self._sl4a_ports) - ) - - def _get_all_ports_command(self): - """Returns the list of all ports from the command to get ports.""" - is_root = True - if not self.adb.is_root(): - is_root = self.adb.ensure_root() - - if is_root: - return _SL4A_ROOT_FIND_PORT_CMD - else: - # TODO(markdr): When root is unavailable, search logcat output for - # the port the server has opened. - self.log.warning( - "Device cannot be put into root mode. SL4A " - "server connections cannot be verified." - ) - return _SL4A_USER_FIND_PORT_CMD - - def _get_all_ports(self): - return self.adb.shell(self._get_all_ports_command()).split() - - def _get_open_listening_port(self): - """Returns any open, listening port found for SL4A. - - Will return none if no port is found. - """ - possible_ports = self._get_all_ports() - self.log.debug("SL4A Ports found: %s" % possible_ports) - - # Acquire the lock. We lock this method because if multiple threads - # attempt to get a server at the same time, they can potentially find - # the same port as being open, and both attempt to connect to it. - with self._listen_for_port_lock: - for port in possible_ports: - if port not in self._sl4a_ports: - self._sl4a_ports.add(port) - return int(port) - return None - - def is_sl4a_installed(self): - """Returns True if SL4A is installed on the AndroidDevice.""" - return bool(self.adb.shell("pm path %s" % SL4A_PKG_NAME, ignore_status=True)) - - def start_sl4a_service(self): - """Starts the SL4A Service on the device. - - For starting an RPC server, use start_sl4a_server() instead. - """ - # Verify SL4A is installed. - if not self._started: - self._started = True - if not self.is_sl4a_installed(): - raise rpc_client.Sl4aNotInstalledError( - "SL4A is not installed on device %s" % self.adb.serial - ) - if self.adb.shell('(ps | grep "S %s") || true' % SL4A_PKG_NAME): - # Close all SL4A servers not opened by this manager. 
- # TODO(markdr): revert back to closing all ports after - # b/76147680 is resolved. - self.adb.shell("kill -9 $(pidof %s)" % SL4A_PKG_NAME) - self.adb.shell('settings put global hidden_api_blacklist_exemptions "*"') - # Start the service if it is not up already. - self.adb.shell(_SL4A_START_SERVICE_CMD) - - def obtain_sl4a_server(self, server_port): - """Obtain an SL4A server port. - - If the port is open and valid, return it. Otherwise, open an new server - with the hinted server_port. - """ - if server_port not in self.sl4a_ports_in_use: - return self.start_sl4a_server(server_port) - else: - return server_port - - def create_session( - self, max_connections=None, client_port=0, forwarded_port=0, server_port=None - ): - """Creates an SL4A server with the given ports if possible. - - The ports are not guaranteed to be available for use. If the port - asked for is not available, this will be logged, and the port will - be randomized. - - Args: - client_port: The client port on the host machine - forwarded_port: The server port on the host machine forwarded - by adb from the Android device - server_port: The port on the Android device. - max_connections: The max number of client connections for the - session. - - Returns: - A new Sl4aServer instance. - """ - if server_port is None: - # If a session already exists, use the same server. - if len(self.sessions) > 0: - server_port = self.sessions[sorted(self.sessions.keys())[0]].server_port - # Otherwise, open a new server on a random port. 
- else: - server_port = 0 - self.log.debug( - "Creating SL4A session client_port={}, forwarded_port={}, server_port={}".format( - client_port, forwarded_port, server_port - ) - ) - self.start_sl4a_service() - session = sl4a_session.Sl4aSession( - self.adb, - client_port, - server_port, - self.obtain_sl4a_server, - self.diagnose_failure, - forwarded_port, - max_connections=max_connections, - ) - self.sessions[session.uid] = session - return session - - def stop_service(self): - """Stops The SL4A Service. Force-stops the SL4A apk.""" - try: - self.adb.shell("am force-stop %s" % SL4A_PKG_NAME, ignore_status=True) - except Exception as e: - self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e) - self._started = False - - def terminate_all_sessions(self): - """Terminates all SL4A sessions gracefully.""" - self.error_reporter.finalize_reports() - for _, session in self.sessions.items(): - session.terminate() - self.sessions = {} - self._close_all_ports() - - def _close_all_ports(self, try_interval=ATTEMPT_INTERVAL): - """Closes all ports opened on SL4A.""" - ports = self._get_all_ports() - for port in set.union(self._sl4a_ports, ports): - self.adb.shell(_SL4A_CLOSE_SERVER_CMD % port) - time_left = MAX_WAIT_ON_SERVER_SECONDS - while time_left > 0 and self._get_open_listening_port(): - time.sleep(try_interval) - time_left -= try_interval - - if time_left <= 0: - self.log.warning( - "Unable to close all un-managed servers! Server ports that are " - "still open are %s" % self._get_open_listening_port() - ) - self._sl4a_ports = set()
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_ports.py b/src/antlion/controllers/sl4a_lib/sl4a_ports.py deleted file mode 100644 index db9917e..0000000 --- a/src/antlion/controllers/sl4a_lib/sl4a_ports.py +++ /dev/null
@@ -1,37 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class Sl4aPorts(object): - """A container for the three ports needed for an SL4A connection. - - Attributes: - client_port: The port on the host associated with the SL4A client - forwarded_port: The port forwarded to the Android device. - server_port: The port on the device associated with the SL4A server. - """ - - def __init__(self, client_port=0, forwarded_port=0, server_port=0): - self.client_port = client_port - self.forwarded_port = forwarded_port - self.server_port = server_port - - def __str__(self): - return "(%s, %s, %s)" % ( - self.client_port, - self.forwarded_port, - self.server_port, - )
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_session.py b/src/antlion/controllers/sl4a_lib/sl4a_session.py deleted file mode 100644 index 27edc48..0000000 --- a/src/antlion/controllers/sl4a_lib/sl4a_session.py +++ /dev/null
@@ -1,268 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import socket -import threading - -import errno - -from antlion import logger -from antlion.controllers.adb_lib.error import AdbError -from antlion.controllers.sl4a_lib import event_dispatcher -from antlion.controllers.sl4a_lib import rpc_connection -from antlion.controllers.sl4a_lib import rpc_client -from antlion.controllers.sl4a_lib import sl4a_ports -from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError - -SOCKET_TIMEOUT = 60 - -# The SL4A Session UID when a UID has not been received yet. -UNKNOWN_UID = -1 - - -class Sl4aSession(object): - """An object that tracks the state of an SL4A Session. - - Attributes: - _event_dispatcher: The EventDispatcher instance, if any, for this - session. - _terminate_lock: A lock that prevents race conditions for multiple - threads calling terminate() - _terminated: A bool that stores whether or not this session has been - terminated. Terminated sessions cannot be restarted. - adb: A reference to the AndroidDevice's AdbProxy. - log: The logger for this Sl4aSession - server_port: The SL4A server port this session is established on. - uid: The uid that corresponds the the SL4A Server's session id. This - value is only unique during the lifetime of the SL4A apk. 
- """ - - def __init__( - self, - adb, - host_port, - device_port, - get_server_port_func, - on_error_callback, - forwarded_port=0, - max_connections=None, - ): - """Creates an SL4A Session. - - Args: - adb: A reference to the adb proxy - get_server_port_func: A lambda (int) that returns the corrected - server port. The int passed in hints at which port to use, if - possible. - host_port: The port the host machine uses to connect to the SL4A - server for its first connection. - device_port: The SL4A server port to be used as a hint for which - SL4A server to connect to. - forwarded_port: The server port on host machine forwarded by adb - from Android device to accept SL4A connection - """ - self._event_dispatcher = None - self._terminate_lock = threading.Lock() - self._terminated = False - self.adb = adb - - def _log_formatter(message): - return "[SL4A Session|%s|%s] %s" % (self.adb.serial, self.uid, message) - - self.log = logger.create_logger(_log_formatter) - - self.forwarded_port = forwarded_port - self.server_port = device_port - self.uid = UNKNOWN_UID - self.obtain_server_port = get_server_port_func - self._on_error_callback = on_error_callback - - connection_creator = self._rpc_connection_creator(host_port) - self.rpc_client = rpc_client.RpcClient( - self.uid, - self.adb.serial, - self.diagnose_failure, - connection_creator, - max_connections=max_connections, - ) - - def _rpc_connection_creator(self, host_port): - def create_client(uid): - return self._create_rpc_connection( - ports=sl4a_ports.Sl4aPorts( - host_port, self.forwarded_port, self.server_port - ), - uid=uid, - ) - - return create_client - - @property - def is_alive(self): - return not self._terminated - - def _create_forwarded_port(self, server_port, hinted_port=0): - """Creates a forwarded port to the specified server port. - - Args: - server_port: (int) The port to forward to. - hinted_port: (int) The port to use for forwarding, if available. - Otherwise, the chosen port will be random. 
- Returns: - The chosen forwarded port. - - Raises AdbError if the version of ADB is too old, or the command fails. - """ - if self.adb.get_version_number() < 37 and hinted_port == 0: - self.log.error( - "The current version of ADB does not automatically provide a " - "port to forward. Please upgrade ADB to version 1.0.37 or " - "higher." - ) - raise Sl4aStartError("Unable to forward a port to the device.") - else: - try: - return self.adb.tcp_forward(hinted_port, server_port) - except AdbError as e: - if "cannot bind listener" in e.stderr: - self.log.warning( - "Unable to use %s to forward to device port %s due to: " - '"%s". Attempting to choose a random port instead.' - % (hinted_port, server_port, e.stderr) - ) - # Call this method again, but this time with no hinted port. - return self._create_forwarded_port(server_port) - raise e - - def _create_rpc_connection(self, ports=None, uid=UNKNOWN_UID): - """Creates an RPC Connection with the specified ports. - - Args: - ports: A Sl4aPorts object or a tuple of (host/client_port, - forwarded_port, device/server_port). If any of these are - zero, the OS will determine their values during connection. - - Note that these ports are only suggestions. If they are not - available, the a different port will be selected. - uid: The UID of the SL4A Session. To create a new session, use - UNKNOWN_UID. - Returns: - An Sl4aClient. - """ - if ports is None: - ports = sl4a_ports.Sl4aPorts(0, 0, 0) - # Open a new server if a server cannot be inferred. - ports.server_port = self.obtain_server_port(ports.server_port) - self.server_port = ports.server_port - # Forward the device port to the host. 
- ports.forwarded_port = self._create_forwarded_port( - ports.server_port, hinted_port=ports.forwarded_port - ) - client_socket, fd = self._create_client_side_connection(ports) - client = rpc_connection.RpcConnection( - self.adb, ports, client_socket, fd, uid=uid - ) - client.open() - if uid == UNKNOWN_UID: - self.uid = client.uid - return client - - def diagnose_failure(self, connection): - """Diagnoses any problems related to the SL4A session.""" - self._on_error_callback(self, connection) - - def get_event_dispatcher(self): - """Returns the EventDispatcher for this Sl4aSession.""" - if self._event_dispatcher is None: - self._event_dispatcher = event_dispatcher.EventDispatcher( - self.adb.serial, self.rpc_client - ) - return self._event_dispatcher - - def _create_client_side_connection(self, ports): - """Creates and connects the client socket to the forward device port. - - Args: - ports: A Sl4aPorts object or a tuple of (host_port, - forwarded_port, device_port). - - Returns: - A tuple of (socket, socket_file_descriptor). - """ - client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - client_socket.settimeout(SOCKET_TIMEOUT) - client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if ports.client_port != 0: - try: - client_socket.bind((socket.gethostname(), ports.client_port)) - except OSError as e: - # If the port is in use, log and ask for any open port. - if e.errno == errno.EADDRINUSE: - self.log.warning( - "Port %s is already in use on the host. " - "Generating a random port." % ports.client_port - ) - ports.client_port = 0 - return self._create_client_side_connection(ports) - raise - - # Verify and obtain the port opened by SL4A. - try: - # Connect to the port that has been forwarded to the device. - client_socket.connect(("127.0.0.1", ports.forwarded_port)) - except socket.timeout: - raise rpc_client.Sl4aConnectionError( - "SL4A has not connected over the specified port within the " - "timeout of %s seconds." 
% SOCKET_TIMEOUT - ) - except socket.error as e: - # In extreme, unlikely cases, a socket error with - # errno.EADDRNOTAVAIL can be raised when a desired host_port is - # taken by a separate program between the bind and connect calls. - # Note that if host_port is set to zero, there is no bind before - # the connection is made, so this error will never be thrown. - if e.errno == errno.EADDRNOTAVAIL: - ports.client_port = 0 - return self._create_client_side_connection(ports) - raise - ports.client_port = client_socket.getsockname()[1] - return client_socket, client_socket.makefile(mode="brw") - - def terminate(self): - """Terminates the session. - - The return of process execution is blocked on completion of all events - being processed by handlers in the Event Dispatcher. - """ - with self._terminate_lock: - if not self._terminated: - self.log.debug("Terminating Session.") - try: - self.rpc_client.closeSl4aSession() - except Exception as e: - if "SL4A session has already been terminated" not in str(e): - self.log.warning(e) - # Must be set after closeSl4aSession so the rpc_client does not - # think the session has closed. - self._terminated = True - if self._event_dispatcher: - try: - self._event_dispatcher.close() - except Exception as e: - self.log.warning(e) - try: - self.rpc_client.terminate() - except Exception as e: - self.log.warning(e)
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_types.py b/src/antlion/controllers/sl4a_lib/sl4a_types.py deleted file mode 100644 index 434ff92..0000000 --- a/src/antlion/controllers/sl4a_lib/sl4a_types.py +++ /dev/null
@@ -1,57 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.dict_object import DictObject - - -class Sl4aEvent(DictObject): - """Event returned by sl4a calls to eventPoll() and eventWait() - - The 'name' field uniquely identifies the contents of 'data'. - - """ - - def __init__(self, name=None, time=None, data=None): - DictObject.__init__(self, name=name, time=time, data=data) - - -class Sl4aNetworkInfo(DictObject): - """SL4A equivalent of an Android NetworkInfo Object""" - - def __init__( - self, - isAvailable=None, - isConnected=None, - isFailover=None, - isRoaming=None, - ExtraInfo=None, - FailedReason=None, - TypeName=None, - SubtypeName=None, - State=None, - ): - DictObject.__init__( - self, - isAvailable=isAvailable, - isConnected=isConnected, - isFailover=isFailover, - isRoaming=isRoaming, - ExtraInfo=ExtraInfo, - FailedReason=FailedReason, - TypeName=TypeName, - SubtypeName=SubtypeName, - State=State, - )
diff --git a/src/antlion/controllers/sniffer.py b/src/antlion/controllers/sniffer.py deleted file mode 100644 index e87a547..0000000 --- a/src/antlion/controllers/sniffer.py +++ /dev/null
@@ -1,293 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import importlib -import logging - -MOBLY_CONTROLLER_CONFIG_NAME = "Sniffer" -ACTS_CONTROLLER_REFERENCE_NAME = "sniffers" - - -def create(configs): - """Initializes the sniffer structures based on the JSON configuration. The - expected keys are: - - Type: A first-level type of sniffer. Planned to be 'local' for sniffers - running on the local machine, or 'remote' for sniffers running - remotely. - SubType: The specific sniffer type to be used. - Interface: The WLAN interface used to configure the sniffer. - BaseConfigs: A dictionary specifying baseline configurations of the - sniffer. Configurations can be overridden when starting a capture. - The keys must be one of the Sniffer.CONFIG_KEY_* values. 
- """ - objs = [] - for c in configs: - sniffer_type = c["Type"] - sniffer_subtype = c["SubType"] - interface = c["Interface"] - base_configs = c["BaseConfigs"] - module_name = "antlion.controllers.sniffer_lib.{}.{}".format( - sniffer_type, sniffer_subtype - ) - module = importlib.import_module(module_name) - objs.append( - module.Sniffer(interface, logging.getLogger(), base_configs=base_configs) - ) - return objs - - -def destroy(objs): - """Destroys the sniffers and terminates any ongoing capture sessions.""" - for sniffer in objs: - try: - sniffer.stop_capture() - except SnifferError: - pass - - -class SnifferError(Exception): - """This is the Exception class defined for all errors generated by - Sniffer-related modules. - """ - - -class InvalidDataError(Exception): - """This exception is thrown when invalid configuration data is passed - to a method. - """ - - -class ExecutionError(SnifferError): - """This exception is thrown when trying to configure the capture device - or when trying to execute the capture operation. - - When this exception is seen, it is possible that the sniffer module is run - without sudo (for local sniffers) or keys are out-of-date (for remote - sniffers). - """ - - -class InvalidOperationError(SnifferError): - """Certain methods may only be accessed when the instance upon which they - are invoked is in a certain state. This indicates that the object is not - in the correct state for a method to be called. - """ - - -class Sniffer(object): - """This class defines an object representing a sniffer. - - The object defines the generic behavior of sniffers - irrespective of how - they are implemented, or where they are located: on the local machine or on - the remote machine. - """ - - CONFIG_KEY_CHANNEL = "channel" - - def __init__(self, interface, logger, base_configs=None): - """The constructor for the Sniffer. It constructs a sniffer and - configures it to be ready for capture. 
- - Args: - interface: A string specifying the interface used to configure the - sniffer. - logger: ACTS logger object. - base_configs: A dictionary containing baseline configurations of the - sniffer. These can be overridden when staring a capture. The - keys are specified by Sniffer.CONFIG_KEY_*. - - Returns: - self: A configured sniffer. - - Raises: - InvalidDataError: if the config_path is invalid. - NoPermissionError: if an error occurs while configuring the - sniffer. - """ - raise NotImplementedError("Base class should not be called directly!") - - def get_descriptor(self): - """This function returns a string describing the sniffer. The specific - string (and its format) is up to each derived sniffer type. - - Returns: - A string describing the sniffer. - """ - raise NotImplementedError("Base class should not be called directly!") - - def get_type(self): - """This function returns the type of the sniffer. - - Returns: - The type (string) of the sniffer. Corresponds to the 'Type' key of - the sniffer configuration. - """ - raise NotImplementedError("Base class should not be called directly!") - - def get_subtype(self): - """This function returns the sub-type of the sniffer. - - Returns: - The sub-type (string) of the sniffer. Corresponds to the 'SubType' - key of the sniffer configuration. - """ - raise NotImplementedError("Base class should not be called directly!") - - def get_interface(self): - """This function returns The interface used to configure the sniffer, - e.g. 'wlan0'. - - Returns: - The interface (string) used to configure the sniffer. Corresponds to - the 'Interface' key of the sniffer configuration. - """ - raise NotImplementedError("Base class should not be called directly!") - - def get_capture_file(self): - """The sniffer places a capture in the logger directory. This function - enables the caller to obtain the path of that capture. - - Returns: - The full path of the current or last capture. 
- """ - raise NotImplementedError("Base class should not be called directly!") - - def start_capture( - self, - override_configs=None, - additional_args=None, - duration=None, - packet_count=None, - ): - """This function starts a capture which is saved to the specified file - path. - - Depending on the type/subtype and configuration of the sniffer the - capture may terminate on its own or may require an explicit call to the - stop_capture() function. - - This is a non-blocking function so a terminating function must be - called - either explicitly or implicitly: - - Explicitly: call either stop_capture() or wait_for_capture() - - Implicitly: use with a with clause. The wait_for_capture() function - will be called if a duration is specified (i.e. is not - None), otherwise a stop_capture() will be called. - - The capture is saved to a file in the log path of the logger. Use - the get_capture_file() to get the full path to the current or most - recent capture. - - Args: - override_configs: A dictionary which is combined with the - base_configs ("BaseConfigs" in the sniffer configuration). The - keys (specified by Sniffer.CONFIG_KEY_*) determine the - configuration of the sniffer for this specific capture. - additional_args: A string specifying additional raw - command-line arguments to pass to the underlying sniffer. The - interpretation of these flags is sniffer-dependent. - duration: An integer specifying the number of seconds over which to - capture packets. The sniffer will be terminated after this - duration. Used in implicit mode when using a 'with' clause. In - explicit control cases may have to be performed using a - sleep+stop or as the timeout argument to the wait function. - packet_count: An integer specifying the number of packets to capture - before terminating. Should be used with duration to guarantee - that capture terminates at some point (even if did not capture - the specified number of packets). 
- - Returns: - An ActiveCaptureContext process which can be used with a 'with' - clause. - - Raises: - InvalidDataError: for invalid configurations - NoPermissionError: if an error occurs while configuring and running - the sniffer. - """ - raise NotImplementedError("Base class should not be called directly!") - - def stop_capture(self): - """This function stops a capture and guarantees that the capture is - saved to the capture file configured during the start_capture() method. - Depending on the type of the sniffer the file may previously contain - partial results (e.g. for a local sniffer) or may not exist until the - stop_capture() method is executed (e.g. for a remote sniffer). - - Depending on the type/subtype and configuration of the sniffer the - capture may terminate on its own without requiring a call to this - function. In such a case it is still necessary to call either this - function or the wait_for_capture() function to make sure that the - capture file is moved to the correct location. - - Raises: - NoPermissionError: No permission when trying to stop a capture - and save the capture file. - """ - raise NotImplementedError("Base class should not be called directly!") - - def wait_for_capture(self, timeout=None): - """This function waits for a capture to terminate and guarantees that - the capture is saved to the capture file configured during the - start_capture() method. Depending on the type of the sniffer the file - may previously contain partial results (e.g. for a local sniffer) or - may not exist until the stop_capture() method is executed (e.g. for a - remote sniffer). - - Depending on the type/subtype and configuration of the sniffer the - capture may terminate on its own without requiring a call to this - function. In such a case it is still necessary to call either this - function or the stop_capture() function to make sure that the capture - file is moved to the correct location. 
- - Args: - timeout: An integer specifying the number of seconds to wait for - the capture to terminate on its own. On expiration of the - timeout the sniffer is stopped explicitly using the - stop_capture() function. - - Raises: - NoPermissionError: No permission when trying to stop a capture and - save the capture file. - """ - raise NotImplementedError("Base class should not be called directly!") - - -class ActiveCaptureContext(object): - """This class defines an object representing an active sniffer capture. - - The object is returned by a Sniffer.start_capture() command and terminates - the capture when the 'with' clause exits. It is syntactic sugar for - try/finally. - """ - - _sniffer = None - _timeout = None - - def __init__(self, sniffer, timeout=None): - self._sniffer = sniffer - self._timeout = timeout - - def __enter__(self): - pass - - def __exit__(self, type, value, traceback): - if self._sniffer is not None: - if self._timeout is None: - self._sniffer.stop_capture() - else: - self._sniffer.wait_for_capture(self._timeout) - self._sniffer = None
diff --git a/src/antlion/controllers/sniffer_lib/__init__.py b/src/antlion/controllers/sniffer_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/sniffer_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/sniffer_lib/local/__init__.py b/src/antlion/controllers/sniffer_lib/local/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/sniffer_lib/local/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/sniffer_lib/local/local_base.py b/src/antlion/controllers/sniffer_lib/local/local_base.py deleted file mode 100644 index 8873350..0000000 --- a/src/antlion/controllers/sniffer_lib/local/local_base.py +++ /dev/null
@@ -1,160 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Class for Local sniffers - i.e. running on the local machine. - -This class provides configuration for local interfaces but leaves -the actual capture (sniff) to sub-classes. -""" - -import os -import shutil -import signal -import subprocess -import tempfile -from antlion import logger -from antlion import utils -from antlion.controllers import sniffer - - -class SnifferLocalBase(sniffer.Sniffer): - """This class defines the common behaviors of WLAN sniffers running on - WLAN interfaces of the local machine. - - Specific mechanisms to capture packets over the local WLAN interfaces are - implemented by sub-classes of this class - i.e. it is not a final class. 
- """ - - def __init__(self, interface, logger, base_configs=None): - """See base class documentation""" - self._base_configs = None - self._capture_file_path = "" - self._interface = "" - self._logger = logger - self._process = None - self._temp_capture_file_path = "" - - if interface == "": - raise sniffer.InvalidDataError("Empty interface provided") - self._interface = interface - self._base_configs = base_configs - - try: - utils.exe_cmd("ifconfig", self._interface, "down") - utils.exe_cmd("iwconfig", self._interface, "mode", "monitor") - utils.exe_cmd("ifconfig", self._interface, "up") - except Exception as err: - raise sniffer.ExecutionError(err) - - def get_interface(self): - """See base class documentation""" - return self._interface - - def get_type(self): - """See base class documentation""" - return "local" - - def get_capture_file(self): - return self._capture_file_path - - def _pre_capture_config(self, override_configs=None): - """Utility function which configures the wireless interface per the - specified configurations. Operation is performed before every capture - start using baseline configurations (specified when sniffer initialized) - and override configurations specified here. - """ - final_configs = {} - if self._base_configs: - final_configs.update(self._base_configs) - if override_configs: - final_configs.update(override_configs) - - if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs: - try: - utils.exe_cmd( - "iwconfig", - self._interface, - "channel", - str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]), - ) - except Exception as err: - raise sniffer.ExecutionError(err) - - def _get_command_line(self, additional_args=None, duration=None, packet_count=None): - """Utility function to be implemented by every child class - which - are the concrete sniffer classes. Each sniffer-specific class should - derive the command line to execute its sniffer based on the specified - arguments. 
- """ - raise NotImplementedError("Base class should not be called directly!") - - def _post_process(self): - """Utility function which is executed after a capture is done. It - moves the capture file to the requested location. - """ - self._process = None - shutil.move(self._temp_capture_file_path, self._capture_file_path) - - def start_capture( - self, - override_configs=None, - additional_args=None, - duration=None, - packet_count=None, - ): - """See base class documentation""" - if self._process is not None: - raise sniffer.InvalidOperationError( - "Trying to start a sniff while another is still running!" - ) - capture_dir = os.path.join( - self._logger.log_path, "Sniffer-{}".format(self._interface) - ) - os.makedirs(capture_dir, exist_ok=True) - self._capture_file_path = os.path.join( - capture_dir, "capture_{}.pcap".format(logger.get_log_file_timestamp()) - ) - - self._pre_capture_config(override_configs) - _, self._temp_capture_file_path = tempfile.mkstemp(suffix=".pcap") - - cmd = self._get_command_line( - additional_args=additional_args, - duration=duration, - packet_count=packet_count, - ) - - self._process = utils.start_standing_subprocess(cmd) - return sniffer.ActiveCaptureContext(self, duration) - - def stop_capture(self): - """See base class documentation""" - if self._process is None: - raise sniffer.InvalidOperationError("Trying to stop a non-started process") - utils.stop_standing_subprocess(self._process, kill_signal=signal.SIGINT) - self._post_process() - - def wait_for_capture(self, timeout=None): - """See base class documentation""" - if self._process is None: - raise sniffer.InvalidOperationError( - "Trying to wait on a non-started process" - ) - try: - utils.wait_for_standing_subprocess(self._process, timeout) - self._post_process() - except subprocess.TimeoutExpired: - self.stop_capture()
diff --git a/src/antlion/controllers/sniffer_lib/local/tcpdump.py b/src/antlion/controllers/sniffer_lib/local/tcpdump.py deleted file mode 100644 index 85622dc..0000000 --- a/src/antlion/controllers/sniffer_lib/local/tcpdump.py +++ /dev/null
@@ -1,53 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import shutil -from antlion.controllers import sniffer -from antlion.controllers.sniffer_lib.local import local_base - - -class Sniffer(local_base.SnifferLocalBase): - """This class defines a sniffer which uses tcpdump as its back-end""" - - def __init__(self, config_path, logger, base_configs=None): - """See base class documentation""" - self._executable_path = None - - super(local_base.SnifferLocalBase).__init__( - config_path, logger, base_configs=base_configs - ) - - self._executable_path = shutil.which("tcpdump") - if self._executable_path is None: - raise sniffer.SnifferError("Cannot find a path to the 'tcpdump' executable") - - def get_descriptor(self): - """See base class documentation""" - return "local-tcpdump-{}".format(self._interface) - - def get_subtype(self): - """See base class documentation""" - return "tcpdump" - - def _get_command_line(self, additional_args=None, duration=None, packet_count=None): - cmd = "{} -i {} -w {}".format( - self._executable_path, self._interface, self._temp_capture_file_path - ) - if packet_count is not None: - cmd = "{} -c {}".format(cmd, packet_count) - if additional_args is not None: - cmd = "{} {}".format(cmd, additional_args) - return cmd
diff --git a/src/antlion/controllers/sniffer_lib/local/tshark.py b/src/antlion/controllers/sniffer_lib/local/tshark.py deleted file mode 100644 index dd79eed..0000000 --- a/src/antlion/controllers/sniffer_lib/local/tshark.py +++ /dev/null
@@ -1,58 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import shutil -from antlion.controllers import sniffer -from antlion.controllers.sniffer_lib.local import local_base - - -class Sniffer(local_base.SnifferLocalBase): - """This class defines a sniffer which uses tshark as its back-end""" - - def __init__(self, config_path, logger, base_configs=None): - """See base class documentation""" - self._executable_path = None - - super().__init__(config_path, logger, base_configs=base_configs) - - self._executable_path = shutil.which("tshark") or shutil.which( - "/usr/local/bin/tshark" - ) - if self._executable_path is None: - raise sniffer.SnifferError( - "Cannot find a path to the 'tshark' " - "executable (or to '/usr/local/bin/tshark')" - ) - - def get_descriptor(self): - """See base class documentation""" - return "local-tshark-{}-ch{}".format(self._interface) - - def get_subtype(self): - """See base class documentation""" - return "tshark" - - def _get_command_line(self, additional_args=None, duration=None, packet_count=None): - cmd = "{} -i {} -w {}".format( - self._executable_path, self._interface, self._temp_capture_file_path - ) - if duration is not None: - cmd = "{} -a duration:{}".format(cmd, duration) - if packet_count is not None: - cmd = "{} -c {}".format(cmd, packet_count) - if additional_args is not None: - cmd = "{} {}".format(cmd, additional_args) - return cmd
diff --git a/src/antlion/controllers/utils_lib/__init__.py b/src/antlion/controllers/utils_lib/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/utils_lib/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/utils_lib/commands/__init__.py b/src/antlion/controllers/utils_lib/commands/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/utils_lib/commands/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/utils_lib/commands/ip.py b/src/antlion/controllers/utils_lib/commands/ip.py deleted file mode 100644 index 7e028b1..0000000 --- a/src/antlion/controllers/utils_lib/commands/ip.py +++ /dev/null
@@ -1,160 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import ipaddress -import re - -from antlion.libs.proc import job - - -class LinuxIpCommand(object): - """Interface for doing standard IP commands on a linux system. - - Wraps standard shell commands used for ip into a python object that can - be interacted with more easily. - """ - - def __init__(self, runner): - """ - Args: - runner: Object that can take unix commands and run them in an - enviroment (eg. connection.SshConnection). - """ - self._runner = runner - - def get_ipv4_addresses(self, net_interface): - """Gets all ipv4 addresses of a network interface. - - Args: - net_interface: string, The network interface to get info on - (eg. wlan0). - - Returns: An iterator of tuples that contain (address, broadcast). - where address is a ipaddress.IPv4Interface and broadcast - is an ipaddress.IPv4Address. 
- """ - results = self._runner.run("ip addr show dev %s" % net_interface) - lines = results.stdout.splitlines() - - # Example stdout: - # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP group default qlen 1000 - # link/ether 48:0f:cf:3c:9d:89 brd ff:ff:ff:ff:ff:ff - # inet 192.168.1.1/24 brd 192.168.1.255 scope global eth0 - # valid_lft forever preferred_lft forever - # inet6 2620:0:1000:1500:a968:a776:2d80:a8b3/64 scope global temporary dynamic - # valid_lft 599919sec preferred_lft 80919sec - - for line in lines: - line = line.strip() - match = re.search("inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)", line) - if match: - d = match.groupdict() - address = ipaddress.IPv4Interface(d["address"]) - bcast = ipaddress.IPv4Address(d["bcast"]) - yield (address, bcast) - - match = re.search("inet (?P<address>[^\s]*)", line) - if match: - d = match.groupdict() - address = ipaddress.IPv4Interface(d["address"]) - yield (address, None) - - def add_ipv4_address(self, net_interface, address, broadcast=None): - """Adds an ipv4 address to a net_interface. - - Args: - net_interface: string, The network interface - to get the new ipv4 (eg. wlan0). - address: ipaddress.IPv4Interface, The new ipaddress and netmask - to add to an interface. - broadcast: ipaddress.IPv4Address, The broadcast address to use for - this net_interfaces subnet. - """ - if broadcast: - self._runner.run( - "ip addr add %s broadcast %s dev %s" - % (address, broadcast, net_interface) - ) - else: - self._runner.run("ip addr add %s dev %s" % (address, net_interface)) - - def remove_ipv4_address(self, net_interface, address, ignore_status=False): - """Remove an ipv4 address. - - Removes an ipv4 address from a network interface. - - Args: - net_interface: string, The network interface to remove the - ipv4 address from (eg. wlan0). - address: ipaddress.IPv4Interface or ipaddress.IPv4Address, - The ip address to remove from the net_interface. 
- ignore_status: True if the exit status can be ignored - Returns: - The job result from a the command - """ - return self._runner.run( - "ip addr del %s dev %s" % (address, net_interface), - ignore_status=ignore_status, - ) - - def set_ipv4_address(self, net_interface, address, broadcast=None): - """Set the ipv4 address. - - Sets the ipv4 address of a network interface. If the network interface - has any other ipv4 addresses these will be cleared. - - Args: - net_interface: string, The network interface to set the ip address - on (eg. wlan0). - address: ipaddress.IPv4Interface, The ip address and subnet to give - the net_interface. - broadcast: ipaddress.IPv4Address, The broadcast address to use for - the subnet. - """ - self.clear_ipv4_addresses(net_interface) - self.add_ipv4_address(net_interface, address, broadcast) - - def clear_ipv4_addresses(self, net_interface): - """Clears all ipv4 addresses registered to a net_interface. - - Args: - net_interface: string, The network interface to clear addresses from - (eg. wlan0). - """ - ip_info = self.get_ipv4_addresses(net_interface) - - for address, _ in ip_info: - result = self.remove_ipv4_address( - net_interface, address, ignore_status=True - ) - # It is possible that the address has already been removed by the - # time this command has been called. In such a case, we would get - # this error message. - error_msg = "RTNETLINK answers: Cannot assign requested address" - if result.exit_status != 0: - if error_msg in result.stderr: - # If it was removed by another process, log a warning - if address not in self.get_ipv4_addresses(net_interface): - self._runner.log.warning( - "Unable to remove address %s. The address was " - "removed by another process." % address - ) - continue - # If it was not removed, raise an error - self._runner.log.error( - "Unable to remove address %s. The address is still " - "registered to %s, despite call for removal." - % (address, net_interface) - ) - raise job.Error(result)
diff --git a/src/antlion/controllers/utils_lib/commands/route.py b/src/antlion/controllers/utils_lib/commands/route.py deleted file mode 100644 index a886455..0000000 --- a/src/antlion/controllers/utils_lib/commands/route.py +++ /dev/null
@@ -1,195 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import ipaddress -import re - -from antlion.controllers.utils_lib.ssh import connection - - -class Error(Exception): - """Exception thrown when a valid ip command experiences errors.""" - - -class NetworkInterfaceDown(Error): - """Exception thrown when a network interface is down.""" - - -class LinuxRouteCommand(object): - """Interface for doing standard ip route commands on a linux system.""" - - DEFAULT_ROUTE = "default" - - def __init__(self, runner): - """ - Args: - runner: Object that can take unix commands and run them in an - environment. - """ - self._runner = runner - - def add_route(self, net_interface, address, proto="static"): - """Add an entry to the ip routing table. - - Will add a route for either a specific ip address, or a network. - - Args: - net_interface: string, Any packet that sends through this route - will be sent using this network interface - (eg. wlan0). - address: ipaddress.IPv4Address, ipaddress.IPv4Network, - or DEFAULT_ROUTE. The address to use. If a network - is given then the entire subnet will be routed. - If DEFAULT_ROUTE is given then this will set the - default route. - proto: string, Routing protocol identifier of this route - (e.g. kernel, redirect, boot, static, ra). - See `man ip-route(8)` for details. - - Raises: - NetworkInterfaceDown: Raised when the network interface is down. 
- """ - try: - self._runner.run( - f"ip route add {address} dev {net_interface} proto {proto}" - ) - except connection.CommandError as e: - if "File exists" in e.result.stderr: - raise Error("Route already exists.") - if "Network is down" in e.result.stderr: - raise NetworkInterfaceDown("Device must be up for adding a route.") - raise - - def get_routes(self, net_interface=None): - """Get the routes in the ip routing table. - - Args: - net_interface: string, If given, only retrieve routes that have - been registered to go through this network - interface (eg. wlan0). - - Returns: An iterator that returns a tuple of (address, net_interface). - If it is the default route then address - will be the DEFAULT_ROUTE. If the route is a subnet then - it will be a ipaddress.IPv4Network otherwise it is a - ipaddress.IPv4Address. - """ - result_ipv4 = self._runner.run("ip -4 route show") - result_ipv6 = self._runner.run("ip -6 route show") - - lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines() - - # Scan through each line for valid route entries - # Example output: - # default via 192.168.1.254 dev eth0 proto static - # 192.168.1.0/24 dev eth0 proto kernel scope link src 172.22.100.19 metric 1 - # 192.168.2.1 dev eth2 proto kernel scope link metric 1 - # fe80::/64 dev wlan0 proto static metric 1024 - for line in lines: - if not "dev" in line: - continue - - if line.startswith(self.DEFAULT_ROUTE): - # The default route entry is formatted differently. - match = re.search("dev (?P<net_interface>\S+)", line) - pair = None - if match: - # When there is a match for the route entry pattern create - # A pair to hold the info. - pair = (self.DEFAULT_ROUTE, match.groupdict()["net_interface"]) - else: - # Test the normal route entry pattern. - match = re.search( - "(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)", line - ) - pair = None - if match: - # When there is a match for the route entry pattern create - # A pair to hold the info. 
- d = match.groupdict() - # Route can be either a network or specific address - try: - address = ipaddress.ip_address(d["address"]) - except ValueError: - address = d["address"] - - pair = (address, d["net_interface"]) - - # No pair means no pattern was found. - if not pair: - continue - - if net_interface: - # If a net_interface was passed in then only give the pair when it is - # The correct net_interface. - if pair[1] == net_interface: - yield pair - else: - # No net_interface given give all valid route entries. - yield pair - - def is_route(self, address, net_interface=None): - """Checks to see if a route exists. - - Args: - address: ipaddress.IPv4Address, ipaddress.IPv4Network, - or DEFAULT_ROUTE, The address to use. - net_interface: string, If specified, the route must be - registered to go through this network interface - (eg. wlan0). - - Returns: True if the route is found, False otherwise. - """ - for route, _ in self.get_routes(net_interface): - if route == address: - return True - - return False - - def remove_route(self, address, net_interface=None): - """Removes a route from the ip routing table. - - Removes a route from the ip routing table. If the route does not exist - nothing is done. - - Args: - address: ipaddress.IPv4Address, ipaddress.IPv4Network, - or DEFAULT_ROUTE, The address of the route to remove. - net_interface: string, If specified the route being removed is - registered to go through this network interface - (eg. wlan0) - """ - try: - if net_interface: - self._runner.run("ip route del %s dev %s" % (address, net_interface)) - else: - self._runner.run("ip route del %s" % address) - except connection.CommandError as e: - if "No such process" in e.result.stderr: - # The route didn't exist. - return - raise - - def clear_routes(self, net_interface=None): - """Clears all routes. - - Args: - net_interface: The network interface to clear routes on. - If not given then all routes will be removed on all network - interfaces (eg. wlan0). 
- """ - routes = self.get_routes(net_interface) - - for a, d in routes: - self.remove_route(a, d)
diff --git a/src/antlion/controllers/utils_lib/commands/shell.py b/src/antlion/controllers/utils_lib/commands/shell.py deleted file mode 100644 index ed105af..0000000 --- a/src/antlion/controllers/utils_lib/commands/shell.py +++ /dev/null
@@ -1,244 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import shlex -import signal -import time - -from antlion.libs.proc import job - - -class ShellCommand(object): - """Wraps basic commands that tend to be tied very closely to a shell. - - This class is a wrapper for running basic shell commands through - any object that has a run command. Basic shell functionality for managing - the system, programs, and files in wrapped within this class. - - Note: At the moment this only works with the ssh runner. - """ - - def __init__(self, runner, working_dir=None): - """Creates a new shell command invoker. - - Args: - runner: The object that will run the shell commands. - working_dir: The directory that all commands should work in, - if none then the runners enviroment default is used. - """ - self._runner = runner - self._working_dir = working_dir - - def run(self, command, timeout=60): - """Runs a generic command through the runner. - - Takes the command and prepares it to be run in the target shell using - this objects settings. - - Args: - command: The command to run. - timeout: How long to wait for the command (in seconds). - - Returns: - A CmdResult object containing the results of the shell command. - - Raises: - job.Error: When the command executed but had an error. 
- """ - if self._working_dir: - command_str = "cd %s; %s" % (self._working_dir, command) - else: - command_str = command - - return self._runner.run(command_str, timeout=timeout) - - def is_alive(self, identifier): - """Checks to see if a program is alive. - - Checks to see if a program is alive on the shells enviroment. This can - be used to check on generic programs, or a specific program using - a pid. - - Args: - identifier: string or int, Used to identify the program to check. - if given an int then it is assumed to be a pid. If - given a string then it will be used as a search key - to compare on the running processes. - Returns: - True if a process was found running, false otherwise. - """ - try: - if isinstance(identifier, str): - self.run("ps aux | grep -v grep | grep %s" % identifier) - elif isinstance(identifier, int): - self.signal(identifier, 0) - else: - raise ValueError("Bad type was given for identifier") - - return True - except job.Error: - return False - - def get_pids(self, identifier): - """Gets the pids of a program. - - Searches for a program with a specific name and grabs the pids for all - programs that match. - - Args: - identifier: A search term that identifies the program. - - Returns: An array of all pids that matched the identifier, or None - if no pids were found. - """ - try: - result = self.run("ps aux | grep -v grep | grep %s" % identifier) - except job.Error as e: - if e.result.exit_status == 1: - # Grep returns exit status 1 when no lines are selected. This is - # an expected return code. - return - raise e - - lines = result.stdout.splitlines() - - # The expected output of the above command is like so: - # bob 14349 0.0 0.0 34788 5552 pts/2 Ss Oct10 0:03 bash - # bob 52967 0.0 0.0 34972 5152 pts/4 Ss Oct10 0:00 bash - # Where the format is: - # USER PID ... 
- for line in lines: - pieces = line.split() - try: - yield int(pieces[1]) - except StopIteration: - return - - def search_file(self, search_string, file_name): - """Searches through a file for a string. - - Args: - search_string: The string or pattern to look for. - file_name: The name of the file to search. - - Returns: - True if the string or pattern was found, False otherwise. - """ - try: - self.run("grep %s %s" % (shlex.quote(search_string), file_name)) - return True - except job.Error: - return False - - def read_file(self, file_name): - """Reads a file through the shell. - - Args: - file_name: The name of the file to read. - - Returns: - A string of the files contents. - """ - return self.run("cat %s" % file_name).stdout - - def write_file(self, file_name, data): - """Writes a block of data to a file through the shell. - - Args: - file_name: The name of the file to write to. - data: The string of data to write. - """ - return self.run("echo %s > %s" % (shlex.quote(data), file_name)) - - def append_file(self, file_name, data): - """Appends a block of data to a file through the shell. - - Args: - file_name: The name of the file to write to. - data: The string of data to write. - """ - return self.run("echo %s >> %s" % (shlex.quote(data), file_name)) - - def touch_file(self, file_name): - """Creates a file through the shell. - - Args: - file_name: The name of the file to create. - """ - self.write_file(file_name, "") - - def delete_file(self, file_name): - """Deletes a file through the shell. - - Args: - file_name: The name of the file to delete. - """ - try: - self.run("rm -r %s" % file_name) - except job.Error as e: - if "No such file or directory" in e.result.stderr: - return - - raise - - def kill(self, identifier, timeout=10): - """Kills a program or group of programs through the shell. - - Kills all programs that match an identifier through the shell. 
This - will send an increasing queue of kill signals to all programs - that match the identifier until either all are dead or the timeout - finishes. - - Programs are guaranteed to be killed after running this command. - - Args: - identifier: A string used to identify the program. - timeout: The time to wait for all programs to die. Each signal will - take an equal portion of this time. - """ - if isinstance(identifier, int): - pids = [identifier] - else: - pids = list(self.get_pids(identifier)) - - signal_queue = [signal.SIGINT, signal.SIGTERM, signal.SIGKILL] - - signal_duration = timeout / len(signal_queue) - for sig in signal_queue: - for pid in pids: - try: - self.signal(pid, sig) - except job.Error: - pass - - start_time = time.time() - while pids and time.time() - start_time < signal_duration: - time.sleep(0.1) - pids = [pid for pid in pids if self.is_alive(pid)] - - if not pids: - break - - def signal(self, pid, sig): - """Sends a specific signal to a program. - - Args: - pid: The process id of the program to kill. - sig: The signal to send. - - Raises: - job.Error: Raised when the signal fail to reach - the specified program. - """ - self.run("kill -%d %d" % (sig, pid))
diff --git a/src/antlion/controllers/utils_lib/host_utils.py b/src/antlion/controllers/utils_lib/host_utils.py deleted file mode 100644 index 1b66089..0000000 --- a/src/antlion/controllers/utils_lib/host_utils.py +++ /dev/null
@@ -1,62 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import socket - - -def get_available_host_port(): - """Finds a semi-random available port. - - A race condition is still possible after the port number is returned, if - another process happens to bind it. - - Returns: - A port number that is unused on both TCP and UDP. - """ - # On the 2.6 kernel, calling _try_bind() on UDP socket returns the - # same port over and over. So always try TCP first. - while True: - # Ask the OS for an unused port. - port = _try_bind(0, socket.SOCK_STREAM, socket.IPPROTO_TCP) - # Check if this port is unused on the other protocol. - if port and _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP): - return port - - -def is_port_available(port): - """Checks if a given port number is available on the system. - - Args: - port: An integer which is the port number to check. - - Returns: - True if the port is available; False otherwise. - """ - return _try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and _try_bind( - port, socket.SOCK_DGRAM, socket.IPPROTO_UDP - ) - - -def _try_bind(port, socket_type, socket_proto): - s = socket.socket(socket.AF_INET, socket_type, socket_proto) - try: - try: - s.bind(("", port)) - # The result of getsockname() is protocol dependent, but for both - # IPv4 and IPv6 the second field is a port number. - return s.getsockname()[1] - except socket.error: - return None - finally: - s.close()
diff --git a/src/antlion/controllers/utils_lib/ssh/__init__.py b/src/antlion/controllers/utils_lib/ssh/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/controllers/utils_lib/ssh/__init__.py +++ /dev/null
diff --git a/src/antlion/controllers/utils_lib/ssh/connection.py b/src/antlion/controllers/utils_lib/ssh/connection.py deleted file mode 100644 index 23c80d7..0000000 --- a/src/antlion/controllers/utils_lib/ssh/connection.py +++ /dev/null
@@ -1,460 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import os -import re -import shutil -import tempfile -import threading -import time -import uuid - -from antlion import logger -from antlion.controllers.utils_lib import host_utils -from antlion.controllers.utils_lib.ssh import formatter -from antlion.libs.proc import job - - -class Error(Exception): - """An error occurred during an ssh operation.""" - - -class CommandError(Exception): - """An error occurred with the command. - - Attributes: - result: The results of the ssh command that had the error. - """ - - def __init__(self, result): - """ - Args: - result: The result of the ssh command that created the problem. - """ - self.result = result - - def __str__(self): - return "cmd: %s\nstdout: %s\nstderr: %s" % ( - self.result.command, - self.result.stdout, - self.result.stderr, - ) - - -_Tunnel = collections.namedtuple("_Tunnel", ["local_port", "remote_port", "proc"]) - - -class SshConnection(object): - """Provides a connection to a remote machine through ssh. - - Provides the ability to connect to a remote machine and execute a command - on it. The connection will try to establish a persistent connection When - a command is run. If the persistent connection fails it will attempt - to connect normally. 
- """ - - @property - def socket_path(self): - """Returns: The os path to the master socket file.""" - return os.path.join(self._master_ssh_tempdir, "socket") - - def __init__(self, settings): - """ - Args: - settings: The ssh settings to use for this connection. - formatter: The object that will handle formatting ssh command - for use with the background job. - """ - self._settings = settings - self._formatter = formatter.SshFormatter() - self._lock = threading.Lock() - self._master_ssh_proc = None - self._master_ssh_tempdir = None - self._tunnels = list() - - def log_line(msg): - return "[SshConnection | %s] %s" % (self._settings.hostname, msg) - - self.log = logger.create_logger(log_line) - - def __enter__(self): - return self - - def __exit__(self, _, __, ___): - self.close() - - def __del__(self): - self.close() - - def setup_master_ssh(self, timeout_seconds=5): - """Sets up the master ssh connection. - - Sets up the initial master ssh connection if it has not already been - started. - - Args: - timeout_seconds: The time to wait for the master ssh connection to - be made. - - Raises: - Error: When setting up the master ssh connection fails. - """ - with self._lock: - if self._master_ssh_proc is not None: - socket_path = self.socket_path - if ( - not os.path.exists(socket_path) - or self._master_ssh_proc.poll() is not None - ): - self.log.debug( - "Master ssh connection to %s is down.", self._settings.hostname - ) - self._cleanup_master_ssh() - - if self._master_ssh_proc is None: - # Create a shared socket in a temp location. - self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master") - - # Setup flags and options for running the master ssh - # -N: Do not execute a remote command. - # ControlMaster: Spawn a master connection. - # ControlPath: The master connection socket path. - extra_flags = {"-N": None} - extra_options = { - "ControlMaster": True, - "ControlPath": self.socket_path, - "BatchMode": True, - } - - # Construct the command and start it. 
- master_cmd = self._formatter.format_ssh_local_command( - self._settings, extra_flags=extra_flags, extra_options=extra_options - ) - self.log.info("Starting master ssh connection.") - self._master_ssh_proc = job.run_async(master_cmd) - - end_time = time.time() + timeout_seconds - - while time.time() < end_time: - if os.path.exists(self.socket_path): - break - time.sleep(0.2) - else: - self._cleanup_master_ssh() - raise Error("Master ssh connection timed out.") - - def run( - self, - command, - timeout=60, - ignore_status=False, - env=None, - io_encoding="utf-8", - attempts=2, - ): - """Runs a remote command over ssh. - - Will ssh to a remote host and run a command. This method will - block until the remote command is finished. - - Args: - command: The command to execute over ssh. Can be either a string - or a list. - timeout: number seconds to wait for command to finish. - ignore_status: bool True to ignore the exit code of the remote - subprocess. Note that if you do ignore status codes, - you should handle non-zero exit codes explicitly. - env: dict environment variables to setup on the remote host. - io_encoding: str unicode encoding of command output. - attempts: Number of attempts before giving up on command failures. - - Returns: - A job.Result containing the results of the ssh command. - - Raises: - job.TimeoutError: When the remote command took to long to execute. - Error: When the ssh connection failed to be created. - CommandError: Ssh worked, but the command had an error executing. - """ - if attempts == 0: - return None - if env is None: - env = {} - - try: - self.setup_master_ssh(self._settings.connect_timeout) - except Error: - self.log.warning( - "Failed to create master ssh connection, using " - "normal ssh connection." 
- ) - - extra_options = {"BatchMode": True} - if self._master_ssh_proc: - extra_options["ControlPath"] = self.socket_path - - identifier = str(uuid.uuid4()) - full_command = 'echo "CONNECTED: %s"; %s' % (identifier, command) - - terminal_command = self._formatter.format_command( - full_command, env, self._settings, extra_options=extra_options - ) - - dns_retry_count = 2 - while True: - result = job.run( - terminal_command, - ignore_status=True, - timeout=timeout, - io_encoding=io_encoding, - ) - output = result.stdout - - # Check for a connected message to prevent false negatives. - valid_connection = re.search( - "^CONNECTED: %s" % identifier, output, flags=re.MULTILINE - ) - if valid_connection: - # Remove the first line that contains the connect message. - line_index = output.find("\n") + 1 - if line_index == 0: - line_index = len(output) - real_output = output[line_index:].encode(io_encoding) - - result = job.Result( - command=result.command, - stdout=real_output, - stderr=result._raw_stderr, - exit_status=result.exit_status, - duration=result.duration, - did_timeout=result.did_timeout, - encoding=io_encoding, - ) - if result.exit_status and not ignore_status: - raise job.Error(result) - return result - - error_string = result.stderr - - had_dns_failure = result.exit_status == 255 and re.search( - r"^ssh: .*: Name or service not known", error_string, flags=re.MULTILINE - ) - if had_dns_failure: - dns_retry_count -= 1 - if not dns_retry_count: - raise Error("DNS failed to find host.", result) - self.log.debug("Failed to connect to host, retrying...") - else: - break - - had_timeout = re.search( - r"^ssh: connect to host .* port .*: " r"Connection timed out\r$", - error_string, - flags=re.MULTILINE, - ) - if had_timeout: - raise Error("Ssh timed out.", result) - - permission_denied = "Permission denied" in error_string - if permission_denied: - raise Error("Permission denied.", result) - - unknown_host = re.search( - r"ssh: Could not resolve hostname .*: " r"Name 
or service not known", - error_string, - flags=re.MULTILINE, - ) - if unknown_host: - raise Error("Unknown host.", result) - - self.log.error("An unknown error has occurred. Job result: %s" % result) - ping_output = job.run( - "ping %s -c 3 -w 1" % self._settings.hostname, ignore_status=True - ) - self.log.error("Ping result: %s" % ping_output) - if attempts > 1: - self._cleanup_master_ssh() - self.run(command, timeout, ignore_status, env, io_encoding, attempts - 1) - raise Error("The job failed for unknown reasons.", result) - - def run_async(self, command, env=None): - """Starts up a background command over ssh. - - Will ssh to a remote host and startup a command. This method will - block until there is confirmation that the remote command has started. - - Args: - command: The command to execute over ssh. Can be either a string - or a list. - env: A dictionary of environment variables to setup on the remote - host. - - Returns: - The result of the command to launch the background job. - - Raises: - CmdTimeoutError: When the remote command took to long to execute. - SshTimeoutError: When the connection took to long to established. - SshPermissionDeniedError: When permission is not allowed on the - remote host. - """ - return self.run( - f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!", env=env - ) - - def close(self): - """Clean up open connections to remote host.""" - self._cleanup_master_ssh() - while self._tunnels: - self.close_ssh_tunnel(self._tunnels[0].local_port) - - def _cleanup_master_ssh(self): - """ - Release all resources (process, temporary directory) used by an active - master SSH connection. - """ - # If a master SSH connection is running, kill it. - if self._master_ssh_proc is not None: - self.log.debug("Nuking master_ssh_job.") - self._master_ssh_proc.kill() - self._master_ssh_proc.wait() - self._master_ssh_proc = None - - # Remove the temporary directory for the master SSH socket. 
- if self._master_ssh_tempdir is not None: - self.log.debug("Cleaning master_ssh_tempdir.") - shutil.rmtree(self._master_ssh_tempdir) - self._master_ssh_tempdir = None - - def create_ssh_tunnel(self, port, local_port=None): - """Create an ssh tunnel from local_port to port. - - This securely forwards traffic from local_port on this machine to the - remote SSH host at port. - - Args: - port: remote port on the host. - local_port: local forwarding port, or None to pick an available - port. - - Returns: - the created tunnel process. - """ - if not local_port: - local_port = host_utils.get_available_host_port() - else: - for tunnel in self._tunnels: - if tunnel.remote_port == port: - return tunnel.local_port - - extra_flags = { - "-n": None, # Read from /dev/null for stdin - "-N": None, # Do not execute a remote command - "-q": None, # Suppress warnings and diagnostic commands - "-L": "%d:localhost:%d" % (local_port, port), - } - extra_options = dict() - if self._master_ssh_proc: - extra_options["ControlPath"] = self.socket_path - tunnel_cmd = self._formatter.format_ssh_local_command( - self._settings, extra_flags=extra_flags, extra_options=extra_options - ) - self.log.debug("Full tunnel command: %s", tunnel_cmd) - # Exec the ssh process directly so that when we deliver signals, we - # deliver them straight to the child process. - tunnel_proc = job.run_async(tunnel_cmd) - self.log.debug( - "Started ssh tunnel, local = %d remote = %d, pid = %d", - local_port, - port, - tunnel_proc.pid, - ) - self._tunnels.append(_Tunnel(local_port, port, tunnel_proc)) - return local_port - - def close_ssh_tunnel(self, local_port): - """Close a previously created ssh tunnel of a TCP port. - - Args: - local_port: int port on localhost previously forwarded to the remote - host. - - Returns: - integer port number this port was forwarded to on the remote host or - None if no tunnel was found. 
- """ - idx = None - for i, tunnel in enumerate(self._tunnels): - if tunnel.local_port == local_port: - idx = i - break - if idx is not None: - tunnel = self._tunnels.pop(idx) - tunnel.proc.kill() - tunnel.proc.wait() - return tunnel.remote_port - return None - - def send_file(self, local_path, remote_path, ignore_status=False): - """Send a file from the local host to the remote host. - - Args: - local_path: string path of file to send on local host. - remote_path: string path to copy file to on remote host. - ignore_status: Whether or not to ignore the command's exit_status. - """ - # TODO: This may belong somewhere else: b/32572515 - user_host = self._formatter.format_host_name(self._settings) - job.run( - "scp %s %s:%s" % (local_path, user_host, remote_path), - ignore_status=ignore_status, - ) - - def pull_file(self, local_path, remote_path, ignore_status=False): - """Send a file from remote host to local host - - Args: - local_path: string path of file to recv on local host - remote_path: string path to copy file from on remote host. - ignore_status: Whether or not to ignore the command's exit_status. - """ - user_host = self._formatter.format_host_name(self._settings) - job.run( - "scp %s:%s %s" % (user_host, remote_path, local_path), - ignore_status=ignore_status, - ) - - def find_free_port(self, interface_name="localhost"): - """Find a unused port on the remote host. - - Note that this method is inherently racy, since it is impossible - to promise that the remote port will remain free. - - Args: - interface_name: string name of interface to check whether a - port is used against. - - Returns: - integer port number on remote interface that was free. - """ - # TODO: This may belong somewhere else: b/3257251 - free_port_cmd = ( - 'python -c "import socket; s=socket.socket(); ' - "s.bind(('%s', 0)); print(s.getsockname()[1]); s.close()\"" - ) % interface_name - port = int(self.run(free_port_cmd).stdout) - # Yield to the os to ensure the port gets cleaned up. 
- time.sleep(0.001) - return port
diff --git a/src/antlion/controllers/utils_lib/ssh/formatter.py b/src/antlion/controllers/utils_lib/ssh/formatter.py deleted file mode 100644 index 41450ca..0000000 --- a/src/antlion/controllers/utils_lib/ssh/formatter.py +++ /dev/null
@@ -1,202 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class SshFormatter(object): - """Handles formatting ssh commands. - - Handler for formatting chunks of the ssh command to run. - """ - - def format_ssh_executable(self, settings): - """Format the executable name. - - Formats the executable name as a string. - - Args: - settings: The ssh settings being used. - - Returns: - A string for the ssh executable name. - """ - return settings.executable - - def format_host_name(self, settings): - """Format hostname. - - Formats the hostname to connect to. - - Args: - settings: The ssh settings being used. - - Returns: - A string of the connection host name to connect to. - """ - return "%s@%s" % (settings.username, settings.hostname) - - def format_value(self, value): - """Formats a command line value. - - Takes in a value and formats it so it can be safely used in the - command line. - - Args: - value: The value to format. - - Returns: - A string representation of the formatted value. - """ - if isinstance(value, bool): - return "yes" if value else "no" - - return str(value) - - def format_options_list(self, options): - """Format the option list. - - Formats a dictionary of options into a list of strings to be used - on the command line. - - Args: - options: A dictionary of options. - - Returns: - An iterator of strings that should go on the command line. 
- """ - for option_name in options: - option = options[option_name] - - yield "-o" - yield "%s=%s" % (option_name, self.format_value(option)) - - def format_flag_list(self, flags): - """Format the flags list. - - Formats a dictionary of flags into a list of strings to be used - on the command line. - - Args: - flags: A dictonary of options. - - Returns: - An iterator of strings that should be used on the command line. - """ - for flag_name in flags: - flag = flags[flag_name] - - yield flag_name - if flag is not None: - yield self.format_value(flag) - - def format_ssh_local_command(self, settings, extra_flags={}, extra_options={}): - """Formats the local part of the ssh command. - - Formats the local section of the ssh command. This is the part of the - command that will actual launch ssh on our local machine with the - specified settings. - - Args: - settings: The ssh settings. - extra_flags: Extra flags to inlcude. - extra_options: Extra options to include. - - Returns: - An array of strings that make up the command and its local - arguments. - """ - options = settings.construct_ssh_options() - for extra_option_name in extra_options: - options[extra_option_name] = extra_options[extra_option_name] - options_list = list(self.format_options_list(options)) - - flags = settings.construct_ssh_flags() - for extra_flag_name in extra_flags: - flags[extra_flag_name] = extra_flags[extra_flag_name] - flags_list = list(self.format_flag_list(flags)) - - all_options = options_list + flags_list - host_name = self.format_host_name(settings) - executable = self.format_ssh_executable(settings) - - base_command = [executable] + all_options + [host_name] - - return base_command - - def format_ssh_command( - self, remote_command, settings, extra_flags={}, extra_options={} - ): - """Formats the full ssh command. - - Creates the full format for an ssh command. - - Args: - remote_command: A string that represents the remote command to - execute. - settings: The ssh settings to use. 
- extra_flags: Extra flags to include in the settings. - extra_options: Extra options to include in the settings. - - Returns: - A list of strings that make up the total ssh command. - """ - local_command = self.format_ssh_local_command( - settings, extra_flags, extra_options - ) - - local_command.append(remote_command) - return local_command - - def format_remote_command(self, command, env): - """Formats the remote part of the ssh command. - - Formatts the command that will run on the remote machine. - - Args: - command: string, The command to be executed. - env: Enviroment variables to add to the remote envirment. - - Returns: - A string that represents the command line to execute on the remote - machine. - """ - if not env: - env_str = "" - else: - env_str = "export " - for name in env: - value = env[name] - env_str += "%s=%s " % (name, str(value)) - env_str += ";" - - execution_line = "%s %s;" % (env_str, command) - return execution_line - - def format_command(self, command, env, settings, extra_flags={}, extra_options={}): - """Formats a full command. - - Formats the full command to run in order to run a command on a remote - machine. - - Args: - command: The command to run on the remote machine. Can either be - a string or a list. - env: The enviroment variables to include on the remote machine. - settings: The ssh settings to use. - extra_flags: Extra flags to include with the settings. - extra_options: Extra options to include with the settings. - """ - remote_command = self.format_remote_command(command, env) - return self.format_ssh_command( - remote_command, settings, extra_flags, extra_options - )
diff --git a/src/antlion/controllers/utils_lib/ssh/settings.py b/src/antlion/controllers/utils_lib/ssh/settings.py deleted file mode 100644 index ead5844..0000000 --- a/src/antlion/controllers/utils_lib/ssh/settings.py +++ /dev/null
@@ -1,125 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Create a SshSettings from a dictionary from an ACTS config - -Args: - config dict instance from an ACTS config - -Returns: - An instance of SshSettings or None -""" - -from typing import Dict, Optional, Union - - -class SshSettings(object): - """Contains settings for ssh. - - Container for ssh connection settings. - - Attributes: - username: The name of the user to log in as. - hostname: The name of the host to connect to. - executable: The ssh executable to use. - port: The port to connect through (usually 22). - host_file: The known host file to use. - connect_timeout: How long to wait on a connection before giving a - timeout. - alive_interval: How long between ssh heartbeat signals to keep the - connection alive. 
- """ - - def __init__( - self, - hostname: str, - username: str, - port: int = 22, - host_file: str = "/dev/null", - connect_timeout: int = 30, - alive_interval: int = 300, - executable: str = "/usr/bin/ssh", - identity_file: Optional[str] = None, - ssh_config: Optional[str] = None, - ): - self.username = username - self.hostname = hostname - self.executable = executable - self.port = port - self.host_file = host_file - self.connect_timeout = connect_timeout - self.alive_interval = alive_interval - self.identity_file = identity_file - self.ssh_config = ssh_config - - def construct_ssh_options(self) -> Dict[str, Union[str, int, bool]]: - """Construct the ssh options. - - Constructs a dictionary of option that should be used with the ssh - command. - - Returns: - A dictionary of option name to value. - """ - current_options = {} - current_options["StrictHostKeyChecking"] = False - current_options["UserKnownHostsFile"] = self.host_file - current_options["ConnectTimeout"] = self.connect_timeout - current_options["ServerAliveInterval"] = self.alive_interval - return current_options - - def construct_ssh_flags(self) -> Dict[str, Union[None, str, int]]: - """Construct the ssh flags. - - Constructs what flags should be used in the ssh connection. - - Returns: - A dictionary of flag name to value. If value is none then it is - treated as a binary flag. 
- """ - current_flags = {} - current_flags["-a"] = None - current_flags["-x"] = None - current_flags["-p"] = self.port - if self.identity_file: - current_flags["-i"] = self.identity_file - if self.ssh_config: - current_flags["-F"] = self.ssh_config - return current_flags - - -def from_config(config: Dict[str, Union[str, int]]) -> Optional[SshSettings]: - if config is None: - return None # Having no settings is not an error - - ssh_binary_path = config.get("ssh_binary_path", "/usr/bin/ssh") - user = config.get("user", None) - host = config.get("host", None) - port = config.get("port", 22) - identity_file = config.get("identity_file", None) - ssh_config = config.get("ssh_config", None) - connect_timeout = config.get("connect_timeout", 30) - if user is None or host is None: - raise ValueError( - f"Malformed SSH config did not include user and host keys: {config}" - ) - - return SshSettings( - host, - user, - port=port, - identity_file=identity_file, - ssh_config=ssh_config, - connect_timeout=connect_timeout, - executable=ssh_binary_path, - )
diff --git a/src/antlion/dict_object.py b/src/antlion/dict_object.py deleted file mode 100644 index 9e3288f..0000000 --- a/src/antlion/dict_object.py +++ /dev/null
@@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class DictObject(dict): - """Optional convenient base type for creating simple objects that are - naturally serializable. - - A DictObject provides object-oriented access semantics to a dictionary, - allowing it to look like a class with defined members. By ensuring that - all of the class members are serializable, the object can be serialized - as a dictionary/de-serialized from a dictionary. 
- """ - - def __init__(self, *args, **kwargs): - """Constructor for a dictionary-as-object representation of kwargs - - Args: - args: Currently unused - included for completeness - kwargs: keyword arguments used to construct the underlying dict - - Returns: - Instance of DictObject - """ - super(DictObject, self).update(**kwargs) - - def __getattr__(self, name): - """Returns a key from the superclass dictionary as an attribute - - Args: - name: name of the pseudo class attribute - - Returns: - Dictionary item stored at "name" - - Raises: - AttributeError if the item is not found - """ - try: - return self[name] - except KeyError as ke: - raise AttributeError(ke) - - def __setattr__(self, name, value): - """Updates the value of a key=name to a given value - - Args: - name: name of the pseudo class attribute - value: value of the key - - Raises: - AttributeError if the item is not found - """ - if name in super(DictObject, self).keys(): - super(DictObject, self).__setitem__(name, value) - else: - raise AttributeError("Class does not have attribute {}".format(value)) - - @classmethod - def from_dict(cls, dictionary): - """Factory method for constructing a DictObject from a dictionary - - Args: - dictionary: Dictionary used to construct the DictObject - - Returns: - Instance of DictObject - """ - c = cls() - c.update(dictionary) - return c
diff --git a/src/antlion/error.py b/src/antlion/error.py deleted file mode 100644 index bf69b7d..0000000 --- a/src/antlion/error.py +++ /dev/null
@@ -1,41 +0,0 @@ -"""This class is where error information will be stored. -""" - -from antlion.signals import TestError - - -class ActsError(TestError): - """Base Acts Error""" - - def __init__(self, *args, **kwargs): - class_name = self.__class__.__name__ - self.error_doc = self.__class__.__doc__ - self.error_code = getattr(ActsErrorCode, class_name, ActsErrorCode.UNKNOWN) - extras = dict(**kwargs, error_doc=self.error_doc, error_code=self.error_code) - details = args[0] if len(args) > 0 else "" - super().__init__(details, extras) - - -class ActsErrorCode: - # Framework Errors 0-999 - - UNKNOWN = 0 - - # This error code is used to implement unittests for this class. - ActsError = 100 - AndroidDeviceError = 101 - - # Controllers Errors 1000-3999 - - Sl4aStartError = 1001 - Sl4aApiError = 1002 - Sl4aConnectionError = 1003 - Sl4aProtocolError = 1004 - Sl4aNotInstalledError = 1005 - Sl4aRpcTimeoutError = 1006 - - # Util Errors 4000-9999 - - FastbootError = 9000 - AdbError = 9001 - AdbCommandError = 9002
diff --git a/src/antlion/event/__init__.py b/src/antlion/event/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/event/__init__.py +++ /dev/null
diff --git a/src/antlion/event/decorators.py b/src/antlion/event/decorators.py deleted file mode 100644 index b845dad..0000000 --- a/src/antlion/event/decorators.py +++ /dev/null
@@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from antlion.event.subscription_handle import InstanceSubscriptionHandle -from antlion.event.subscription_handle import StaticSubscriptionHandle -from antlion.event import subscription_bundle - - -def subscribe_static(event_type, event_filter=None, order=0): - """A decorator that subscribes a static or module-level function. - - This function must be registered manually. - """ - - class InnerSubscriptionHandle(StaticSubscriptionHandle): - def __init__(self, func): - super().__init__(event_type, func, event_filter=event_filter, order=order) - - return InnerSubscriptionHandle - - -def subscribe(event_type, event_filter=None, order=0): - """A decorator that subscribes an instance method.""" - - class InnerSubscriptionHandle(InstanceSubscriptionHandle): - def __init__(self, func): - super().__init__(event_type, func, event_filter=event_filter, order=order) - - return InnerSubscriptionHandle - - -def register_static_subscriptions(decorated): - """Registers all static subscriptions in decorated's attributes. - - Args: - decorated: The object being decorated - - Returns: - The decorated. 
- """ - subscription_bundle.create_from_static(decorated).register() - - return decorated - - -def register_instance_subscriptions(obj): - """A decorator that subscribes all instance subscriptions after object init.""" - old_init = obj.__init__ - - def init_replacement(self, *args, **kwargs): - old_init(self, *args, **kwargs) - subscription_bundle.create_from_instance(self).register() - - obj.__init__ = init_replacement - return obj
diff --git a/src/antlion/event/event.py b/src/antlion/event/event.py deleted file mode 100644 index 6cc23ef..0000000 --- a/src/antlion/event/event.py +++ /dev/null
@@ -1,92 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class Event(object): - """The base class for all event objects.""" - - -# TODO(markdr): Move these into test_runner.py -class TestEvent(Event): - """The base class for test-related events.""" - - def __init__(self): - pass - - -class TestCaseEvent(TestEvent): - """The base class for test-case-related events.""" - - def __init__(self, test_class, test_case): - super().__init__() - self.test_class = test_class - self.test_case = test_case - - @property - def test_case_name(self): - return self.test_case - - @property - def test_class_name(self): - return self.test_class.__class__.__name__ - - -class TestCaseSignalEvent(TestCaseEvent): - """The base class for test-case-signal-related events.""" - - def __init__(self, test_class, test_case, test_signal): - super().__init__(test_class, test_case) - self.test_signal = test_signal - - -class TestCaseBeginEvent(TestCaseEvent): - """The event posted when a test case has begun.""" - - -class TestCaseEndEvent(TestCaseSignalEvent): - """The event posted when a test case has ended.""" - - -class TestCaseSkippedEvent(TestCaseSignalEvent): - """The event posted when a test case has been skipped.""" - - -class TestCaseFailureEvent(TestCaseSignalEvent): - """The event posted when a test case has failed.""" - - -class TestCasePassedEvent(TestCaseSignalEvent): - """The event posted when a test case has 
passed.""" - - -class TestClassEvent(TestEvent): - """The base class for test-class-related events""" - - def __init__(self, test_class): - super().__init__() - self.test_class = test_class - - -class TestClassBeginEvent(TestClassEvent): - """The event posted when a test class has begun testing.""" - - -class TestClassEndEvent(TestClassEvent): - """The event posted when a test class has finished testing.""" - - def __init__(self, test_class, result): - super().__init__(test_class) - self.result = result
diff --git a/src/antlion/event/event_bus.py b/src/antlion/event/event_bus.py deleted file mode 100644 index 5488b80..0000000 --- a/src/antlion/event/event_bus.py +++ /dev/null
@@ -1,296 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import bisect -import logging -import inspect -from threading import RLock - -from antlion.event.event_subscription import EventSubscription -from antlion.event.subscription_handle import SubscriptionHandle - - -class _EventBus(object): - """ - Attributes: - _subscriptions: A dictionary of {EventType: list<EventSubscription>}. - _registration_id_map: A dictionary of - {RegistrationID: EventSubscription} - _subscription_lock: The lock to prevent concurrent removal or addition - to events. - """ - - def __init__(self): - self._subscriptions = {} - self._registration_id_map = {} - self._subscription_lock = RLock() - - def register(self, event_type, func, filter_fn=None, order=0): - """Subscribes the given function to the event type given. - - Args: - event_type: The type of the event to subscribe to. - func: The function to call when the event is posted. - filter_fn: An option function to be called before calling the - subscribed func. If this function returns falsy, then the - function will not be invoked. - order: The order the the subscription should run in. Lower values - run first, with the default value set to 0. In the case of a - tie between two subscriptions of the same event type, the - subscriber added first executes first. 
In the case of a tie - between two subscribers of a different type, the type of the - subscription that is more specific goes first (i.e. - BaseEventType will execute after ChildEventType if they share - the same order). - - Returns: - A registration ID. - """ - subscription = EventSubscription( - event_type, func, event_filter=filter_fn, order=order - ) - return self.register_subscription(subscription) - - def register_subscriptions(self, subscriptions): - """Registers all subscriptions to the event bus. - - Args: - subscriptions: an iterable that returns EventSubscriptions - - Returns: - The list of registration IDs. - """ - registration_ids = [] - for subscription in subscriptions: - registration_ids.append(self.register_subscription(subscription)) - - return registration_ids - - def register_subscription(self, subscription): - """Registers the given subscription to the event bus. - - Args: - subscription: An EventSubscription object - - Returns: - A registration ID. - """ - with self._subscription_lock: - if subscription.event_type in self._subscriptions.keys(): - subscription_list = self._subscriptions[subscription.event_type] - subscription_list.append(subscription) - subscription_list.sort(key=lambda x: x.order) - else: - subscription_list = list() - bisect.insort(subscription_list, subscription) - self._subscriptions[subscription.event_type] = subscription_list - - registration_id = id(subscription) - self._registration_id_map[registration_id] = subscription - - return registration_id - - def post(self, event, ignore_errors=False): - """Posts an event to its subscribers. - - Args: - event: The event object to send to the subscribers. - ignore_errors: Deliver to all subscribers, ignoring any errors. 
- """ - listening_subscriptions = [] - for current_type in inspect.getmro(type(event)): - if current_type not in self._subscriptions.keys(): - continue - for subscription in self._subscriptions[current_type]: - listening_subscriptions.append(subscription) - - # The subscriptions will be collected in sorted runs of sorted order. - # Running timsort here is the optimal way to sort this list. - listening_subscriptions.sort(key=lambda x: x.order) - for subscription in listening_subscriptions: - try: - subscription.deliver(event) - except Exception: - if ignore_errors: - logging.exception( - "An exception occurred while handling " "an event." - ) - continue - raise - - def unregister(self, registration_id): - """Unregisters an EventSubscription. - - Args: - registration_id: the Subscription or registration_id to unsubscribe. - """ - if type(registration_id) is SubscriptionHandle: - subscription = registration_id.subscription - registration_id = id(registration_id.subscription) - elif type(registration_id) is EventSubscription: - subscription = registration_id - registration_id = id(registration_id) - elif registration_id in self._registration_id_map.keys(): - subscription = self._registration_id_map[registration_id] - elif type(registration_id) is not int: - raise ValueError( - 'Subscription ID "%s" is not a valid ID. This value' - "must be an integer ID returned from subscribe()." % registration_id - ) - else: - # The value is a "valid" id, but is not subscribed. It's possible - # another thread has unsubscribed this value. - logging.warning( - "Attempted to unsubscribe %s, but the matching " - "subscription cannot be found." 
% registration_id - ) - return False - - event_type = subscription.event_type - with self._subscription_lock: - self._registration_id_map.pop(registration_id, None) - if ( - event_type in self._subscriptions - and subscription in self._subscriptions[event_type] - ): - self._subscriptions[event_type].remove(subscription) - return True - - def unregister_all(self, from_list=None, from_event=None): - """Removes all event subscriptions. - - Args: - from_list: Unregisters all events from a given list. - from_event: Unregisters all events of a given event type. - """ - if from_list is None: - from_list = list(self._registration_id_map.values()) - - for subscription in from_list: - if from_event is None or subscription.event_type == from_event: - self.unregister(subscription) - - -_event_bus = _EventBus() - - -def register(event_type, func, filter_fn=None, order=0): - """Subscribes the given function to the event type given. - - Args: - event_type: The type of the event to subscribe to. - func: The function to call when the event is posted. - filter_fn: An option function to be called before calling the subscribed - func. If this function returns falsy, then the function will - not be invoked. - order: The order the the subscription should run in. Lower values run - first, with the default value set to 0. In the case of a tie - between two subscriptions of the same event type, the - subscriber added first executes first. In the case of a tie - between two subscribers of a different type, the type of the - subscription that is more specific goes first (i.e. BaseEventType - will execute after ChildEventType if they share the same order). - - Returns: - A registration ID. - """ - return _event_bus.register(event_type, func, filter_fn=filter_fn, order=order) - - -def register_subscriptions(subscriptions): - """Registers all subscriptions to the event bus. - - Args: - subscriptions: an iterable that returns EventSubscriptions - - Returns: - The list of registration IDs. 
- """ - return _event_bus.register_subscriptions(subscriptions) - - -def register_subscription(subscription): - """Registers the given subscription to the event bus. - - Args: - subscription: An EventSubscription object - - Returns: - A registration ID. - """ - return _event_bus.register_subscription(subscription) - - -def post(event, ignore_errors=False): - """Posts an event to its subscribers. - - Args: - event: The event object to send to the subscribers. - ignore_errors: Deliver to all subscribers, ignoring any errors. - """ - _event_bus.post(event, ignore_errors) - - -def unregister(registration_id): - """Unregisters an EventSubscription. - - Args: - registration_id: the Subscription or registration_id to unsubscribe. - """ - # null check for the corner case where the _event_bus is destroyed before - # the subscribers unregister. In such case there is nothing else to - # be done. - if _event_bus is None: - return True - return _event_bus.unregister(registration_id) - - -def unregister_all(from_list=None, from_event=None): - """Removes all event subscriptions. - - Args: - from_list: Unregisters all events from a given list. - from_event: Unregisters all events of a given event type. - """ - return _event_bus.unregister_all(from_list=from_list, from_event=from_event) - - -class listen_for(object): - """A context-manager class (with statement) for listening to an event within - a given section of code. 
- - Usage: - - with listen_for(EventType, event_listener): - func_that_posts_event() # Will call event_listener - - func_that_posts_event() # Will not call event_listener - - """ - - def __init__(self, event_type, func, filter_fn=None, order=0): - self.event_type = event_type - self.func = func - self.filter_fn = filter_fn - self.order = order - self.registration_id = None - - def __enter__(self): - self.registration_id = _event_bus.register( - self.event_type, self.func, filter_fn=self.filter_fn, order=self.order - ) - - def __exit__(self, *unused): - _event_bus.unregister(self.registration_id)
diff --git a/src/antlion/event/event_subscription.py b/src/antlion/event/event_subscription.py deleted file mode 100644 index ee8720c..0000000 --- a/src/antlion/event/event_subscription.py +++ /dev/null
@@ -1,50 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class EventSubscription(object): - """A class that defines the way a function is subscribed to an event. - - Attributes: - event_type: The type of the event. - _func: The subscribed function. - _event_filter: A lambda that returns True if an event should be passed - to the subscribed function. - order: The order value in which this subscription should be called. - """ - - def __init__(self, event_type, func, event_filter=None, order=0): - self._event_type = event_type - self._func = func - self._event_filter = event_filter - self.order = order - - @property - def event_type(self): - return self._event_type - - def deliver(self, event): - """Delivers an event to the subscriber. - - This function will not deliver the event if the event filter rejects the - event. - - Args: - event: The event to send to the subscriber. - """ - if self._event_filter and not self._event_filter(event): - return - self._func(event)
diff --git a/src/antlion/event/subscription_bundle.py b/src/antlion/event/subscription_bundle.py deleted file mode 100644 index ac3cfb6..0000000 --- a/src/antlion/event/subscription_bundle.py +++ /dev/null
@@ -1,156 +0,0 @@ -import logging -import threading - -from antlion.event import event_bus -from antlion.event.event_subscription import EventSubscription -from antlion.event.subscription_handle import InstanceSubscriptionHandle -from antlion.event.subscription_handle import SubscriptionHandle -from antlion.event.subscription_handle import StaticSubscriptionHandle - - -class SubscriptionBundle(object): - """A class for maintaining a set of EventSubscriptions in the event bus. - - Attributes: - subscriptions: A dictionary of {EventSubscription: RegistrationID} - """ - - def __init__(self): - self.subscriptions = {} - self._subscription_lock = threading.Lock() - self._registered = False - - @property - def registered(self): - """True if this SubscriptionBundle has been registered.""" - return self._registered - - def add(self, event_type, func, event_filter=None, order=0): - """Adds a new Subscription to this SubscriptionBundle. - - If this SubscriptionBundle is registered, the added Subscription will - also be registered. - - Returns: - the EventSubscription object created. - """ - subscription = EventSubscription( - event_type, func, event_filter=event_filter, order=order - ) - return self.add_subscription(subscription) - - def add_subscription(self, subscription): - """Adds an existing Subscription to the subscription bundle. - - If this SubscriptionBundle is registered, the added subscription will - also be registered. - - Returns: - the subscription object. - """ - registration_id = None - with self._subscription_lock: - if self.registered: - registration_id = event_bus.register_subscription(subscription) - - self.subscriptions[subscription] = registration_id - return subscription - - def remove_subscription(self, subscription): - """Removes a subscription from the SubscriptionBundle. - - If the SubscriptionBundle is registered, removing the subscription will - also unregister it. 
- """ - if subscription not in self.subscriptions.keys(): - return False - with self._subscription_lock: - if self.registered: - event_bus.unregister(self.subscriptions[subscription]) - del self.subscriptions[subscription] - return True - - def register(self): - """Registers all subscriptions found within this object.""" - if self.registered: - return - with self._subscription_lock: - self._registered = True - for subscription, registration_id in self.subscriptions.items(): - if registration_id is not None: - logging.warning( - "Registered subscription found in " - "unregistered SubscriptionBundle: %s, %s" - % (subscription, registration_id) - ) - self.subscriptions[subscription] = event_bus.register_subscription( - subscription - ) - - def unregister(self): - """Unregisters all subscriptions managed by this SubscriptionBundle.""" - if not self.registered: - return - with self._subscription_lock: - self._registered = False - for subscription, registration_id in self.subscriptions.items(): - if registration_id is None: - logging.warning( - "Unregistered subscription found in " - "registered SubscriptionBundle: %s, %s" - % (subscription, registration_id) - ) - event_bus.unregister(subscription) - self.subscriptions[subscription] = None - - -def create_from_static(obj): - """Generates a SubscriptionBundle from @subscribe_static functions on obj. - - Args: - obj: The object that contains @subscribe_static functions. Can either - be a module or a class. - - Returns: - An unregistered SubscriptionBundle. - """ - return _create_from_object(obj, obj, StaticSubscriptionHandle) - - -def create_from_instance(instance): - """Generates a SubscriptionBundle from an instance's @subscribe functions. - - Args: - instance: The instance object that contains @subscribe functions. - - Returns: - An unregistered SubscriptionBundle. 
- """ - return _create_from_object(instance, instance.__class__, InstanceSubscriptionHandle) - - -def _create_from_object(obj, obj_to_search, subscription_handle_type): - """Generates a SubscriptionBundle from an object's SubscriptionHandles. - - Note that instance variables do not have the class's functions as direct - attributes. The attributes are resolved from the type of the object. Here, - we need to search through the instance's class to find the correct types, - and subscribe the instance-specific subscriptions. - - Args: - obj: The object that contains SubscriptionHandles. - obj_to_search: The class to search for SubscriptionHandles from. - subscription_handle_type: The type of the SubscriptionHandles to - capture. - - Returns: - An unregistered SubscriptionBundle. - """ - bundle = SubscriptionBundle() - for attr_name, attr_value in obj_to_search.__dict__.items(): - if isinstance(attr_value, subscription_handle_type): - bundle.add_subscription(getattr(obj, attr_name).subscription) - if isinstance(attr_value, staticmethod): - if isinstance(getattr(obj, attr_name), subscription_handle_type): - bundle.add_subscription(getattr(obj, attr_name).subscription) - return bundle
diff --git a/src/antlion/event/subscription_handle.py b/src/antlion/event/subscription_handle.py deleted file mode 100644 index 6aa9c3c..0000000 --- a/src/antlion/event/subscription_handle.py +++ /dev/null
@@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from antlion.event.event_subscription import EventSubscription - - -class SubscriptionHandle(object): - """The object created by a method decorated with an event decorator.""" - - def __init__(self, event_type, func, event_filter=None, order=0): - self._event_type = event_type - self._func = func - self._event_filter = event_filter - self._order = order - self._subscription = None - self._owner = None - - @property - def subscription(self): - if self._subscription: - return self._subscription - self._subscription = EventSubscription( - self._event_type, - self._func, - event_filter=self._event_filter, - order=self._order, - ) - return self._subscription - - def __get__(self, instance, owner): - # If our owner has been initialized, or do not have an instance owner, - # return self. - if self._owner is not None or instance is None: - return self - - # Otherwise, we create a new SubscriptionHandle that will only be used - # for the instance that owns this SubscriptionHandle. 
- ret = SubscriptionHandle( - self._event_type, self._func, self._event_filter, self._order - ) - ret._owner = instance - ret._func = ret._wrap_call(ret._func) - for attr, value in owner.__dict__.items(): - if value is self: - setattr(instance, attr, ret) - break - return ret - - def _wrap_call(self, func): - def _wrapped_call(*args, **kwargs): - if self._owner is None: - return func(*args, **kwargs) - else: - return func(self._owner, *args, **kwargs) - - return _wrapped_call - - def __call__(self, *args, **kwargs): - return self._func(*args, **kwargs) - - -class InstanceSubscriptionHandle(SubscriptionHandle): - """A SubscriptionHandle for instance methods.""" - - -class StaticSubscriptionHandle(SubscriptionHandle): - """A SubscriptionHandle for static methods."""
diff --git a/src/antlion/keys.py b/src/antlion/keys.py deleted file mode 100644 index b545d44..0000000 --- a/src/antlion/keys.py +++ /dev/null
@@ -1,119 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import enum - -"""This module has the global key values that are used across framework -modules. -""" - - -class Config(enum.Enum): - """Enum values for test config related lookups.""" - - # Keys used to look up values from test config files. - # These keys define the wording of test configs and their internal - # references. - key_log_path = "logpath" - key_testbeds_under_test = "testbeds_under_test" - key_testbed = "testbed" - key_testbed_name = "name" - # configpath is the directory. key_config_full_path is the file path. - key_config_path = "configpath" - key_config_full_path = "config_full_path" - key_test_paths = "testpaths" - key_port = "Port" - key_address = "Address" - key_test_case_iterations = "test_case_iterations" - key_test_failure_tracebacks = "test_failure_tracebacks" - # Config names for controllers packaged in ACTS. - key_access_point = "AccessPoint" - key_android_device = "AndroidDevice" - key_attenuator = "Attenuator" - key_bluetooth_pts_device = "BluetoothPtsDevice" - key_fuchsia_device = "FuchsiaDevice" - key_iperf_client = "IPerfClient" - key_iperf_server = "IPerfServer" - key_openwrt_ap = "OpenWrtAP" - key_packet_capture = "PacketCapture" - key_packet_sender = "PacketSender" - key_pdu = "PduDevice" - key_sniffer = "Sniffer" - # Internal keys, used internally, not exposed to user's config files. 
- ikey_user_param = "user_params" - ikey_testbed_name = "testbed_name" - ikey_logger = "log" - ikey_logpath = "log_path" - ikey_summary_writer = "summary_writer" - # module name of controllers packaged in ACTS. - m_key_access_point = "access_point" - m_key_android_device = "android_device" - m_key_attenuator = "attenuator" - m_key_bluetooth_pts_device = "bluetooth_pts_device" - m_key_fuchsia_device = "fuchsia_device" - m_key_iperf_client = "iperf_client" - m_key_iperf_server = "iperf_server" - m_key_openwrt_ap = "openwrt_ap" - m_key_packet_capture = "packet_capture" - m_key_packet_sender = "packet_sender" - m_key_pdu = "pdu" - m_key_sniffer = "sniffer" - - # A list of keys whose values in configs should not be passed to test - # classes without unpacking first. - reserved_keys = (key_testbed, key_log_path, key_test_paths) - - # Controller names packaged with ACTS. - builtin_controller_names = [ - key_access_point, - key_android_device, - key_attenuator, - key_bluetooth_pts_device, - key_fuchsia_device, - key_iperf_client, - key_iperf_server, - key_openwrt_ap, - key_packet_capture, - key_packet_sender, - key_pdu, - key_sniffer, - ] - - -def get_name_by_value(value): - for name, member in Config.__members__.items(): - if member.value == value: - return name - return None - - -def get_module_name(name_in_config): - """Translates the name of a controller in config file to its module name.""" - return value_to_value(name_in_config, "m_%s") - - -def value_to_value(ref_value, pattern): - """Translates the value of a key to the value of its corresponding key. The - corresponding key is chosen based on the variable name pattern. - """ - ref_key_name = get_name_by_value(ref_value) - if not ref_key_name: - return None - target_key_name = pattern % ref_key_name - try: - return getattr(Config, target_key_name).value - except AttributeError: - return None
diff --git a/src/antlion/libs/__init__.py b/src/antlion/libs/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/libs/__init__.py +++ /dev/null
diff --git a/src/antlion/libs/logging/__init__.py b/src/antlion/libs/logging/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/libs/logging/__init__.py +++ /dev/null
diff --git a/src/antlion/libs/logging/log_stream.py b/src/antlion/libs/logging/log_stream.py deleted file mode 100644 index 27aa077..0000000 --- a/src/antlion/libs/logging/log_stream.py +++ /dev/null
@@ -1,460 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import logging -import os -import sys -from logging import FileHandler -from logging import Handler -from logging import StreamHandler -from logging.handlers import RotatingFileHandler - -from antlion import context -from antlion.context import ContextLevel -from antlion.event import event_bus -from antlion.event.decorators import subscribe_static - - -# yapf: disable -class LogStyles: - NONE = 0x00 - LOG_DEBUG = 0x01 - LOG_INFO = 0x02 - LOG_WARNING = 0x04 - LOG_ERROR = 0x08 - LOG_CRITICAL = 0x10 - - DEFAULT_LEVELS = LOG_DEBUG + LOG_INFO + LOG_ERROR - ALL_LEVELS = LOG_DEBUG + LOG_INFO + LOG_WARNING + LOG_ERROR + LOG_CRITICAL - - MONOLITH_LOG = 0x0100 - TESTCLASS_LOG = 0x0200 - TESTCASE_LOG = 0x0400 - TO_STDOUT = 0x0800 - TO_ACTS_LOG = 0x1000 - ROTATE_LOGS = 0x2000 - - ALL_FILE_LOGS = MONOLITH_LOG + TESTCLASS_LOG + TESTCASE_LOG - - LEVEL_NAMES = { - LOG_DEBUG: 'debug', - LOG_INFO: 'info', - LOG_WARNING: 'warning', - LOG_ERROR: 'error', - LOG_CRITICAL: 'critical', - } - - LOG_LEVELS = [ - LOG_DEBUG, - LOG_INFO, - LOG_WARNING, - LOG_ERROR, - LOG_CRITICAL, - ] - - LOG_LOCATIONS = [ - TO_STDOUT, - TO_ACTS_LOG, - MONOLITH_LOG, - TESTCLASS_LOG, - TESTCASE_LOG - ] - - LEVEL_TO_NO = { - LOG_DEBUG: logging.DEBUG, - LOG_INFO: logging.INFO, - LOG_WARNING: logging.WARNING, - LOG_ERROR: logging.ERROR, - LOG_CRITICAL: logging.CRITICAL, - } - - 
LOCATION_TO_CONTEXT_LEVEL = { - MONOLITH_LOG: ContextLevel.ROOT, - TESTCLASS_LOG: ContextLevel.TESTCLASS, - TESTCASE_LOG: ContextLevel.TESTCASE - } -# yapf: enable - -_log_streams = dict() -_null_handler = logging.NullHandler() - - -@subscribe_static(context.NewContextEvent) -def _update_handlers(event): - for log_stream in _log_streams.values(): - log_stream.update_handlers(event) - - -event_bus.register_subscription(_update_handlers.subscription) - - -def create_logger( - name, - log_name=None, - base_path="", - subcontext="", - log_styles=LogStyles.NONE, - stream_format=None, - file_format=None, -): - """Creates a Python Logger object with the given attributes. - - Creation through this method will automatically manage the logger in the - background for test-related events, such as TestCaseBegin and TestCaseEnd - Events. - - Args: - name: The name of the LogStream. Used as the file name prefix. - log_name: The name of the underlying logger. Use LogStream name as - default. - base_path: The base path used by the logger. - subcontext: Location of logs relative to the test context path. - log_styles: An integer or array of integers that are the sum of - corresponding flag values in LogStyles. Examples include: - - >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG - - >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG - - >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG] - >>> LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG] - stream_format: Format used for log output to stream - file_format: Format used for log output to files - """ - if name in _log_streams: - _log_streams[name].cleanup() - log_stream = _LogStream( - name, log_name, base_path, subcontext, log_styles, stream_format, file_format - ) - _set_logger(log_stream) - return log_stream.logger - - -def _set_logger(log_stream): - _log_streams[log_stream.name] = log_stream - return log_stream - - -class AlsoToLogHandler(Handler): - """Logs a message at a given level also to another logger. 
- - Used for logging messages at a high enough level to the main log, or another - logger. - """ - - def __init__(self, to_logger=None, *args, **kwargs): - super().__init__(*args, **kwargs) - self._log = logging.getLogger(to_logger) - - def emit(self, record): - self._log.log(record.levelno, record.getMessage()) - - -class MovableFileHandler(FileHandler): - """FileHandler implementation that allows the output file to be changed - during operation. - """ - - def set_file(self, file_name): - """Set the target output file to file_name. - - Args: - file_name: path to the new output file - """ - self.baseFilename = os.path.abspath(file_name) - if self.stream is not None: - new_stream = self._open() - # An atomic operation redirects the output and closes the old file - os.dup2(new_stream.fileno(), self.stream.fileno()) - self.stream = new_stream - - -class MovableRotatingFileHandler(RotatingFileHandler): - """RotatingFileHandler implementation that allows the output file to be - changed during operation. Rotated files will automatically adopt the newest - output path. - """ - - set_file = MovableFileHandler.set_file - - -class InvalidStyleSetError(Exception): - """Raised when the given LogStyles are an invalid set.""" - - -class _LogStream(object): - """A class that sets up a logging.Logger object. - - The LogStream class creates a logging.Logger object. LogStream is also - responsible for managing the logger when events take place, such as - TestCaseEndedEvents and TestCaseBeginEvents. - - Attributes: - name: The name of the LogStream. - logger: The logger created by this LogStream. - base_path: The base path used by the logger. Use logging.log_path - as default. - subcontext: Location of logs relative to the test context path. 
- stream_format: Format used for log output to stream - file_format: Format used for log output to files - """ - - def __init__( - self, - name, - log_name=None, - base_path="", - subcontext="", - log_styles=LogStyles.NONE, - stream_format=None, - file_format=None, - ): - """Creates a LogStream. - - Args: - name: The name of the LogStream. Used as the file name prefix. - log_name: The name of the underlying logger. Use LogStream name - as default. - base_path: The base path used by the logger. Use logging.log_path - as default. - subcontext: Location of logs relative to the test context path. - log_styles: An integer or array of integers that are the sum of - corresponding flag values in LogStyles. Examples include: - - >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG - - >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG - - >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG] - >>> LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG] - stream_format: Format used for log output to stream - file_format: Format used for log output to files - """ - self.name = name - if log_name is not None: - self.logger = logging.getLogger(log_name) - else: - self.logger = logging.getLogger(name) - # Add a NullHandler to suppress unwanted console output - self.logger.addHandler(_null_handler) - self.logger.propagate = False - self.base_path = base_path or getattr(logging, "log_path", "/tmp/acts_logs") - self.subcontext = subcontext - context.TestContext.add_base_output_path(self.logger.name, self.base_path) - context.TestContext.add_subcontext(self.logger.name, self.subcontext) - self.stream_format = stream_format - self.file_format = file_format - self._testclass_handlers = [] - self._testcase_handlers = [] - if not isinstance(log_styles, list): - log_styles = [log_styles] - self.__validate_styles(log_styles) - for log_style in log_styles: - self.__handle_style(log_style) - - @staticmethod - def __validate_styles(_log_styles_list): - """Determines if the given list of styles is valid. 
- - Terminology: - Log-level: any of [DEBUG, INFO, WARNING, ERROR, CRITICAL]. - Log Location: any of [MONOLITH_LOG, TESTCLASS_LOG, - TESTCASE_LOG, TO_STDOUT, TO_ACTS_LOG]. - - Styles are invalid when any of the below criteria are met: - A log-level is not set within an element of the list. - A log location is not set within an element of the list. - A log-level, log location pair appears twice within the list. - A log-level has both TESTCLASS and TESTCASE locations set - within the list. - ROTATE_LOGS is set without MONOLITH_LOG, - TESTCLASS_LOG, or TESTCASE_LOG. - - Raises: - InvalidStyleSetError if the given style cannot be achieved. - """ - - def invalid_style_error(message): - raise InvalidStyleSetError( - "{LogStyle Set: %s} %s" % (_log_styles_list, message) - ) - - # Store the log locations that have already been set per level. - levels_dict = {} - for log_style in _log_styles_list: - for level in LogStyles.LOG_LEVELS: - if log_style & level: - levels_dict[level] = levels_dict.get(level, LogStyles.NONE) - # Check that a log-level, log location pair has not yet - # been set. - for log_location in LogStyles.LOG_LOCATIONS: - if log_style & log_location: - if log_location & levels_dict[level]: - invalid_style_error( - "The log location %s for log level %s has " - "been set multiple times" % (log_location, level) - ) - else: - levels_dict[level] |= log_location - # Check that for a given log-level, not more than one - # of MONOLITH_LOG, TESTCLASS_LOG, TESTCASE_LOG is set. - locations = levels_dict[level] & LogStyles.ALL_FILE_LOGS - valid_locations = [ - LogStyles.TESTCASE_LOG, - LogStyles.TESTCLASS_LOG, - LogStyles.MONOLITH_LOG, - LogStyles.NONE, - ] - if locations not in valid_locations: - invalid_style_error( - "More than one of MONOLITH_LOG, TESTCLASS_LOG, " - "TESTCASE_LOG is set for log level %s." % level - ) - if log_style & LogStyles.ALL_LEVELS == 0: - invalid_style_error( - "LogStyle %s needs to set a log " "level." 
% log_style - ) - if log_style & ~LogStyles.ALL_LEVELS == 0: - invalid_style_error( - "LogStyle %s needs to set a log " "location." % log_style - ) - if log_style & LogStyles.ROTATE_LOGS and not log_style & ( - LogStyles.MONOLITH_LOG - | LogStyles.TESTCLASS_LOG - | LogStyles.TESTCASE_LOG - ): - invalid_style_error( - "LogStyle %s has ROTATE_LOGS set, but does " - "not specify a log type." % log_style - ) - - @staticmethod - def __create_rotating_file_handler(filename): - """Generates a callable to create an appropriate RotatingFileHandler.""" - # Magic number explanation: 10485760 == 10MB - return MovableRotatingFileHandler(filename, maxBytes=10485760, backupCount=5) - - @staticmethod - def __get_file_handler_creator(log_style): - """Gets the callable to create the correct FileLogHandler.""" - create_file_handler = MovableFileHandler - if log_style & LogStyles.ROTATE_LOGS: - create_file_handler = _LogStream.__create_rotating_file_handler - return create_file_handler - - @staticmethod - def __get_lowest_log_level(log_style): - """Returns the lowest log level's LogStyle for the given log_style.""" - for log_level in LogStyles.LOG_LEVELS: - if log_level & log_style: - return log_level - return LogStyles.NONE - - def __get_current_output_dir(self, depth=ContextLevel.TESTCASE): - """Gets the current output directory from the context system. Make the - directory if it doesn't exist. - - Args: - depth: The desired level of the output directory. For example, - the TESTCLASS level would yield the directory associated with - the current test class context, even if the test is currently - within a test case. - """ - curr_context = context.get_current_context(depth) - return curr_context.get_full_output_path(self.logger.name) - - def __create_handler(self, creator, level, location): - """Creates the FileHandler. - - Args: - creator: The callable that creates the FileHandler - level: The logging level (INFO, DEBUG, etc.) for this handler. 
- location: The log location (MONOLITH, TESTCLASS, TESTCASE) for this - handler. - - Returns: A FileHandler - """ - directory = self.__get_current_output_dir( - LogStyles.LOCATION_TO_CONTEXT_LEVEL[location] - ) - base_name = "%s_%s.txt" % (self.name, LogStyles.LEVEL_NAMES[level]) - handler = creator(os.path.join(directory, base_name)) - handler.setLevel(LogStyles.LEVEL_TO_NO[level]) - if self.file_format: - handler.setFormatter(self.file_format) - return handler - - def __handle_style(self, log_style): - """Creates the handlers described in the given log_style.""" - handler_creator = self.__get_file_handler_creator(log_style) - - # Handle streaming logs to STDOUT or the ACTS Logger - if log_style & (LogStyles.TO_ACTS_LOG | LogStyles.TO_STDOUT): - lowest_log_level = self.__get_lowest_log_level(log_style) - - if log_style & LogStyles.TO_ACTS_LOG: - handler = AlsoToLogHandler() - else: # LogStyles.TO_STDOUT: - handler = StreamHandler(sys.stdout) - if self.stream_format: - handler.setFormatter(self.stream_format) - - handler.setLevel(LogStyles.LEVEL_TO_NO[lowest_log_level]) - self.logger.addHandler(handler) - - # Handle streaming logs to log-level files - for log_level in LogStyles.LOG_LEVELS: - log_location = log_style & LogStyles.ALL_FILE_LOGS - if not (log_style & log_level and log_location): - continue - - handler = self.__create_handler(handler_creator, log_level, log_location) - self.logger.addHandler(handler) - - if log_style & LogStyles.TESTCLASS_LOG: - self._testclass_handlers.append(handler) - if log_style & LogStyles.TESTCASE_LOG: - self._testcase_handlers.append(handler) - - def __remove_handler(self, handler): - """Removes a handler from the logger, unless it's a NullHandler.""" - if handler is not _null_handler: - handler.close() - self.logger.removeHandler(handler) - - def update_handlers(self, event): - """Update the output file paths for log handlers upon a change in - the test context. - - Args: - event: An instance of NewContextEvent. 
- """ - handlers = [] - if isinstance(event, context.NewTestClassContextEvent): - handlers = self._testclass_handlers + self._testcase_handlers - if isinstance(event, context.NewTestCaseContextEvent): - handlers = self._testcase_handlers - - if not handlers: - return - new_dir = self.__get_current_output_dir() - for handler in handlers: - filename = os.path.basename(handler.baseFilename) - handler.set_file(os.path.join(new_dir, filename)) - - def cleanup(self): - """Removes all LogHandlers from the logger.""" - for handler in self.logger.handlers: - self.__remove_handler(handler)
diff --git a/src/antlion/libs/ota/__init__.py b/src/antlion/libs/ota/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/libs/ota/__init__.py +++ /dev/null
diff --git a/src/antlion/libs/ota/ota_runners/__init__.py b/src/antlion/libs/ota/ota_runners/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/libs/ota/ota_runners/__init__.py +++ /dev/null
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner.py b/src/antlion/libs/ota/ota_runners/ota_runner.py deleted file mode 100644 index 05dba4c..0000000 --- a/src/antlion/libs/ota/ota_runners/ota_runner.py +++ /dev/null
@@ -1,225 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -from zipfile import ZipFile - -"""The setup time in seconds.""" -SL4A_SERVICE_SETUP_TIME = 5 -"""The path to the metadata found within the OTA package.""" -OTA_PACKAGE_METADATA_PATH = "META-INF/com/android/metadata" - - -class OtaError(Exception): - """Raised when an error in the OTA Update process occurs.""" - - -class InvalidOtaUpdateError(OtaError): - """Raised when the update from one version to another is not valid.""" - - -class OtaRunner(object): - """The base class for all OTA Update Runners.""" - - def __init__(self, ota_tool, android_device): - self.ota_tool = ota_tool - self.android_device = android_device - self.serial = self.android_device.serial - - def _update(self): - post_build_id = self.get_post_build_id() - log = self.android_device.log - old_info = self.android_device.adb.getprop("ro.build.fingerprint") - log.info("Starting Update. Beginning build info: %s", old_info) - log.info("Stopping services.") - self.android_device.stop_services() - log.info("Beginning tool.") - self.ota_tool.update(self) - log.info("Tool finished. Waiting for boot completion.") - self.android_device.wait_for_boot_completion() - new_info = self.android_device.adb.getprop("ro.build.fingerprint") - if not old_info or old_info == new_info: - raise OtaError( - "The device was not updated to a new build. " - "Previous build: %s. 
Current build: %s. " - "Expected build: %s" % (old_info, new_info, post_build_id) - ) - log.info("Boot completed. Rooting adb.") - self.android_device.root_adb() - log.info("Root complete.") - if self.android_device.skip_sl4a: - self.android_device.log.info("Skipping SL4A install.") - else: - for _ in range(3): - self.android_device.log.info( - 'Re-installing SL4A from "%s".', self.get_sl4a_apk() - ) - self.android_device.adb.install( - "-r -g %s" % self.get_sl4a_apk(), ignore_status=True - ) - time.sleep(SL4A_SERVICE_SETUP_TIME) - if self.android_device.is_sl4a_installed(): - break - log.info("Starting services.") - self.android_device.start_services() - self.android_device.update_sdk_api_level() - log.info("Services started. Running ota tool cleanup.") - self.ota_tool.cleanup(self) - log.info("Cleanup complete.") - - def get_ota_package_metadata(self, requested_field): - """Returns a variable found within the OTA package's metadata. - - Args: - requested_field: the name of the metadata field - - Will return None if the variable cannot be found. - """ - ota_zip = ZipFile(self.get_ota_package(), "r") - if OTA_PACKAGE_METADATA_PATH in ota_zip.namelist(): - with ota_zip.open(OTA_PACKAGE_METADATA_PATH) as metadata: - timestamp_line = requested_field.encode("utf-8") - timestamp_offset = len(timestamp_line) + 1 - - for line in metadata.readlines(): - if line.startswith(timestamp_line): - return line[timestamp_offset:].decode("utf-8").strip() - return None - - def validate_update(self): - """Raises an error if updating to the next build is not valid. - - Raises: - InvalidOtaUpdateError if the ota version is not valid, or cannot be - validated. - """ - # The timestamp the current device build was created at. - cur_img_timestamp = self.android_device.adb.getprop("ro.build.date.utc") - ota_img_timestamp = self.get_ota_package_metadata("post-timestamp") - - if ota_img_timestamp is None: - raise InvalidOtaUpdateError( - "Unable to find the timestamp " "for the OTA build." 
- ) - - try: - if int(ota_img_timestamp) <= int(cur_img_timestamp): - cur_fingerprint = self.android_device.adb.getprop( - "ro.bootimage.build.fingerprint" - ) - ota_fingerprint = self.get_post_build_id() - raise InvalidOtaUpdateError( - "The OTA image comes from an earlier build than the " - "source build. Current build: Time: %s -- %s, " - "OTA build: Time: %s -- %s" - % ( - cur_img_timestamp, - cur_fingerprint, - ota_img_timestamp, - ota_fingerprint, - ) - ) - except ValueError: - raise InvalidOtaUpdateError( - "Unable to parse timestamps. Current timestamp: %s, OTA " - "timestamp: %s" % (ota_img_timestamp, cur_img_timestamp) - ) - - def get_post_build_id(self): - """Returns the post-build ID found within the OTA package metadata. - - Raises: - InvalidOtaUpdateError if the post-build ID cannot be found. - """ - return self.get_ota_package_metadata("post-build") - - def can_update(self): - """Whether or not an update package is available for the device.""" - return NotImplementedError() - - def get_ota_package(self): - raise NotImplementedError() - - def get_sl4a_apk(self): - raise NotImplementedError() - - -class SingleUseOtaRunner(OtaRunner): - """A single use OtaRunner. - - SingleUseOtaRunners can only be ran once. If a user attempts to run it more - than once, an error will be thrown. Users can avoid the error by checking - can_update() before calling update(). - """ - - def __init__(self, ota_tool, android_device, ota_package, sl4a_apk): - super(SingleUseOtaRunner, self).__init__(ota_tool, android_device) - self._ota_package = ota_package - self._sl4a_apk = sl4a_apk - self._called = False - - def can_update(self): - return not self._called - - def update(self): - """Starts the update process.""" - if not self.can_update(): - raise OtaError( - "A SingleUseOtaTool instance cannot update a device " "multiple times." 
- ) - self._called = True - self._update() - - def get_ota_package(self): - return self._ota_package - - def get_sl4a_apk(self): - return self._sl4a_apk - - -class MultiUseOtaRunner(OtaRunner): - """A multiple use OtaRunner. - - MultiUseOtaRunner can only be ran for as many times as there have been - packages provided to them. If a user attempts to run it more than the number - of provided packages, an error will be thrown. Users can avoid the error by - checking can_update() before calling update(). - """ - - def __init__(self, ota_tool, android_device, ota_packages, sl4a_apks): - super(MultiUseOtaRunner, self).__init__(ota_tool, android_device) - self._ota_packages = ota_packages - self._sl4a_apks = sl4a_apks - self.current_update_number = 0 - - def can_update(self): - return not self.current_update_number == len(self._ota_packages) - - def update(self): - """Starts the update process.""" - if not self.can_update(): - raise OtaError( - "This MultiUseOtaRunner has already updated all " - "given packages onto the phone." - ) - self._update() - self.current_update_number += 1 - - def get_ota_package(self): - return self._ota_packages[self.current_update_number] - - def get_sl4a_apk(self): - return self._sl4a_apks[self.current_update_number]
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py b/src/antlion/libs/ota/ota_runners/ota_runner_factory.py deleted file mode 100644 index a5622da..0000000 --- a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py +++ /dev/null
@@ -1,215 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging - -from antlion.config_parser import ActsConfigError -from antlion.libs.ota.ota_runners import ota_runner -from antlion.libs.ota.ota_tools import ota_tool_factory -from antlion.libs.ota.ota_tools import adb_sideload_ota_tool - -_bound_devices = {} - -DEFAULT_OTA_TOOL = adb_sideload_ota_tool.AdbSideloadOtaTool.__name__ -DEFAULT_OTA_COMMAND = "adb" - - -def create_all_from_configs(config, android_devices): - """Creates a new OtaTool for each given AndroidDevice. - - After an OtaTool is assigned to a device, another OtaTool cannot be created - for that device. This will prevent OTA Update tests that accidentally flash - the same build onto a device more than once. - - Args: - config: the ACTS config user_params. - android_devices: The devices to run an OTA Update on. - - Returns: - A list of OtaRunners responsible for updating the given devices. The - indexes match the indexes of the corresponding AndroidDevice in - android_devices. - """ - return [create_from_configs(config, ad) for ad in android_devices] - - -def create_from_configs(config, android_device): - """Creates a new OtaTool for the given AndroidDevice. - - After an OtaTool is assigned to a device, another OtaTool cannot be created - for that device. This will prevent OTA Update tests that accidentally flash - the same build onto a device more than once. 
- - Args: - config: the ACTS config user_params. - android_device: The device to run the OTA Update on. - - Returns: - An OtaRunner responsible for updating the given device. - """ - # Default to adb sideload - try: - ota_tool_class_name = get_ota_value_from_config( - config, "ota_tool", android_device - ) - except ActsConfigError: - ota_tool_class_name = DEFAULT_OTA_TOOL - - if ota_tool_class_name not in config: - if ota_tool_class_name is not DEFAULT_OTA_TOOL: - raise ActsConfigError( - "If the ota_tool is overloaded, the path to the tool must be " - 'added to the ACTS config file under {"OtaToolName": ' - '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.' - % ota_tool_class_name - ) - else: - command = DEFAULT_OTA_COMMAND - else: - command = config[ota_tool_class_name] - if type(command) is list: - # If file came as a list in the config. - if len(command) == 1: - command = command[0] - else: - raise ActsConfigError( - 'Config value for "%s" must be either a string or a list ' - "of exactly one element" % ota_tool_class_name - ) - - ota_package = get_ota_value_from_config(config, "ota_package", android_device) - ota_sl4a = get_ota_value_from_config(config, "ota_sl4a", android_device) - if type(ota_sl4a) != type(ota_package): - raise ActsConfigError( - "The ota_package and ota_sl4a must either both be strings, or " - 'both be lists. Device with serial "%s" has mismatched types.' - % android_device.serial - ) - return create(ota_package, ota_sl4a, android_device, ota_tool_class_name, command) - - -def create( - ota_package, - ota_sl4a, - android_device, - ota_tool_class_name=DEFAULT_OTA_TOOL, - command=DEFAULT_OTA_COMMAND, - use_cached_runners=True, -): - """ - Args: - ota_package: A string or list of strings corresponding to the - update.zip package location(s) for running an OTA update. - ota_sl4a: A string or list of strings corresponding to the - sl4a.apk package location(s) for running an OTA update. 
- ota_tool_class_name: The class name for the desired ota_tool - command: The command line tool name for the updater - android_device: The AndroidDevice to run the OTA Update on. - use_cached_runners: Whether or not to use runners cached by previous - create calls. - - Returns: - An OtaRunner with the given properties from the arguments. - """ - ota_tool = ota_tool_factory.create(ota_tool_class_name, command) - return create_from_package( - ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners - ) - - -def create_from_package( - ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners=True -): - """ - Args: - ota_package: A string or list of strings corresponding to the - update.zip package location(s) for running an OTA update. - ota_sl4a: A string or list of strings corresponding to the - sl4a.apk package location(s) for running an OTA update. - ota_tool: The OtaTool to be paired with the returned OtaRunner - android_device: The AndroidDevice to run the OTA Update on. - use_cached_runners: Whether or not to use runners cached by previous - create calls. - - Returns: - An OtaRunner with the given properties from the arguments. - """ - if android_device in _bound_devices and use_cached_runners: - logging.warning( - "Android device %s has already been assigned an " - "OtaRunner. Returning previously created runner." - ) - return _bound_devices[android_device] - - if type(ota_package) != type(ota_sl4a): - raise TypeError( - "The ota_package and ota_sl4a must either both be strings, or " - 'both be lists. Device with serial "%s" has requested mismatched ' - "types." 
% android_device.serial - ) - - if type(ota_package) is str: - runner = ota_runner.SingleUseOtaRunner( - ota_tool, android_device, ota_package, ota_sl4a - ) - elif type(ota_package) is list: - runner = ota_runner.MultiUseOtaRunner( - ota_tool, android_device, ota_package, ota_sl4a - ) - else: - raise TypeError( - 'The "ota_package" value in the acts config must be ' - "either a list or a string." - ) - - _bound_devices[android_device] = runner - return runner - - -def get_ota_value_from_config(config, key, android_device): - """Returns a key for the given AndroidDevice. - - Args: - config: The ACTS config - key: The base key desired (ota_tool, ota_sl4a, or ota_package) - android_device: An AndroidDevice - - Returns: The value at the specified key. - Throws: ActsConfigError if the value cannot be determined from the config. - """ - suffix = "" - if "ota_map" in config: - if android_device.serial in config["ota_map"]: - suffix = "_%s" % config["ota_map"][android_device.serial] - - ota_package_key = "%s%s" % (key, suffix) - if ota_package_key not in config: - if suffix != "": - raise ActsConfigError( - "Asked for an OTA Update without specifying a required value. " - '"ota_map" has entry {"%s": "%s"}, but there is no ' - 'corresponding entry {"%s":"/path/to/file"} found within the ' - "ACTS config." % (android_device.serial, suffix[1:], ota_package_key) - ) - else: - raise ActsConfigError( - "Asked for an OTA Update without specifying a required value. " - '"ota_map" does not exist or have a key for serial "%s", and ' - 'the default value entry "%s" cannot be found within the ACTS ' - "config." % (android_device.serial, ota_package_key) - ) - - return config[ota_package_key]
diff --git a/src/antlion/libs/ota/ota_tools/__init__.py b/src/antlion/libs/ota/ota_tools/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/libs/ota/ota_tools/__init__.py +++ /dev/null
diff --git a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py deleted file mode 100644 index f097f45..0000000 --- a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py +++ /dev/null
@@ -1,47 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging - -from antlion.libs.ota.ota_tools.ota_tool import OtaTool - -# OTA Packages can be upwards of 1 GB. This may take some time to transfer over -# USB 2.0. -PUSH_TIMEOUT = 10 * 60 - - -class AdbSideloadOtaTool(OtaTool): - """Updates an AndroidDevice using adb sideload.""" - - def __init__(self, ignored_command): - # "command" is ignored. The ACTS adb version is used to prevent - # differing adb versions from constantly killing adbd. - super(AdbSideloadOtaTool, self).__init__(ignored_command) - - def update(self, ota_runner): - logging.info("Rooting adb") - ota_runner.android_device.root_adb() - logging.info("Rebooting to sideload") - ota_runner.android_device.adb.reboot("sideload") - ota_runner.android_device.adb.wait_for_sideload() - logging.info("Sideloading ota package") - package_path = ota_runner.get_ota_package() - logging.info('Running adb sideload with package "%s"' % package_path) - ota_runner.android_device.adb.sideload(package_path, timeout=PUSH_TIMEOUT) - logging.info("Sideload complete. Waiting for device to come back up.") - ota_runner.android_device.adb.wait_for_recovery() - ota_runner.android_device.reboot(stop_at_lock_screen=True) - logging.info("Device is up. Update complete.")
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool.py b/src/antlion/libs/ota/ota_tools/ota_tool.py deleted file mode 100644 index 2a1aa4a..0000000 --- a/src/antlion/libs/ota/ota_tools/ota_tool.py +++ /dev/null
@@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class OtaTool(object): - """A Wrapper for an OTA Update command or tool. - - Each OtaTool acts as a facade to the underlying command or tool used to - update the device. - """ - - def __init__(self, command): - """Creates an OTA Update tool with the given properties. - - Args: - command: A string that is used as the command line tool - """ - self.command = command - - def update(self, ota_runner): - """Begins the OTA Update. Returns after the update has installed. - - Args: - ota_runner: The OTA Runner that handles the device information. - """ - raise NotImplementedError() - - def cleanup(self, ota_runner): - """A cleanup method for the OTA Tool to run after the update completes. - - Args: - ota_runner: The OTA Runner that handles the device information. - """
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py b/src/antlion/libs/ota/ota_tools/ota_tool_factory.py deleted file mode 100644 index 0eff707..0000000 --- a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py +++ /dev/null
@@ -1,54 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.libs.ota.ota_tools.adb_sideload_ota_tool import AdbSideloadOtaTool -from antlion.libs.ota.ota_tools.update_device_ota_tool import UpdateDeviceOtaTool - -_CONSTRUCTORS = { - AdbSideloadOtaTool.__name__: lambda command: AdbSideloadOtaTool(command), - UpdateDeviceOtaTool.__name__: lambda command: UpdateDeviceOtaTool(command), -} -_constructed_tools = {} - - -def create(ota_tool_class, command): - """Returns an OtaTool with the given class name. - - If the tool has already been created, the existing instance will be - returned. - - Args: - ota_tool_class: the class/type of the tool you wish to use. - command: the command line tool being used. - - Returns: - An OtaTool. - """ - if ota_tool_class in _constructed_tools: - return _constructed_tools[ota_tool_class] - - if ota_tool_class not in _CONSTRUCTORS: - raise KeyError( - "Given Ota Tool class name does not match a known " - 'name. Found "%s". Expected any of %s. If this tool ' - "does exist, add it to the _CONSTRUCTORS dict in this " - "module." % (ota_tool_class, _CONSTRUCTORS.keys()) - ) - - new_update_tool = _CONSTRUCTORS[ota_tool_class](command) - _constructed_tools[ota_tool_class] = new_update_tool - - return new_update_tool
diff --git a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py deleted file mode 100644 index 4bdde99..0000000 --- a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py +++ /dev/null
@@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import shutil -import tempfile - -from antlion.libs.ota.ota_tools import ota_tool -from antlion.libs.proc import job -from antlion import utils - -# OTA Packages can be upwards of 1 GB. This may take some time to transfer over -# USB 2.0. A/B devices must also complete the update in the background. -UPDATE_TIMEOUT = 60 * 60 -UPDATE_LOCATION = "/data/ota_package/update.zip" - - -class UpdateDeviceOtaTool(ota_tool.OtaTool): - """Runs an OTA Update with system/update_engine/scripts/update_device.py.""" - - def __init__(self, command): - super(UpdateDeviceOtaTool, self).__init__(command) - - self.unzip_path = tempfile.mkdtemp() - utils.unzip_maintain_permissions(self.command, self.unzip_path) - - self.command = os.path.join(self.unzip_path, "update_device.py") - - def update(self, ota_runner): - logging.info("Forcing adb to be in root mode.") - ota_runner.android_device.root_adb() - update_command = "python3 %s -s %s %s" % ( - self.command, - ota_runner.serial, - ota_runner.get_ota_package(), - ) - logging.info("Running %s" % update_command) - result = job.run(update_command, timeout=UPDATE_TIMEOUT) - logging.info("Output: %s" % result.stdout) - - logging.info("Rebooting device for update to go live.") - ota_runner.android_device.reboot(stop_at_lock_screen=True) - logging.info("Reboot sent.") - - def __del__(self): 
- """Delete the unzipped update_device folder before ACTS exits.""" - shutil.rmtree(self.unzip_path)
diff --git a/src/antlion/libs/ota/ota_updater.py b/src/antlion/libs/ota/ota_updater.py deleted file mode 100644 index 6db9649..0000000 --- a/src/antlion/libs/ota/ota_updater.py +++ /dev/null
@@ -1,71 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.libs.ota.ota_runners import ota_runner_factory - -"""Maps AndroidDevices to OtaRunners.""" -ota_runners = {} - - -def initialize(user_params, android_devices): - """Initialize OtaRunners for each device. - - Args: - user_params: The user_params from the ACTS config. - android_devices: The android_devices in the test. - """ - for ad in android_devices: - ota_runners[ad] = ota_runner_factory.create_from_configs(user_params, ad) - - -def _check_initialization(android_device): - """Check if a given device was initialized.""" - if android_device not in ota_runners: - raise KeyError( - 'Android Device with serial "%s" has not been ' - "initialized for OTA Updates. Did you forget to call" - "ota_updater.initialize()?" % android_device.serial - ) - - -def update(android_device, ignore_update_errors=False): - """Update a given AndroidDevice. - - Args: - android_device: The device to update - ignore_update_errors: Whether or not to ignore update errors such as - no more updates available for a given device. Default is false. - Throws: - OtaError if ignore_update_errors is false and the OtaRunner has run out - of packages to update the phone with. 
- """ - _check_initialization(android_device) - ota_runners[android_device].validate_update() - try: - ota_runners[android_device].update() - except Exception as e: - if ignore_update_errors: - return - android_device.log.error(e) - android_device.take_bug_report("ota_update", utils.get_current_epoch_time()) - raise e - - -def can_update(android_device): - """Whether or not a device can be updated.""" - _check_initialization(android_device) - return ota_runners[android_device].can_update()
diff --git a/src/antlion/libs/proc/__init__.py b/src/antlion/libs/proc/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/libs/proc/__init__.py +++ /dev/null
diff --git a/src/antlion/libs/proc/job.py b/src/antlion/libs/proc/job.py deleted file mode 100644 index c1cdc24..0000000 --- a/src/antlion/libs/proc/job.py +++ /dev/null
@@ -1,204 +0,0 @@ -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import subprocess -import time - - -class Error(Exception): - """Indicates that a command failed, is fatal to the test unless caught.""" - - def __init__(self, result): - super(Error, self).__init__(result) - self.result: Result = result - - -class TimeoutError(Error): - """Thrown when a BackgroundJob times out on wait.""" - - -class Result(object): - """Command execution result. - - Contains information on subprocess execution after it has exited. - - Attributes: - command: An array containing the command and all arguments that - was executed. - exit_status: Integer exit code of the process. - stdout_raw: The raw bytes output from standard out. - stderr_raw: The raw bytes output from standard error - duration: How long the process ran for. - did_timeout: True if the program timed out and was killed. 
- """ - - @property - def stdout(self): - """String representation of standard output.""" - if not self._stdout_str: - self._stdout_str = self._raw_stdout.decode( - encoding=self._encoding, errors="replace" - ) - self._stdout_str = self._stdout_str.strip() - return self._stdout_str - - @property - def stderr(self): - """String representation of standard error.""" - if not self._stderr_str: - self._stderr_str = self._raw_stderr.decode( - encoding=self._encoding, errors="replace" - ) - self._stderr_str = self._stderr_str.strip() - return self._stderr_str - - def __init__( - self, - command=[], - stdout=bytes(), - stderr=bytes(), - exit_status=None, - duration=0, - did_timeout=False, - encoding="utf-8", - ): - """ - Args: - command: The command that was run. This will be a list containing - the executed command and all args. - stdout: The raw bytes that standard output gave. - stderr: The raw bytes that standard error gave. - exit_status: The exit status of the command. - duration: How long the command ran. - did_timeout: True if the command timed out. - encoding: The encoding standard that the program uses. - """ - self.command = command - self.exit_status = exit_status - self._raw_stdout = stdout - self._raw_stderr = stderr - self._stdout_str = None - self._stderr_str = None - self._encoding = encoding - self.duration = duration - self.did_timeout = did_timeout - - def __repr__(self): - return ( - "job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, " - "duration=%r, did_timeout=%r, encoding=%r)" - ) % ( - self.command, - self._raw_stdout, - self._raw_stderr, - self.exit_status, - self.duration, - self.did_timeout, - self._encoding, - ) - - -def run(command, timeout=60, ignore_status=False, env=None, io_encoding="utf-8"): - """Execute a command in a subproccess and return its output. - - Commands can be either shell commands (given as strings) or the - path and arguments to an executable (given as a list). 
This function - will block until the subprocess finishes or times out. - - Args: - command: The command to execute. Can be either a string or a list. - timeout: number seconds to wait for command to finish. - ignore_status: bool True to ignore the exit code of the remote - subprocess. Note that if you do ignore status codes, - you should handle non-zero exit codes explicitly. - env: dict enviroment variables to setup on the remote host. - io_encoding: str unicode encoding of command output. - - Returns: - A job.Result containing the results of the ssh command. - - Raises: - job.TimeoutError: When the remote command took to long to execute. - Error: When the command had an error executing and ignore_status==False. - """ - start_time = time.time() - proc = subprocess.Popen( - command, - env=env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - shell=not isinstance(command, list), - ) - # Wait on the process terminating - timed_out = False - out = bytes() - err = bytes() - try: - (out, err) = proc.communicate(timeout=timeout) - except subprocess.TimeoutExpired: - timed_out = True - proc.kill() - proc.wait() - - result = Result( - command=command, - stdout=out, - stderr=err, - exit_status=proc.returncode, - duration=time.time() - start_time, - encoding=io_encoding, - did_timeout=timed_out, - ) - logging.debug(result) - - if timed_out: - logging.error("Command %s with %s timeout setting timed out", command, timeout) - raise TimeoutError(result) - - if not ignore_status and proc.returncode != 0: - raise Error(result) - - return result - - -def run_async(command, env=None): - """Execute a command in a subproccess asynchronously. - - It is the callers responsibility to kill/wait on the resulting - subprocess.Popen object. - - Commands can be either shell commands (given as strings) or the - path and arguments to an executable (given as a list). This function - will not block. - - Args: - command: The command to execute. Can be either a string or a list. 
- env: dict enviroment variables to setup on the remote host. - - Returns: - A subprocess.Popen object representing the created subprocess. - - """ - proc = subprocess.Popen( - command, - env=env, - preexec_fn=os.setpgrp, - shell=not isinstance(command, list), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - logging.debug("command %s started with pid %s", command, proc.pid) - return proc
diff --git a/src/antlion/libs/proc/process.py b/src/antlion/libs/proc/process.py deleted file mode 100644 index 9a3bbcd..0000000 --- a/src/antlion/libs/proc/process.py +++ /dev/null
@@ -1,277 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import shlex -import signal -import subprocess -import sys -import time -from threading import Thread - -_on_windows = sys.platform == "win32" - - -class ProcessError(Exception): - """Raised when invalid operations are run on a Process.""" - - -class Process(object): - """A Process object used to run various commands. - - Attributes: - _command: The initial command to run. - _subprocess_kwargs: The kwargs to send to Popen for more control over - execution. - _process: The subprocess.Popen object currently executing a process. - _listening_thread: The thread that is listening for the process to stop. - _redirection_thread: The thread that is redirecting process output. - _on_output_callback: The callback to call when output is received. - _on_terminate_callback: The callback to call when the process terminates - without stop() being called first. - _started: Whether or not start() was called. - _stopped: Whether or not stop() was called. - """ - - def __init__(self, command, **kwargs): - """Creates a Process object. - - Note that this constructor does not begin the process. To start the - process, use Process.start(). 
- """ - # Split command string into list if shell=True is not specified - self._use_shell = kwargs.get("shell", False) - if not self._use_shell and isinstance(command, str): - command = shlex.split(command) - self._command = command - self._subprocess_kwargs = kwargs - if _on_windows: - self._subprocess_kwargs[ - "creationflags" - ] = subprocess.CREATE_NEW_PROCESS_GROUP - else: - self._subprocess_kwargs["start_new_session"] = True - self._process = None - - self._listening_thread = None - self._redirection_thread = None - self._on_output_callback = lambda *args, **kw: None - self._binary_output = False - self._on_terminate_callback = lambda *args, **kw: "" - - self._started = False - self._stopped = False - - def set_on_output_callback(self, on_output_callback, binary=False): - """Sets the on_output_callback function. - - Args: - on_output_callback: The function to be called when output is sent to - the output. The output callback has the following signature: - - >>> def on_output_callback(output_line): - >>> return None - - binary: If True, read the process output as raw binary. - Returns: - self - """ - self._on_output_callback = on_output_callback - self._binary_output = binary - return self - - def set_on_terminate_callback(self, on_terminate_callback): - """Sets the on_self_terminate callback function. - - Args: - on_terminate_callback: The function to be called when the process - has terminated on its own. The callback has the following - signature: - - >>> def on_self_terminate_callback(popen_process): - >>> return 'command to run' or None - - If a string is returned, the string returned will be the command - line used to run the command again. If None is returned, the - process will end without restarting. 
- - Returns: - self - """ - self._on_terminate_callback = on_terminate_callback - return self - - def start(self): - """Starts the process's execution.""" - if self._started: - raise ProcessError("Process has already started.") - self._started = True - self._process = None - - self._listening_thread = Thread(target=self._exec_loop) - self._listening_thread.start() - - time_up_at = time.time() + 1 - - while self._process is None: - if time.time() > time_up_at: - raise OSError("Unable to open process!") - - self._stopped = False - - @staticmethod - def _get_timeout_left(timeout, start_time): - return max(0.1, timeout - (time.time() - start_time)) - - def is_running(self): - """Checks that the underlying Popen process is still running - - Returns: - True if the process is running. - """ - return self._process is not None and self._process.poll() is None - - def _join_threads(self): - """Waits for the threads associated with the process to terminate.""" - if self._listening_thread is not None: - self._listening_thread.join() - self._listening_thread = None - - if self._redirection_thread is not None: - self._redirection_thread.join() - self._redirection_thread = None - - def _kill_process(self): - """Kills the underlying process/process group. Implementation is - platform-dependent.""" - if _on_windows: - subprocess.check_call("taskkill /F /T /PID %s" % self._process.pid) - else: - self.signal(signal.SIGKILL) - - def wait(self, kill_timeout=60.0): - """Waits for the process to finish execution. - - If the process has reached the kill_timeout, the process will be killed - instead. - - Note: the on_self_terminate callback will NOT be called when calling - this function. - - Args: - kill_timeout: The amount of time to wait until killing the process. 
- """ - if self._stopped: - raise ProcessError("Process is already being stopped.") - self._stopped = True - - try: - self._process.wait(kill_timeout) - except subprocess.TimeoutExpired: - self._kill_process() - finally: - self._join_threads() - self._started = False - - def signal(self, sig): - """Sends a signal to the process. - - Args: - sig: The signal to be sent. - """ - if _on_windows: - raise ProcessError("Unable to call Process.signal on windows.") - - pgid = os.getpgid(self._process.pid) - os.killpg(pgid, sig) - - def stop(self): - """Stops the process. - - This command is effectively equivalent to kill, but gives time to clean - up any related work on the process, such as output redirection. - - Note: the on_self_terminate callback will NOT be called when calling - this function. - """ - self.wait(0) - - def _redirect_output(self): - """Redirects the output from the command into the on_output_callback.""" - if self._binary_output: - while True: - data = self._process.stdout.read(1024) - - if not data: - return - else: - self._on_output_callback(data) - else: - while True: - line = self._process.stdout.readline().decode("utf-8", errors="replace") - - if not line: - return - else: - # Output the line without trailing \n and whitespace. - self._on_output_callback(line.rstrip()) - - @staticmethod - def __start_process(command, **kwargs): - """A convenient wrapper function for starting the process.""" - acts_logger = logging.getLogger() - acts_logger.debug('Starting command "%s" with kwargs %s', command, kwargs) - return subprocess.Popen(command, **kwargs) - - def _exec_loop(self): - """Executes Popen in a loop. - - When Popen terminates without stop() being called, - self._on_terminate_callback() will be called. The returned value from - _on_terminate_callback will then be used to determine if the loop should - continue and start up the process again. See set_on_terminate_callback() - for more information. 
- """ - command = self._command - while True: - self._process = self.__start_process( - command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - bufsize=1, - **self._subprocess_kwargs, - ) - self._redirection_thread = Thread(target=self._redirect_output) - self._redirection_thread.start() - self._process.wait() - - if self._stopped: - logging.debug("The process for command %s was stopped.", command) - break - else: - logging.debug("The process for command %s terminated.", command) - # Wait for all output to be processed before sending - # _on_terminate_callback() - self._redirection_thread.join() - logging.debug("Beginning on_terminate_callback for %s.", command) - retry_value = self._on_terminate_callback(self._process) - if retry_value: - if not self._use_shell and isinstance(retry_value, str): - retry_value = shlex.split(retry_value) - command = retry_value - else: - break
diff --git a/src/antlion/libs/yaml_writer.py b/src/antlion/libs/yaml_writer.py deleted file mode 100644 index 33c349f..0000000 --- a/src/antlion/libs/yaml_writer.py +++ /dev/null
@@ -1,48 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import yaml - -# Allow yaml to dump OrderedDict -yaml.add_representer( - collections.OrderedDict, - lambda dumper, data: dumper.represent_dict(data), - Dumper=yaml.SafeDumper, -) - - -def _str_representer(dumper, data): - if len(data.splitlines()) > 1: - data = "\n".join( - line.replace("\t", " ").rstrip() for line in data.splitlines() - ) - return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|") - return dumper.represent_scalar("tag:yaml.org,2002:str", data) - - -# Automatically convert multiline strings into block literals -yaml.add_representer(str, _str_representer, Dumper=yaml.SafeDumper) - -_DUMP_KWARGS = dict(explicit_start=True, allow_unicode=True, indent=4) -if yaml.__version__ >= "5.1": - _DUMP_KWARGS.update(sort_keys=False) - - -def safe_dump(content, file): - """Calls yaml.safe_dump to write content to the file, with additional - parameters from _DUMP_KWARGS.""" - yaml.safe_dump(content, file, **_DUMP_KWARGS)
diff --git a/src/antlion/logger.py b/src/antlion/logger.py deleted file mode 100755 index 1d18ad8..0000000 --- a/src/antlion/logger.py +++ /dev/null
@@ -1,319 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import logging -import os -import re - -from copy import copy - -from antlion import tracelogger -from antlion.libs.logging import log_stream -from antlion.libs.logging.log_stream import LogStyles - -log_line_format = "%(asctime)s.%(msecs).03d %(levelname)s %(message)s" -# The micro seconds are added by the format string above, -# so the time format does not include ms. 
-log_line_time_format = "%Y-%m-%d %H:%M:%S" -log_line_timestamp_len = 23 - -logline_timestamp_re = re.compile("\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d") - - -# yapf: disable -class Style: - RESET = '\033[0m' - BRIGHT = '\033[1m' - DIM = '\033[2m' - NORMAL = '\033[22m' - - -class Fore: - BLACK = '\033[30m' - RED = '\033[31m' - GREEN = '\033[32m' - YELLOW = '\033[33m' - BLUE = '\033[34m' - MAGENTA = '\033[35m' - CYAN = '\033[36m' - WHITE = '\033[37m' - RESET = '\033[39m' - - -class Back: - BLACK = '\033[40m' - RED = '\033[41m' - GREEN = '\033[42m' - YELLOW = '\033[43m' - BLUE = '\033[44m' - MAGENTA = '\033[45m' - CYAN = '\033[46m' - WHITE = '\033[47m' - RESET = '\033[49m' - - -LOG_LEVELS = { - 'DEBUG': {'level': 10, 'style': Fore.GREEN + Style.BRIGHT}, - 'CASE': {'level': 11, 'style': Back.BLUE + Fore.WHITE + Style.BRIGHT}, - 'SUITE': {'level': 12, 'style': Back.MAGENTA + Fore.WHITE + Style.BRIGHT}, - 'INFO': {'level': 20, 'style': Style.NORMAL}, - 'STEP': {'level': 15, 'style': Fore.WHITE + Style.BRIGHT}, - 'WARNING': {'level': 30, 'style': Fore.YELLOW + Style.BRIGHT}, - 'ERROR': {'level': 40, 'style': Fore.RED + Style.BRIGHT}, - 'EXCEPTION': {'level': 45, 'style': Back.RED + Fore.WHITE + Style.BRIGHT}, - 'DEVICE': {'level': 51, 'style': Fore.CYAN + Style.BRIGHT}, -} -# yapf: enable - - -class ColoredLogFormatter(logging.Formatter): - def format(self, record): - colored_record = copy(record) - level_name = colored_record.levelname - style = LOG_LEVELS[level_name]["style"] - formatted_level_name = "%s%s%s" % (style, level_name, Style.RESET) - colored_record.levelname = formatted_level_name - return super().format(colored_record) - - -def _parse_logline_timestamp(t): - """Parses a logline timestamp into a tuple. - - Args: - t: Timestamp in logline format. - - Returns: - An iterable of date and time elements in the order of month, day, hour, - minute, second, microsecond. 
- """ - date, time = t.split(" ") - year, month, day = date.split("-") - h, m, s = time.split(":") - s, ms = s.split(".") - return year, month, day, h, m, s, ms - - -def is_valid_logline_timestamp(timestamp): - if len(timestamp) == log_line_timestamp_len: - if logline_timestamp_re.match(timestamp): - return True - return False - - -def logline_timestamp_comparator(t1, t2): - """Comparator for timestamps in logline format. - - Args: - t1: Timestamp in logline format. - t2: Timestamp in logline format. - - Returns: - -1 if t1 < t2; 1 if t1 > t2; 0 if t1 == t2. - """ - dt1 = _parse_logline_timestamp(t1) - dt2 = _parse_logline_timestamp(t2) - for u1, u2 in zip(dt1, dt2): - if u1 < u2: - return -1 - elif u1 > u2: - return 1 - return 0 - - -def _get_timestamp(time_format, delta=None): - t = datetime.datetime.now() - if delta: - t = t + datetime.timedelta(seconds=delta) - return t.strftime(time_format)[:-3] - - -def epoch_to_log_line_timestamp(epoch_time): - """Converts an epoch timestamp in ms to log line timestamp format, which - is readable for humans. - - Args: - epoch_time: integer, an epoch timestamp in ms. - - Returns: - A string that is the corresponding timestamp in log line timestamp - format. - """ - s, ms = divmod(epoch_time, 1000) - d = datetime.datetime.fromtimestamp(s) - return d.strftime("%Y-%m-%d %H:%M:%S.") + str(ms) - - -def get_log_line_timestamp(delta=None): - """Returns a timestamp in the format used by log lines. - - Default is current time. If a delta is set, the return value will be - the current time offset by delta seconds. - - Args: - delta: Number of seconds to offset from current time; can be negative. - - Returns: - A timestamp in log line format with an offset. - """ - return _get_timestamp("%Y-%m-%d %H:%M:%S.%f", delta) - - -def get_log_file_timestamp(delta=None): - """Returns a timestamp in the format used for log file names. - - Default is current time. 
If a delta is set, the return value will be - the current time offset by delta seconds. - - Args: - delta: Number of seconds to offset from current time; can be negative. - - Returns: - A timestamp in log file name format with an offset. - """ - return _get_timestamp("%Y-%m-%d_%H-%M-%S-%f", delta) - - -def _setup_test_logger(log_path, prefix=None): - """Customizes the root logger for a test run. - - The logger object has a stream handler and a file handler. The stream - handler logs INFO level to the terminal, the file handler logs DEBUG - level to files. - - Args: - log_path: Location of the log file. - prefix: A prefix for each log line in terminal. - """ - logging.log_path = log_path - log_styles = [ - LogStyles.LOG_INFO + LogStyles.TO_STDOUT, - LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG, - ] - terminal_format = log_line_format - if prefix: - terminal_format = "[{}] {}".format(prefix, log_line_format) - stream_formatter = ColoredLogFormatter(terminal_format, log_line_time_format) - file_formatter = logging.Formatter(log_line_format, log_line_time_format) - log = log_stream.create_logger( - "test_run", - "", - log_styles=log_styles, - stream_format=stream_formatter, - file_format=file_formatter, - ) - log.setLevel(logging.DEBUG) - _enable_additional_log_levels() - - -def _enable_additional_log_levels(): - """Enables logging levels used for tracing tests and debugging devices.""" - for log_type, log_data in LOG_LEVELS.items(): - logging.addLevelName(log_data["level"], log_type) - - -def kill_test_logger(logger): - """Cleans up a test logger object by removing all of its handlers. - - Args: - logger: The logging object to clean up. - """ - for h in list(logger.handlers): - logger.removeHandler(h) - if isinstance(h, logging.FileHandler): - h.close() - - -def create_latest_log_alias(actual_path): - """Creates a symlink to the latest test run logs. - - Args: - actual_path: The source directory where the latest test run's logs are. 
- """ - link_path = os.path.join(os.path.dirname(actual_path), "latest") - if os.path.islink(link_path): - os.remove(link_path) - try: - os.symlink(actual_path, link_path) - except OSError: - logging.warning("Failed to create symlink to latest logs dir.", exc_info=True) - - -def setup_test_logger(log_path, prefix=None): - """Customizes the root logger for a test run. - - Args: - log_path: Location of the report file. - prefix: A prefix for each log line in terminal. - filename: Name of the files. The default is the time the objects - are requested. - """ - os.makedirs(log_path, exist_ok=True) - _setup_test_logger(log_path, prefix) - create_latest_log_alias(log_path) - - -def normalize_log_line_timestamp(log_line_timestamp): - """Replace special characters in log line timestamp with normal characters. - - Args: - log_line_timestamp: A string in the log line timestamp format. Obtained - with get_log_line_timestamp. - - Returns: - A string representing the same time as input timestamp, but without - special characters. - """ - norm_tp = log_line_timestamp.replace(" ", "_") - norm_tp = norm_tp.replace(":", "-") - return norm_tp - - -class LoggerAdapter(logging.LoggerAdapter): - """A LoggerAdapter class that takes in a lambda for transforming logs.""" - - def __init__(self, logging_lambda): - self.logging_lambda = logging_lambda - super(LoggerAdapter, self).__init__(logging.getLogger(), {}) - - def process(self, msg, kwargs): - return self.logging_lambda(msg), kwargs - - -def create_logger(logging_lambda=lambda message: message): - """Returns a logger with logging defined by a given lambda. - - Args: - logging_lambda: A lambda of the form: - >>> lambda log_message: return 'string' - """ - return tracelogger.TraceLogger(LoggerAdapter(logging_lambda)) - - -def create_tagged_trace_logger(tag=""): - """Returns a logger that logs each line with the given prefix. - - Args: - tag: The tag of the log line, E.g. 
if tag == tag123, the output - line would be: - - <TESTBED> <TIME> <LOG_LEVEL> [tag123] logged message - """ - - def logging_lambda(msg): - return "[%s] %s" % (tag, msg) - - return create_logger(logging_lambda)
diff --git a/src/antlion/net.py b/src/antlion/net.py deleted file mode 100644 index 6f56703..0000000 --- a/src/antlion/net.py +++ /dev/null
@@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2023 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import errno -import time -import socket - -from typing import Optional - - -def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None: - """Wait for the host to start accepting connections on the port. - - Some services take some time to start. Call this after launching the service - to avoid race conditions. - - Args: - host: IP of the running service. - port: Port of the running service. - timeout_sec: Seconds to wait until raising TimeoutError - - Raises: - TimeoutError: when timeout_sec has expired without a successful - connection to the service - """ - last_error: Optional[OSError] = None - timeout = time.perf_counter() + timeout_sec - - while True: - try: - time_left = max(timeout - time.perf_counter(), 0) - with socket.create_connection((host, port), timeout=time_left): - return - except ConnectionRefusedError as e: - # Occurs when the host is online but not ready to accept connections - # yet; wait to see if the host becomes ready. - last_error = e - except socket.timeout as e: - # socket.timeout was aliased to TimeoutError in Python 3.10. - last_error = e - except OSError as e: - if e.errno == errno.EHOSTUNREACH: - # No route to host. Occurs when the interface to the host is - # torn down; wait to see if the interface comes back. 
- last_error = e - else: - # Unexpected error - raise e - - if time.perf_counter() >= timeout: - raise TimeoutError( - f"Waited over {timeout_sec}s for the service to start " - f"accepting connections at {host}:{port}" - ) from last_error
diff --git a/src/antlion/records.py b/src/antlion/records.py deleted file mode 100644 index 1c7ad23..0000000 --- a/src/antlion/records.py +++ /dev/null
@@ -1,247 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""This module is where all the record definitions and record containers live. -""" - -import collections -import copy -import io -import json - -from antlion import logger -from antlion.libs import yaml_writer - -from mobly.records import ExceptionRecord -from mobly.records import OUTPUT_FILE_SUMMARY -from mobly.records import TestResultEnums as MoblyTestResultEnums -from mobly.records import TestResultRecord as MoblyTestResultRecord -from mobly.records import TestResult as MoblyTestResult -from mobly.records import TestSummaryEntryType -from mobly.records import TestSummaryWriter as MoblyTestSummaryWriter - - -class TestSummaryWriter(MoblyTestSummaryWriter): - """Writes test results to a summary file in real time. Inherits from Mobly's - TestSummaryWriter. - """ - - def dump(self, content, entry_type): - """Update Mobly's implementation of dump to work on OrderedDict. - - See MoblyTestSummaryWriter.dump for documentation. - """ - new_content = collections.OrderedDict(copy.deepcopy(content)) - new_content["Type"] = entry_type.value - new_content.move_to_end("Type", last=False) - # Both user code and Mobly code can trigger this dump, hence the lock. 
- with self._lock: - # For Python3, setting the encoding on yaml.safe_dump does not work - # because Python3 file descriptors set an encoding by default, which - # PyYAML uses instead of the encoding on yaml.safe_dump. So, the - # encoding has to be set on the open call instead. - with io.open(self._path, "a", encoding="utf-8") as f: - # Use safe_dump here to avoid language-specific tags in final - # output. - yaml_writer.safe_dump(new_content, f) - - -class TestResultEnums(MoblyTestResultEnums): - """Enums used for TestResultRecord class. Inherits from Mobly's - TestResultEnums. - - Includes the tokens to mark test result with, and the string names for each - field in TestResultRecord. - """ - - RECORD_LOG_BEGIN_TIME = "Log Begin Time" - RECORD_LOG_END_TIME = "Log End Time" - - -class TestResultRecord(MoblyTestResultRecord): - """A record that holds the information of a test case execution. This class - inherits from Mobly's TestResultRecord class. - - Attributes: - test_name: A string representing the name of the test case. - begin_time: Epoch timestamp of when the test case started. - end_time: Epoch timestamp of when the test case ended. - self.uid: Unique identifier of a test case. - self.result: Test result, PASS/FAIL/SKIP. - self.extras: User defined extra information of the test result. - self.details: A string explaining the details of the test case. - """ - - def __init__(self, t_name, t_class=None): - super().__init__(t_name, t_class) - self.log_begin_time = None - self.log_end_time = None - - def test_begin(self): - """Call this when the test case it records begins execution. - - Sets the begin_time of this record. - """ - super().test_begin() - self.log_begin_time = logger.epoch_to_log_line_timestamp(self.begin_time) - - def _test_end(self, result, e): - """Class internal function to signal the end of a test case execution. - - Args: - result: One of the TEST_RESULT enums in TestResultEnums. - e: A test termination signal (usually an exception object). 
It can - be any exception instance or of any subclass of - acts.signals.TestSignal. - """ - super()._test_end(result, e) - if self.end_time: - self.log_end_time = logger.epoch_to_log_line_timestamp(self.end_time) - - def to_dict(self): - """Gets a dictionary representing the content of this class. - - Returns: - A dictionary representing the content of this class. - """ - d = collections.OrderedDict() - d[TestResultEnums.RECORD_NAME] = self.test_name - d[TestResultEnums.RECORD_CLASS] = self.test_class - d[TestResultEnums.RECORD_BEGIN_TIME] = self.begin_time - d[TestResultEnums.RECORD_END_TIME] = self.end_time - d[TestResultEnums.RECORD_LOG_BEGIN_TIME] = self.log_begin_time - d[TestResultEnums.RECORD_LOG_END_TIME] = self.log_end_time - d[TestResultEnums.RECORD_RESULT] = self.result - d[TestResultEnums.RECORD_UID] = self.uid - d[TestResultEnums.RECORD_EXTRAS] = self.extras - d[TestResultEnums.RECORD_DETAILS] = self.details - d[TestResultEnums.RECORD_EXTRA_ERRORS] = { - key: value.to_dict() for (key, value) in self.extra_errors.items() - } - d[TestResultEnums.RECORD_STACKTRACE] = self.stacktrace - return d - - def json_str(self): - """Converts this test record to a string in json format. - - Format of the json string is: - { - 'Test Name': <test name>, - 'Begin Time': <epoch timestamp>, - 'Details': <details>, - ... - } - - Returns: - A json-format string representing the test record. - """ - return json.dumps(self.to_dict()) - - -class TestResult(MoblyTestResult): - """A class that contains metrics of a test run. This class inherits from - Mobly's TestResult class. - - This class is essentially a container of TestResultRecord objects. - - Attributes: - self.requested: A list of strings, each is the name of a test requested - by user. - self.failed: A list of records for tests failed. - self.executed: A list of records for tests that were actually executed. - self.passed: A list of records for tests passed. - self.skipped: A list of records for tests skipped. 
- """ - - def __add__(self, r): - """Overrides '+' operator for TestResult class. - - The add operator merges two TestResult objects by concatenating all of - their lists together. - - Args: - r: another instance of TestResult to be added - - Returns: - A TestResult instance that's the sum of two TestResult instances. - """ - if not isinstance(r, MoblyTestResult): - raise TypeError("Operand %s of type %s is not a TestResult." % (r, type(r))) - sum_result = TestResult() - for name in sum_result.__dict__: - r_value = getattr(r, name) - l_value = getattr(self, name) - if isinstance(r_value, list): - setattr(sum_result, name, l_value + r_value) - return sum_result - - def json_str(self): - """Converts this test result to a string in json format. - - Format of the json string is: - { - "Results": [ - {<executed test record 1>}, - {<executed test record 2>}, - ... - ], - "Summary": <summary dict> - } - - Returns: - A json-format string representing the test results. - """ - d = collections.OrderedDict() - d["ControllerInfo"] = { - record.controller_name: record.controller_info - for record in self.controller_info - } - d["Results"] = [record.to_dict() for record in self.executed] - d["Summary"] = self.summary_dict() - d["Error"] = self.errors_list() - json_str = json.dumps(d, indent=4) - return json_str - - def summary_str(self): - """Gets a string that summarizes the stats of this test result. - - The summary provides the counts of how many test cases fall into each - category, like "Passed", "Failed" etc. - - Format of the string is: - Requested <int>, Executed <int>, ... - - Returns: - A summary string of this test result. 
- """ - l = ["%s %s" % (k, v) for k, v in self.summary_dict().items()] - msg = ", ".join(l) - return msg - - def errors_list(self): - l = list() - for record in self.error: - if isinstance(record, TestResultRecord): - keys = [ - TestResultEnums.RECORD_NAME, - TestResultEnums.RECORD_DETAILS, - TestResultEnums.RECORD_EXTRA_ERRORS, - ] - elif isinstance(record, ExceptionRecord): - keys = [TestResultEnums.RECORD_DETAILS, TestResultEnums.RECORD_POSITION] - else: - return [] - l.append({k: record.to_dict()[k] for k in keys}) - return l
diff --git a/src/antlion/signals.py b/src/antlion/signals.py deleted file mode 100644 index a3599f4..0000000 --- a/src/antlion/signals.py +++ /dev/null
@@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""This module is where all the test signal classes and related utilities live. -""" - -from mobly.signals import *
diff --git a/src/antlion/test_decorators.py b/src/antlion/test_decorators.py deleted file mode 100644 index a152f4f..0000000 --- a/src/antlion/test_decorators.py +++ /dev/null
@@ -1,99 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import signals - - -def __select_last(test_signals, _): - return test_signals[-1] - - -def repeated_test(num_passes, acceptable_failures=0, result_selector=__select_last): - """A decorator that runs a test case multiple times. - - This decorator can be used to run a test multiple times and aggregate the - data into a single test result. By setting `result_selector`, the user can - access the returned result of each run, allowing them to average results, - return the median, or gather and return standard deviation values. - - This decorator should be used on test cases, and should not be used on - static or class methods. The test case must take in an additional argument, - `attempt_number`, which returns the current attempt number, starting from - 1. - - Note that any TestSignal intended to abort or skip the test will take - abort or skip immediately. - - Args: - num_passes: The number of times the test needs to pass to report the - test case as passing. - acceptable_failures: The number of failures accepted. If the failures - exceeds this number, the test will stop repeating. The maximum - number of runs is `num_passes + acceptable_failures`. If the test - does fail, result_selector will still be called. 
- result_selector: A lambda that takes in the list of TestSignals and - returns the test signal to report the test case as. Note that the - list also contains any uncaught exceptions from the test execution. - """ - - def decorator(func): - if not func.__name__.startswith("test_"): - raise ValueError('Tests must start with "test_".') - - def test_wrapper(self): - num_failures = 0 - num_seen_passes = 0 - test_signals_received = [] - for i in range(num_passes + acceptable_failures): - try: - func(self, i + 1) - except ( - signals.TestFailure, - signals.TestError, - AssertionError, - ) as signal: - test_signals_received.append(signal) - num_failures += 1 - except signals.TestPass as signal: - test_signals_received.append(signal) - num_seen_passes += 1 - except (signals.TestSignal, KeyboardInterrupt): - raise - except Exception as signal: - test_signals_received.append(signal) - num_failures += 1 - else: - num_seen_passes += 1 - test_signals_received.append( - signals.TestPass( - "Test iteration %s of %s passed without details." - % (i, func.__name__) - ) - ) - - if num_failures > acceptable_failures: - break - elif num_seen_passes == num_passes: - break - else: - self.teardown_test() - self.setup_test() - - raise result_selector(test_signals_received, self) - - return test_wrapper - - return decorator
diff --git a/src/antlion/test_runner.py b/src/antlion/test_runner.py deleted file mode 100644 index bcb516f..0000000 --- a/src/antlion/test_runner.py +++ /dev/null
@@ -1,328 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import itertools - -import importlib -import inspect -import fnmatch -import json -import logging -import os -import sys - -from antlion import base_test -from antlion import keys -from antlion import logger -from antlion import records -from antlion import signals -from antlion import utils -from antlion import error - -from mobly.records import ExceptionRecord - - -def _find_test_class(): - """Finds the test class in a test script. - - Walk through module members and find the subclass of BaseTestClass. Only - one subclass is allowed in a test script. - - Returns: - The test class in the test module. - """ - test_classes = [] - main_module_members = sys.modules["__main__"] - for _, module_member in main_module_members.__dict__.items(): - if inspect.isclass(module_member): - if issubclass(module_member, base_test.BaseTestClass): - test_classes.append(module_member) - if len(test_classes) != 1: - logging.error( - "Expected 1 test class per file, found %s.", - [t.__name__ for t in test_classes], - ) - sys.exit(1) - return test_classes[0] - - -def execute_one_test_class(test_class, test_config, test_identifier): - """Executes one specific test class. - - You could call this function in your own cli test entry point if you choose - not to use act.py. 
- - Args: - test_class: A subclass of acts.base_test.BaseTestClass that has the test - logic to be executed. - test_config: A dict representing one set of configs for a test run. - test_identifier: A list of tuples specifying which test cases to run in - the test class. - - Returns: - True if all tests passed without any error, False otherwise. - - Raises: - If signals.TestAbortAll is raised by a test run, pipe it through. - """ - tr = TestRunner(test_config, test_identifier) - try: - tr.run(test_class) - return tr.results.is_all_pass - except signals.TestAbortAll: - raise - except: - logging.exception("Exception when executing %s.", tr.testbed_name) - finally: - tr.stop() - - -class TestRunner(object): - """The class that instantiates test classes, executes test cases, and - report results. - - Attributes: - test_run_config: The TestRunConfig object specifying what tests to run. - id: A string that is the unique identifier of this test run. - log: The logger object used throughout this test run. - test_classes: A dictionary where we can look up the test classes by name - to instantiate. Supports unix shell style wildcards. - run_list: A list of tuples specifying what tests to run. - results: The test result object used to record the results of this test - run. - running: A boolean signifies whether this test run is ongoing or not. 
- """ - - def __init__(self, test_configs, run_list): - self.test_run_config = test_configs - self.testbed_name = self.test_run_config.testbed_name - start_time = logger.get_log_file_timestamp() - self.id = "{}@{}".format(self.testbed_name, start_time) - self.test_run_config.log_path = os.path.abspath( - os.path.join(self.test_run_config.log_path, self.testbed_name, start_time) - ) - logger.setup_test_logger(self.log_path, self.testbed_name) - self.log = logging.getLogger() - self.test_run_config.summary_writer = records.TestSummaryWriter( - os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY) - ) - self.run_list = run_list - self.dump_config() - self.results = records.TestResult() - self.running = False - - @property - def log_path(self): - """The path to write logs of this test run to.""" - return self.test_run_config.log_path - - @property - def summary_writer(self): - """The object responsible for writing summary and results data.""" - return self.test_run_config.summary_writer - - def import_test_modules(self, test_paths): - """Imports test classes from test scripts. - - 1. Locate all .py files under test paths. - 2. Import the .py files as modules. - 3. Find the module members that are test classes. - 4. Categorize the test classes by name. - - Args: - test_paths: A list of directory paths where the test files reside. - - Returns: - A dictionary where keys are test class name strings, values are - actual test classes that can be instantiated. 
- """ - - def is_testfile_name(name, ext): - if ext == ".py": - if name.endswith("Test") or name.endswith("_test"): - return True - return False - - file_list = utils.find_files(test_paths, is_testfile_name) - test_classes = {} - for path, name, _ in file_list: - sys.path.append(path) - try: - with utils.SuppressLogOutput(log_levels=[logging.INFO, logging.ERROR]): - module = importlib.import_module(name) - except Exception as e: - logging.debug("Failed to import %s: %s", path, str(e)) - for test_cls_name, _ in self.run_list: - alt_name = name.replace("_", "").lower() - alt_cls_name = test_cls_name.lower() - # Only block if a test class on the run list causes an - # import error. We need to check against both naming - # conventions: AaaBbb and aaa_bbb. - if name == test_cls_name or alt_name == alt_cls_name: - msg = ( - "Encountered error importing test class %s, " "abort." - ) % test_cls_name - # This exception is logged here to help with debugging - # under py2, because "raise X from Y" syntax is only - # supported under py3. - self.log.exception(msg) - raise ValueError(msg) - continue - for member_name in dir(module): - if not member_name.startswith("__"): - if member_name.endswith("Test"): - test_class = getattr(module, member_name) - if inspect.isclass(test_class): - test_classes[member_name] = test_class - return test_classes - - def run_test_class(self, test_cls_name, test_cases=None): - """Instantiates and executes a test class. - - If test_cases is None, the test cases listed by self.tests will be - executed instead. If self.tests is empty as well, no test case in this - test class will be executed. - - Args: - test_cls_name: Name of the test class to execute. - test_cases: List of test case names to execute within the class. - - Raises: - ValueError is raised if the requested test class could not be found - in the test_paths directories. 
- """ - matches = fnmatch.filter(self.test_classes.keys(), test_cls_name) - if not matches: - self.log.info( - "Cannot find test class %s or classes matching pattern, " - "skipping for now." % test_cls_name - ) - record = records.TestResultRecord("*all*", test_cls_name) - record.test_skip(signals.TestSkip("Test class does not exist.")) - self.results.add_record(record) - return - if matches != [test_cls_name]: - self.log.info( - "Found classes matching pattern %s: %s", test_cls_name, matches - ) - - for test_cls_name_match in matches: - test_cls = self.test_classes[test_cls_name_match] - test_cls_instance = test_cls(self.test_run_config) - try: - cls_result = test_cls_instance.run(test_cases) - self.results += cls_result - except signals.TestAbortAll as e: - self.results += e.results - raise e - - def run(self, test_class=None): - """Executes test cases. - - This will instantiate controller and test classes, and execute test - classes. This can be called multiple times to repeatedly execute the - requested test cases. - - A call to TestRunner.stop should eventually happen to conclude the life - cycle of a TestRunner. - - Args: - test_class: The python module of a test class. If provided, run this - class; otherwise, import modules in under test_paths - based on run_list. 
- """ - if not self.running: - self.running = True - - if test_class: - self.test_classes = {test_class.__name__: test_class} - else: - t_paths = self.test_run_config.controller_configs[ - keys.Config.key_test_paths.value - ] - self.test_classes = self.import_test_modules(t_paths) - self.log.debug("Executing run list %s.", self.run_list) - for test_cls_name, test_case_names in self.run_list: - if not self.running: - break - - if test_case_names: - self.log.debug( - "Executing test cases %s in test class %s.", - test_case_names, - test_cls_name, - ) - else: - self.log.debug("Executing test class %s", test_cls_name) - - try: - self.run_test_class(test_cls_name, test_case_names) - except error.ActsError as e: - self.results.error.append(ExceptionRecord(e)) - self.log.error("Test Runner Error: %s" % e.details) - except signals.TestAbortAll as e: - self.log.warning("Abort all subsequent test classes. Reason: %s", e) - raise - - def stop(self): - """Releases resources from test run. Should always be called after - TestRunner.run finishes. - - This function concludes a test run and writes out a test report. 
- """ - if self.running: - msg = "\nSummary for test run %s: %s\n" % ( - self.id, - self.results.summary_str(), - ) - self._write_results_to_file() - self.log.info(msg.strip()) - logger.kill_test_logger(self.log) - self.running = False - - def _write_results_to_file(self): - """Writes test results to file(s) in a serializable format.""" - # Old JSON format - path = os.path.join(self.log_path, "test_run_summary.json") - with open(path, "w") as f: - f.write(self.results.json_str()) - # New YAML format - self.summary_writer.dump( - self.results.summary_dict(), records.TestSummaryEntryType.SUMMARY - ) - - def dump_config(self): - """Writes the test config to a JSON file under self.log_path""" - config_path = os.path.join(self.log_path, "test_configs.json") - with open(config_path, "a") as f: - json.dump( - dict( - itertools.chain( - self.test_run_config.user_params.items(), - self.test_run_config.controller_configs.items(), - ) - ), - f, - skipkeys=True, - indent=4, - ) - - def write_test_campaign(self): - """Log test campaign file.""" - path = os.path.join(self.log_path, "test_campaign.log") - with open(path, "w") as f: - for test_class, test_cases in self.run_list: - f.write("%s:\n%s" % (test_class, ",\n".join(test_cases))) - f.write("\n\n")
diff --git a/src/antlion/test_utils/OWNERS b/src/antlion/test_utils/OWNERS deleted file mode 100644 index bf3ed6c..0000000 --- a/src/antlion/test_utils/OWNERS +++ /dev/null
@@ -1 +0,0 @@ -include /acts_tests/tests/OWNERS
diff --git a/src/antlion/test_utils/__init__.py b/src/antlion/test_utils/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/test_utils/__init__.py +++ /dev/null
diff --git a/src/antlion/test_utils/abstract_devices/__init__.py b/src/antlion/test_utils/abstract_devices/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/test_utils/abstract_devices/__init__.py +++ /dev/null
diff --git a/src/antlion/test_utils/abstract_devices/wlan_device.py b/src/antlion/test_utils/abstract_devices/wlan_device.py deleted file mode 100644 index 5891012..0000000 --- a/src/antlion/test_utils/abstract_devices/wlan_device.py +++ /dev/null
@@ -1,598 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import inspect -import logging - -from antlion.controllers import iperf_client -from antlion.controllers.android_device import AndroidDevice -from antlion.controllers.fuchsia_device import FuchsiaDevice -from antlion.test_utils.wifi import wifi_test_utils as awutils -from antlion.utils import adb_shell_ping - -FUCHSIA_VALID_SECURITY_TYPES = {"none", "wep", "wpa", "wpa2", "wpa3"} - - -def create_wlan_device(hardware_device): - """Creates a generic WLAN device based on type of device that is sent to - the functions. - - Args: - hardware_device: A WLAN hardware device that is supported by ACTS. - """ - if isinstance(hardware_device, FuchsiaDevice): - return FuchsiaWlanDevice(hardware_device) - elif isinstance(hardware_device, AndroidDevice): - return AndroidWlanDevice(hardware_device) - else: - raise ValueError( - "Unable to create WlanDevice for type %s" % type(hardware_device) - ) - - -class WlanDevice(object): - """Class representing a generic WLAN device. - - Each object of this class represents a generic WLAN device. - Android device and Fuchsia devices are the currently supported devices/ - - Attributes: - device: A generic WLAN device. - """ - - def __init__(self, device): - self.device = device - self.log = logging - self.identifier = None - - def wifi_toggle_state(self, state): - """Base generic WLAN interface. 
Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def reset_wifi(self): - """Base generic WLAN interface. Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def take_bug_report(self, test_name=None, begin_time=None): - """Base generic WLAN interface. Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def get_log(self, test_name, begin_time): - """Base generic WLAN interface. Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def turn_location_off_and_scan_toggle_off(self): - """Base generic WLAN interface. Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def associate( - self, - target_ssid, - target_pwd=None, - check_connectivity=True, - hidden=False, - target_security=None, - ): - """Base generic WLAN interface. Only called if not overriden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def disconnect(self): - """Base generic WLAN interface. Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def get_wlan_interface_id_list(self): - """Base generic WLAN interface. Only called if not overridden by - another supported device. 
- """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def get_default_wlan_test_interface(self): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def destroy_wlan_interface(self, iface_id): - """Base generic WLAN interface. Only called if not overridden by - another supported device. - """ - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def send_command(self, command): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def is_connected(self, ssid=None): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def can_ping( - self, - dest_ip, - count=3, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def ping( - self, - dest_ip, - count=3, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def hard_power_cycle(self, pdus=None): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def save_network(self, ssid): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def clear_saved_networks(self): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def create_iperf_client(self, test_interface=None): - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - def feature_is_present(self, feature: str) -> bool: - raise NotImplementedError( - "{} must be defined.".format(inspect.currentframe().f_code.co_name) - ) - - 
-class AndroidWlanDevice(WlanDevice): - """Class wrapper for an Android WLAN device. - - Each object of this class represents a generic WLAN device. - Android device and Fuchsia devices are the currently supported devices/ - - Attributes: - android_device: An Android WLAN device. - """ - - def __init__(self, android_device): - super().__init__(android_device) - self.identifier = android_device.serial - - def wifi_toggle_state(self, state): - awutils.wifi_toggle_state(self.device, state) - - def reset_wifi(self): - awutils.reset_wifi(self.device) - - def take_bug_report(self, test_name=None, begin_time=None): - self.device.take_bug_report(test_name, begin_time) - - def get_log(self, test_name, begin_time): - self.device.cat_adb_log(test_name, begin_time) - - def turn_location_off_and_scan_toggle_off(self): - awutils.turn_location_off_and_scan_toggle_off(self.device) - - def associate( - self, - target_ssid, - target_pwd=None, - key_mgmt=None, - check_connectivity=True, - hidden=False, - target_security=None, - ): - """Function to associate an Android WLAN device. - - Args: - target_ssid: SSID to associate to. - target_pwd: Password for the SSID, if necessary. - key_mgmt: The hostapd wpa_key_mgmt value, distinguishes wpa3 from - wpa2 for android tests. - check_connectivity: Whether to check for internet connectivity. - hidden: Whether the network is hidden. - Returns: - True if successfully connected to WLAN, False if not. 
- """ - network = {"SSID": target_ssid, "hiddenSSID": hidden} - if target_pwd: - network["password"] = target_pwd - if key_mgmt: - network["security"] = key_mgmt - try: - awutils.connect_to_wifi_network( - self.device, - network, - check_connectivity=check_connectivity, - hidden=hidden, - ) - return True - except Exception as e: - self.device.log.info("Failed to associated (%s)" % e) - return False - - def disconnect(self): - awutils.turn_location_off_and_scan_toggle_off(self.device) - - def get_wlan_interface_id_list(self): - pass - - def get_default_wlan_test_interface(self): - return "wlan0" - - def destroy_wlan_interface(self, iface_id): - pass - - def send_command(self, command): - return self.device.adb.shell(str(command)) - - def is_connected(self, ssid=None): - wifi_info = self.device.droid.wifiGetConnectionInfo() - if ssid: - return "BSSID" in wifi_info and wifi_info["SSID"] == ssid - return "BSSID" in wifi_info - - def can_ping( - self, - dest_ip, - count=3, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ): - return adb_shell_ping( - self.device, dest_ip=dest_ip, count=count, timeout=timeout - ) - - def ping(self, dest_ip, count=3, interval=1000, timeout=1000, size=25): - pass - - def hard_power_cycle(self, pdus): - pass - - def save_network(self, ssid): - pass - - def clear_saved_networks(self): - pass - - def create_iperf_client(self, test_interface=None): - """Returns an iperf client on the Android, without requiring a - specific config. - - Args: - test_interface: optional, string, name of test interface. - - Returns: - IPerfClient object - """ - if not test_interface: - test_interface = self.get_default_wlan_test_interface() - - return iperf_client.IPerfClientOverAdb( - android_device_or_serial=self.device, test_interface=test_interface - ) - - def feature_is_present(self, feature: str) -> bool: - pass - - -class FuchsiaWlanDevice(WlanDevice): - """Class wrapper for an Fuchsia WLAN device. 
- - Each object of this class represents a generic WLAN device. - Android device and Fuchsia devices are the currently supported devices/ - - Attributes: - fuchsia_device: A Fuchsia WLAN device. - """ - - device: FuchsiaDevice - - def __init__(self, fuchsia_device): - super().__init__(fuchsia_device) - self.identifier = fuchsia_device.ip - self.device.configure_wlan() - - def wifi_toggle_state(self, state): - """Stub for Fuchsia implementation.""" - - def reset_wifi(self): - """Stub for Fuchsia implementation.""" - - def take_bug_report(self, test_name=None, begin_time=None): - """Stub for Fuchsia implementation.""" - self.device.take_bug_report(test_name, begin_time) - - def get_log(self, test_name, begin_time): - """Stub for Fuchsia implementation.""" - - def turn_location_off_and_scan_toggle_off(self): - """Stub for Fuchsia implementation.""" - - def associate( - self, - target_ssid, - target_pwd=None, - key_mgmt=None, - check_connectivity=True, - hidden=False, - target_security=None, - ): - """Function to associate a Fuchsia WLAN device. - - Args: - target_ssid: SSID to associate to. - target_pwd: Password for the SSID, if necessary. - key_mgmt: the hostapd wpa_key_mgmt, if specified. - check_connectivity: Whether to check for internet connectivity. - hidden: Whether the network is hidden. - target_security: string, target security for network, used to - save the network in policy connects (see wlan_policy_lib) - Returns: - True if successfully connected to WLAN, False if not. - """ - if self.device.association_mechanism == "drivers": - bss_scan_response = self.device.sl4f.wlan_lib.wlanScanForBSSInfo() - if bss_scan_response.get("error"): - self.log.error( - "Scan for BSS info failed. 
Err: %s" % bss_scan_response["error"] - ) - return False - - bss_descs_for_ssid = bss_scan_response["result"].get(target_ssid, None) - if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1: - self.log.error( - "Scan failed to find a BSS description for target_ssid %s" - % target_ssid - ) - return False - - connection_response = self.device.sl4f.wlan_lib.wlanConnectToNetwork( - target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd - ) - return self.device.check_connect_response(connection_response) - else: - return self.device.wlan_policy_controller.save_and_connect( - target_ssid, target_security, password=target_pwd - ) - - def disconnect(self): - """Function to disconnect from a Fuchsia WLAN device. - Asserts if disconnect was not successful. - """ - if self.device.association_mechanism == "drivers": - disconnect_response = self.device.sl4f.wlan_lib.wlanDisconnect() - return self.device.check_disconnect_response(disconnect_response) - else: - return ( - self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections() - ) - - def status(self): - return self.device.sl4f.wlan_lib.wlanStatus() - - def can_ping( - self, - dest_ip, - count=3, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ): - return self.device.can_ping( - dest_ip, - count=count, - interval=interval, - timeout=timeout, - size=size, - additional_ping_params=additional_ping_params, - ) - - def ping( - self, - dest_ip, - count=3, - interval=1000, - timeout=1000, - size=25, - additional_ping_params=None, - ): - return self.device.ping( - dest_ip, - count=count, - interval=interval, - timeout=timeout, - size=size, - additional_ping_params=additional_ping_params, - ) - - def get_wlan_interface_id_list(self): - """Function to list available WLAN interfaces. - - Returns: - A list of wlan interface IDs. 
- """ - return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get("result") - - def get_default_wlan_test_interface(self): - """Returns name of the WLAN client interface""" - return self.device.wlan_client_test_interface_name - - def destroy_wlan_interface(self, iface_id): - """Function to associate a Fuchsia WLAN device. - - Args: - target_ssid: SSID to associate to. - target_pwd: Password for the SSID, if necessary. - check_connectivity: Whether to check for internet connectivity. - hidden: Whether the network is hidden. - Returns: - True if successfully destroyed wlan interface, False if not. - """ - result = self.device.sl4f.wlan_lib.wlanDestroyIface(iface_id) - if result.get("error") is None: - return True - else: - self.log.error( - "Failed to destroy interface with: {}".format(result.get("error")) - ) - return False - - def send_command(self, command): - return self.device.ssh.run(str(command)).stdout - - def is_connected(self, ssid=None): - """Determines if wlan_device is connected to wlan network. - - Args: - ssid (optional): string, to check if device is connect to a specific - network. - - Returns: - True, if connected to a network or to the correct network when SSID - is provided. - False, if not connected or connect to incorrect network when SSID is - provided. - """ - response = self.status() - if response.get("error"): - raise ConnectionError("Failed to get client network connection status") - result = response.get("result") - if isinstance(result, dict): - connected_to = result.get("Connected") - # TODO(https://fxbug.dev/85938): Remove backwards compatibility once - # ACTS is versioned with Fuchsia. - if not connected_to: - connected_to = result.get("connected_to") - if not connected_to: - return False - - if ssid: - # Replace encoding errors instead of raising an exception. - # Since `ssid` is a string, this will not affect the test - # for equality. 
- connected_ssid = bytearray(connected_to["ssid"]).decode( - encoding="utf-8", errors="replace" - ) - return ssid == connected_ssid - return True - return False - - def hard_power_cycle(self, pdus): - self.device.reboot(reboot_type="hard", testbed_pdus=pdus) - - def save_network(self, target_ssid, security_type=None, target_pwd=None): - if self.device.association_mechanism == "drivers": - raise EnvironmentError( - "Cannot save network using the drivers. Saved networks are a " - "policy layer concept." - ) - if security_type and security_type not in FUCHSIA_VALID_SECURITY_TYPES: - raise TypeError("Invalid security type: %s" % security_type) - if not self.device.wlan_policy_controller.save_network( - target_ssid, security_type, password=target_pwd - ): - raise EnvironmentError("Failed to save network: %s" % target_ssid) - - def clear_saved_networks(self): - if self.device.association_mechanism == "drivers": - raise EnvironmentError( - "Cannot clear saved network using the drivers. Saved networks " - "are a policy layer concept." - ) - if not self.device.wlan_policy_controller.remove_all_networks(): - raise EnvironmentError("Failed to clear saved networks") - - def create_iperf_client(self, test_interface=None): - """Returns an iperf client on the FuchsiaDevice, without requiring a - specific config. - - Args: - test_interface: optional, string, name of test interface. Defaults - to first found wlan client interface. - - Returns: - IPerfClient object - """ - if not test_interface: - test_interface = self.get_default_wlan_test_interface() - - # A package server is necessary to acquire the iperf3 client for - # some builds. - self.device.start_package_server() - - return iperf_client.IPerfClientOverSsh( - { - "user": "fuchsia", - "host": self.device.ip, - "ssh_config": self.device.ssh_config, - }, - ssh_provider=self.device.ssh, - test_interface=test_interface, - ) - - def feature_is_present(self, feature: str) -> bool: - return feature in self.device.wlan_features
diff --git a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py b/src/antlion/test_utils/abstract_devices/wmm_transceiver.py deleted file mode 100644 index e38d91a..0000000 --- a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py +++ /dev/null
@@ -1,696 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import multiprocessing -import time - -from datetime import datetime -from uuid import uuid4 - -from antlion import signals -from antlion import tracelogger -from antlion import utils -from antlion.controllers import iperf_client -from antlion.controllers import iperf_server - -AC_VO = "AC_VO" -AC_VI = "AC_VI" -AC_BE = "AC_BE" -AC_BK = "AC_BK" - -# TODO(fxb/61421): Add tests to check all DSCP classes are mapped to the correct -# AC (there are many that aren't included here). Requires implementation of -# sniffer. -DEFAULT_AC_TO_TOS_TAG_MAP = {AC_VO: "0xC0", AC_VI: "0x80", AC_BE: "0x0", AC_BK: "0x20"} -UDP = "udp" -TCP = "tcp" -DEFAULT_IPERF_PORT = 5201 -DEFAULT_STREAM_TIME = 10 -DEFAULT_IP_ADDR_TIMEOUT = 15 -PROCESS_JOIN_TIMEOUT = 60 -AVAILABLE = True -UNAVAILABLE = False - - -class WmmTransceiverError(signals.ControllerError): - pass - - -def create(config, identifier=None, wlan_devices=None, access_points=None): - """Creates a WmmTransceiver from a config. - - Args: - config: dict, config parameters for the transceiver. Contains: - - iperf_config: dict, the config to use for creating IPerfClients - and IPerfServers (excluding port). - - port_range_start: int, the lower bound of the port range to use - for creating IPerfServers. Defaults to 5201. 
- - wlan_device: string, the identifier of the wlan_device used for - this WmmTransceiver (optional) - - identifier: string, identifier for the WmmTransceiver. Must be provided - either as arg or in the config. - wlan_devices: list of WlanDevice objects from which to get the - wlan_device, if any, used as this transceiver - access_points: list of AccessPoint objects from which to get the - access_point, if any, used as this transceiver - """ - try: - # If identifier is not provided as func arg, it must be provided via - # config file. - if not identifier: - identifier = config["identifier"] - iperf_config = config["iperf_config"] - - except KeyError as err: - raise WmmTransceiverError( - "Parameter not provided as func arg, nor found in config: %s" % err - ) - - if wlan_devices is None: - wlan_devices = [] - - if access_points is None: - access_points = [] - - port_range_start = config.get("port_range_start", DEFAULT_IPERF_PORT) - - wd = None - ap = None - if "wlan_device" in config: - wd = _find_wlan_device(config["wlan_device"], wlan_devices) - elif "access_point" in config: - ap = _find_access_point(config["access_point"], access_points) - - return WmmTransceiver( - iperf_config, - identifier, - wlan_device=wd, - access_point=ap, - port_range_start=port_range_start, - ) - - -def _find_wlan_device(wlan_device_identifier, wlan_devices): - """Returns WlanDevice based on string identifier (e.g. ip, serial, etc.) 
- - Args: - wlan_device_identifier: string, identifier for the desired WlanDevice - wlan_devices: list, WlanDevices to search through - - Returns: - WlanDevice, with identifier matching wlan_device_identifier - - Raises: - WmmTransceiverError, if no WlanDevice matches identifier - """ - for wd in wlan_devices: - if wlan_device_identifier == wd.identifier: - return wd - raise WmmTransceiverError( - "No WlanDevice with identifier: %s" % wlan_device_identifier - ) - - -def _find_access_point(access_point_ip, access_points): - """Returns AccessPoint based on string ip address - - Args: - access_point_ip: string, control plane ip addr of the desired AP, - access_points: list, AccessPoints to search through - - Returns: - AccessPoint, with hostname matching access_point_ip - - Raises: - WmmTransceiverError, if no AccessPoint matches ip""" - for ap in access_points: - if ap.ssh_settings.hostname == access_point_ip: - return ap - raise WmmTransceiverError("No AccessPoint with ip: %s" % access_point_ip) - - -class WmmTransceiver(object): - """Object for handling WMM tagged streams between devices""" - - def __init__( - self, - iperf_config, - identifier, - wlan_device=None, - access_point=None, - port_range_start=5201, - ): - self.identifier = identifier - self.log = tracelogger.TraceLogger( - WmmTransceiverLoggerAdapter( - logging.getLogger(), {"identifier": self.identifier} - ) - ) - # WlanDevice or AccessPoint, that is used as the transceiver. Only one - # will be set. This helps consolodate association, setup, teardown, etc. 
- self.wlan_device = wlan_device - self.access_point = access_point - - # Parameters used to create IPerfClient and IPerfServer objects on - # device - self._iperf_config = iperf_config - self._test_interface = self._iperf_config.get("test_interface") - self._port_range_start = port_range_start - self._next_server_port = port_range_start - - # Maps IPerfClients, used for streams from this device, to True if - # available, False if reserved - self._iperf_clients = {} - - # Maps IPerfServers, used to receive streams from other devices, to True - # if available, False if reserved - self._iperf_servers = {} - - # Maps ports of servers, which are provided to other transceivers, to - # the actual IPerfServer objects - self._iperf_server_ports = {} - - # Maps stream UUIDs to IPerfClients reserved for that streams use - self._reserved_clients = {} - - # Maps stream UUIDs to (WmmTransceiver, IPerfServer) tuples, where the - # server is reserved on the transceiver for that streams use - self._reserved_servers = {} - - # Maps with shared memory functionality to be used across the parallel - # streams. active_streams holds UUIDs of streams that are currently - # running on this device (mapped to True, since there is no - # multiprocessing set). stream_results maps UUIDs of streams completed - # on this device to IPerfResult results for that stream. - self._manager = multiprocessing.Manager() - self._active_streams = self._manager.dict() - self._stream_results = self._manager.dict() - - # Holds parameters for streams that are prepared to run asynchronously - # (i.e. resources have been allocated). Maps UUIDs of the future streams - # to a dict, containing the stream parameters. 
- self._pending_async_streams = {} - - # Set of UUIDs of asynchronous streams that have at least started, but - # have not had their resources reclaimed yet - self._ran_async_streams = set() - - # Set of stream parallel process, which can be joined if completed - # successfully, or terminated and joined in the event of an error - self._running_processes = set() - - def run_synchronous_traffic_stream(self, stream_parameters, subnet): - """Runs a traffic stream with IPerf3 between two WmmTransceivers and - saves the results. - - Args: - stream_parameters: dict, containing parameters to used for the - stream. See _parse_stream_parameters for details. - subnet: string, the subnet of the network to use for the stream - - Returns: - uuid: UUID object, identifier of the stream - """ - ( - receiver, - access_category, - bandwidth, - stream_time, - ) = self._parse_stream_parameters(stream_parameters) - uuid = uuid4() - - (client, server_ip, server_port) = self._get_stream_resources( - uuid, receiver, subnet - ) - - self._validate_server_address(server_ip, uuid) - - self.log.info( - "Running synchronous stream to %s WmmTransceiver" % receiver.identifier - ) - self._run_traffic( - uuid, - client, - server_ip, - server_port, - self._active_streams, - self._stream_results, - access_category=access_category, - bandwidth=bandwidth, - stream_time=stream_time, - ) - - self._return_stream_resources(uuid) - return uuid - - def prepare_asynchronous_stream(self, stream_parameters, subnet): - """Reserves resources and saves configs for upcoming asynchronous - traffic streams, so they can be started more simultaneously. - - Args: - stream_parameters: dict, containing parameters to used for the - stream. See _parse_stream_parameters for details. 
- subnet: string, the subnet of the network to use for the stream - - Returns: - uuid: UUID object, identifier of the stream - """ - (receiver, access_category, bandwidth, time) = self._parse_stream_parameters( - stream_parameters - ) - uuid = uuid4() - - (client, server_ip, server_port) = self._get_stream_resources( - uuid, receiver, subnet - ) - - self._validate_server_address(server_ip, uuid) - - pending_stream_config = { - "client": client, - "server_ip": server_ip, - "server_port": server_port, - "access_category": access_category, - "bandwidth": bandwidth, - "time": time, - } - - self._pending_async_streams[uuid] = pending_stream_config - self.log.info("Stream to %s WmmTransceiver prepared." % receiver.identifier) - return uuid - - def start_asynchronous_streams(self, start_time=None): - """Starts pending asynchronous streams between two WmmTransceivers as - parallel processes. - - Args: - start_time: float, time, seconds since epoch, at which to start the - stream (for better synchronicity). If None, start immediately. - """ - for uuid in self._pending_async_streams: - pending_stream_config = self._pending_async_streams[uuid] - client = pending_stream_config["client"] - server_ip = pending_stream_config["server_ip"] - server_port = pending_stream_config["server_port"] - access_category = pending_stream_config["access_category"] - bandwidth = pending_stream_config["bandwidth"] - time = pending_stream_config["time"] - - process = multiprocessing.Process( - target=self._run_traffic, - args=[ - uuid, - client, - server_ip, - server_port, - self._active_streams, - self._stream_results, - ], - kwargs={ - "access_category": access_category, - "bandwidth": bandwidth, - "stream_time": time, - "start_time": start_time, - }, - ) - - # This needs to be set here to ensure its marked active before - # it even starts. 
- self._active_streams[uuid] = True - process.start() - self._ran_async_streams.add(uuid) - self._running_processes.add(process) - - self._pending_async_streams.clear() - - def cleanup_asynchronous_streams(self, timeout=PROCESS_JOIN_TIMEOUT): - """Releases reservations on resources (IPerfClients and IPerfServers) - that were held for asynchronous streams, both pending and finished. - Attempts to join any running processes, logging an error if timeout is - exceeded. - - Args: - timeout: time, in seconds, to wait for each running process, if any, - to join - """ - self.log.info("Cleaning up any asynchronous streams.") - - # Releases resources for any streams that were prepared, but no run - for uuid in self._pending_async_streams: - self.log.error("Pending asynchronous stream %s never ran. Cleaning." % uuid) - self._return_stream_resources(uuid) - self._pending_async_streams.clear() - - # Attempts to join any running streams, terminating them after timeout - # if necessary. - while self._running_processes: - process = self._running_processes.pop() - process.join(timeout) - if process.is_alive(): - self.log.error( - "Stream process failed to join in %s seconds. Terminating." 
- % timeout - ) - process.terminate() - process.join() - self._active_streams.clear() - - # Release resources for any finished streams - while self._ran_async_streams: - uuid = self._ran_async_streams.pop() - self._return_stream_resources(uuid) - - def get_results(self, uuid): - """Retrieves a streams IPerfResults from stream_results - - Args: - uuid: UUID object, identifier of the stream - """ - return self._stream_results.get(uuid, None) - - def destroy_resources(self): - for server in self._iperf_servers: - server.stop() - self._iperf_servers.clear() - self._iperf_server_ports.clear() - self._iperf_clients.clear() - self._next_server_port = self._port_range_start - self._stream_results.clear() - - @property - def has_active_streams(self): - return bool(self._active_streams) - - # Helper Functions - - def _run_traffic( - self, - uuid, - client, - server_ip, - server_port, - active_streams, - stream_results, - access_category=None, - bandwidth=None, - stream_time=DEFAULT_STREAM_TIME, - start_time=None, - ): - """Runs an iperf3 stream. - - 1. Adds stream UUID to active_streams - 2. Runs stream - 3. Saves results to stream_results - 4. Removes stream UUID from active_streams - - Args: - uuid: UUID object, identifier for stream - client: IPerfClient object on device - server_ip: string, ip address of IPerfServer for stream - server_port: int, port of the IPerfServer for stream - active_streams: multiprocessing.Manager.dict, which holds stream - UUIDs of active streams on the device - stream_results: multiprocessing.Manager.dict, which maps stream - UUIDs of streams to IPerfResult objects - access_category: string, WMM access category to use with iperf - (AC_BK, AC_BE, AC_VI, AC_VO). Unset if None. - bandwidth: int, bandwidth in mbps to use with iperf. Implies UDP. - Unlimited if None. - stream_time: int, time in seconds, to run iperf stream - start_time: float, time, seconds since epoch, at which to start the - stream (for better synchronicity). 
If None, start immediately. - """ - active_streams[uuid] = True - # SSH sessions must be started within the process that is going to - # use it. - if type(client) == iperf_client.IPerfClientOverSsh: - with utils.SuppressLogOutput(): - client.start_ssh() - - ac_flag = "" - bandwidth_flag = "" - time_flag = "-t %s" % stream_time - - if access_category: - ac_flag = " -S %s" % DEFAULT_AC_TO_TOS_TAG_MAP[access_category] - - if bandwidth: - bandwidth_flag = " -u -b %sM" % bandwidth - - iperf_flags = "-p %s -i 1 %s%s%s -J" % ( - server_port, - time_flag, - ac_flag, - bandwidth_flag, - ) - if not start_time: - start_time = time.time() - time_str = datetime.fromtimestamp(start_time).strftime("%H:%M:%S.%f") - self.log.info( - "At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)" - % ( - time_str, - stream_time, - server_ip, - server_port, - access_category, - bandwidth if bandwidth else "Unlimited", - ) - ) - - # If present, wait for stream start time - if start_time: - current_time = time.time() - while current_time < start_time: - current_time = time.time() - path = client.start(server_ip, iperf_flags, "%s" % uuid) - stream_results[uuid] = iperf_server.IPerfResult( - path, reporting_speed_units="mbps" - ) - - if type(client) == iperf_client.IPerfClientOverSsh: - client.close_ssh() - active_streams.pop(uuid) - - def _get_stream_resources(self, uuid, receiver, subnet): - """Reserves an IPerfClient and IPerfServer for a stream. 
- - Args: - uuid: UUID object, identifier of the stream - receiver: WmmTransceiver object, which will be the streams receiver - subnet: string, subnet of test network, to retrieve the appropriate - server address - - Returns: - (IPerfClient, string, int) representing the client, server address, - and server port to use for the stream - """ - client = self._get_client(uuid) - server_ip, server_port = self._get_server(receiver, uuid, subnet) - return (client, server_ip, server_port) - - def _return_stream_resources(self, uuid): - """Releases reservations on a streams IPerfClient and IPerfServer, so - they can be used by a future stream. - - Args: - uuid: UUID object, identifier of the stream - """ - if uuid in self._active_streams: - raise EnvironmentError("Resource still being used by stream %s" % uuid) - (receiver, server_port) = self._reserved_servers.pop(uuid) - receiver._release_server(server_port) - client = self._reserved_clients.pop(uuid) - self._iperf_clients[client] = AVAILABLE - - def _get_client(self, uuid): - """Retrieves and reserves IPerfClient for use in a stream. If none are - available, a new one is created. - - Args: - uuid: UUID object, identifier for stream, used to link client to - stream for teardown - - Returns: - IPerfClient on device - """ - reserved_client = None - for client in self._iperf_clients: - if self._iperf_clients[client] == AVAILABLE: - reserved_client = client - break - else: - reserved_client = iperf_client.create([self._iperf_config])[0] - # Due to the nature of multiprocessing, ssh connections must - # be started inside the parallel processes, so it must be closed - # here. 
- if type(reserved_client) == iperf_client.IPerfClientOverSsh: - reserved_client.close_ssh() - - self._iperf_clients[reserved_client] = UNAVAILABLE - self._reserved_clients[uuid] = reserved_client - return reserved_client - - def _get_server(self, receiver, uuid, subnet): - """Retrieves the address and port of a reserved IPerfServer object from - the receiver object for use in a stream. - - Args: - receiver: WmmTransceiver, to get an IPerfServer from - uuid: UUID, identifier for stream, used to link server to stream - for teardown - subnet: string, subnet of test network, to retrieve the appropriate - server address - - Returns: - (string, int) representing the IPerfServer address and port - """ - (server_ip, server_port) = receiver._reserve_server(subnet) - self._reserved_servers[uuid] = (receiver, server_port) - return (server_ip, server_port) - - def _reserve_server(self, subnet): - """Reserves an available IPerfServer for use in a stream from another - WmmTransceiver. If none are available, a new one is created. 
- - Args: - subnet: string, subnet of test network, to retrieve the appropriate - server address - - Returns: - (string, int) representing the IPerfServer address and port - """ - reserved_server = None - for server in self._iperf_servers: - if self._iperf_servers[server] == AVAILABLE: - reserved_server = server - break - else: - iperf_server_config = self._iperf_config - iperf_server_config.update({"port": self._next_server_port}) - self._next_server_port += 1 - reserved_server = iperf_server.create([iperf_server_config])[0] - self._iperf_server_ports[reserved_server.port] = reserved_server - - self._iperf_servers[reserved_server] = UNAVAILABLE - reserved_server.start() - end_time = time.time() + DEFAULT_IP_ADDR_TIMEOUT - while time.time() < end_time: - if self.wlan_device: - addresses = utils.get_interface_ip_addresses( - self.wlan_device.device, self._test_interface - ) - else: - addresses = reserved_server.get_interface_ip_addresses( - self._test_interface - ) - for addr in addresses["ipv4_private"]: - if utils.ip_in_subnet(addr, subnet): - return (addr, reserved_server.port) - raise AttributeError( - "Reserved server has no ipv4 address in the %s subnet" % subnet - ) - - def _release_server(self, server_port): - """Releases reservation on IPerfServer, which was held for a stream - from another WmmTransceiver. - - Args: - server_port: int, the port of the IPerfServer being returned (since) - it is the identifying characteristic - """ - server = self._iperf_server_ports[server_port] - server.stop() - self._iperf_servers[server] = AVAILABLE - - def _validate_server_address(self, server_ip, uuid, timeout=60): - """Verifies server address can be pinged before attempting to run - traffic, since iperf is unforgiving when the server is unreachable. 
- - Args: - server_ip: string, ip address of the iperf server - uuid: string, uuid of the stream to use this server - timeout: int, time in seconds to wait for server to respond to pings - - Raises: - WmmTransceiverError, if, after timeout, server ip is unreachable. - """ - self.log.info("Verifying server address (%s) is reachable." % server_ip) - end_time = time.time() + timeout - while time.time() < end_time: - if self.can_ping(server_ip): - break - else: - self.log.debug( - "Could not ping server address (%s). Retrying in 1 second." - % (server_ip) - ) - time.sleep(1) - else: - self._return_stream_resources(uuid) - raise WmmTransceiverError( - "IPerfServer address (%s) unreachable." % server_ip - ) - - def can_ping(self, dest_ip): - """Utilizes can_ping function in wlan_device or access_point device to - ping dest_ip - - Args: - dest_ip: string, ip address to ping - - Returns: - True, if dest address is reachable - False, otherwise - """ - if self.wlan_device: - return self.wlan_device.can_ping(dest_ip) - else: - return self.access_point.can_ping(dest_ip) - - def _parse_stream_parameters(self, stream_parameters): - """Parses stream_parameters from dictionary. - - Args: - stream_parameters: dict of stream parameters - 'receiver': WmmTransceiver, the receiver for the stream - 'access_category': String, the access category to use for the - stream. Unset if None. - 'bandwidth': int, bandwidth in mbps for the stream. If set, - implies UDP. If unset, implies TCP and unlimited bandwidth. - 'time': int, time in seconds to run stream. 
- - Returns: - (receiver, access_category, bandwidth, time) as - (WmmTransceiver, String, int, int) - """ - receiver = stream_parameters["receiver"] - access_category = stream_parameters.get("access_category", None) - bandwidth = stream_parameters.get("bandwidth", None) - time = stream_parameters.get("time", DEFAULT_STREAM_TIME) - return (receiver, access_category, bandwidth, time) - - -class WmmTransceiverLoggerAdapter(logging.LoggerAdapter): - def process(self, msg, kwargs): - if self.extra["identifier"]: - log_identifier = " | %s" % self.extra["identifier"] - else: - log_identifier = "" - msg = "[WmmTransceiver%s] %s" % (log_identifier, msg) - return (msg, kwargs)
diff --git a/src/antlion/test_utils/dhcp/__init__.py b/src/antlion/test_utils/dhcp/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/test_utils/dhcp/__init__.py +++ /dev/null
diff --git a/src/antlion/test_utils/dhcp/base_test.py b/src/antlion/test_utils/dhcp/base_test.py deleted file mode 100644 index 6f68c3e..0000000 --- a/src/antlion/test_utils/dhcp/base_test.py +++ /dev/null
@@ -1,263 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from antlion import utils -from antlion.controllers.access_point import setup_ap, AccessPoint -from antlion.controllers.ap_lib import dhcp_config -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.hostapd_utils import generate_random_password -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts - - -class Dhcpv4InteropFixture(base_test.WifiBaseTest): - """Test helpers for validating DHCPv4 Interop - - Test Bed Requirement: - * One Android device or Fuchsia device - * One Access Point - """ - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.access_point: AccessPoint = self.access_points[0] - self.access_point.stop_all_aps() - - def setup_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.access_point.stop_all_aps() - - def connect(self, ap_params): - asserts.assert_true( - self.dut.associate( - ap_params["ssid"], - target_pwd=ap_params["password"], - target_security=ap_params["target_security"], - ), - "Failed to connect.", - ) - - def setup_ap(self): - """Generates a hostapd config and sets up the AP with that config. - Does not run a DHCP server. - - Returns: A dictionary of information about the AP. - """ - ssid = utils.rand_ascii_str(20) - security_mode = hostapd_constants.WPA2_STRING - security_profile = Security( - security_mode=security_mode, - password=generate_random_password(length=20), - wpa_cipher="CCMP", - wpa2_cipher="CCMP", - ) - password = security_profile.password - target_security = ( - hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security_mode - ) - ) - - ap_ids = setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - mode=hostapd_constants.MODE_11N_MIXED, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - n_capabilities=[], - ac_capabilities=[], - force_wmm=True, - ssid=ssid, - security=security_profile, - password=password, - ) - - if len(ap_ids) > 1: - raise Exception("Expected only one SSID on AP") - - configured_subnets = self.access_point.get_configured_subnets() - if len(configured_subnets) > 1: - raise Exception("Expected only one subnet on AP") - router_ip = configured_subnets[0].router - network = configured_subnets[0].network - - 
self.access_point.stop_dhcp() - - return { - "ssid": ssid, - "password": password, - "target_security": target_security, - "ip": router_ip, - "network": network, - "id": ap_ids[0], - } - - def device_can_ping(self, dest_ip): - """Checks if the DUT can ping the given address. - - Returns: True if can ping, False otherwise""" - self.log.info("Attempting to ping %s..." % dest_ip) - ping_result = self.dut.can_ping(dest_ip, count=2) - if ping_result: - self.log.info("Success pinging: %s" % dest_ip) - else: - self.log.info("Failure pinging: %s" % dest_ip) - return ping_result - - def get_device_ipv4_addr(self, interface=None, timeout=20): - """Checks if device has an ipv4 private address. Sleeps 1 second between - retries. - - Args: - interface: string, name of interface from which to get ipv4 address. - - Raises: - ConnectionError, if DUT does not have an ipv4 address after all - timeout. - - Returns: - The device's IP address - - """ - self.log.debug("Fetching updated WLAN interface list") - if interface is None: - interface = self.dut.device.wlan_client_test_interface_name - self.log.info( - "Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s " - "seconds." % (interface, timeout) - ) - timeout = time.time() + timeout - while time.time() < timeout: - ip_addrs = self.dut.device.get_interface_ip_addresses(interface) - - if len(ip_addrs["ipv4_private"]) > 0: - ip = ip_addrs["ipv4_private"][0] - self.log.info("DUT has an ipv4 address: %s" % ip) - return ip - else: - self.log.debug( - "DUT does not yet have an ipv4 address...retrying in 1 " "second." - ) - time.sleep(1) - else: - raise ConnectionError("DUT failed to get an ipv4 address.") - - def run_test_case_expect_dhcp_success(self, _test_name, settings): - """Starts the AP and DHCP server, and validates that the client - connects and obtains an address. 
- - Args: - _test_name: name of the test being run, this variable is not used - settings: a dictionary containing: - dhcp_parameters: a dictionary of DHCP parameters - dhcp_options: a dictionary of DHCP options - """ - ap_params = self.setup_ap() - subnet_conf = dhcp_config.Subnet( - subnet=ap_params["network"], - router=ap_params["ip"], - additional_parameters=settings["dhcp_parameters"], - additional_options=settings["dhcp_options"], - ) - dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf]) - - self.log.debug("DHCP Configuration:\n" + dhcp_conf.render_config_file() + "\n") - - self.access_point.start_dhcp(dhcp_conf=dhcp_conf) - self.connect(ap_params=ap_params) - - # Typical log lines look like: - # dhcpd[26695]: DHCPDISCOVER from f8:0f:f9:3d:ce:d1 via wlan1 - # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1 - # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from f8:0f:f9:3d:ce:d1 via wlan1 - # dhcpd[26695]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1 - - try: - ip = self.get_device_ipv4_addr() - except ConnectionError: - self.log.warn(dhcp_logs) - asserts.fail(f"DUT failed to get an IP address") - - # Get updates to DHCP logs - dhcp_logs = self.access_point.get_dhcp_logs() - - expected_string = f"DHCPDISCOVER from" - asserts.assert_equal( - dhcp_logs.count(expected_string), - 1, - f'Incorrect count of DHCP Discovers ("{expected_string}") in logs:\n' - + dhcp_logs - + "\n", - ) - - expected_string = f"DHCPOFFER on {ip}" - asserts.assert_equal( - dhcp_logs.count(expected_string), - 1, - f'Incorrect count of DHCP Offers ("{expected_string}") in logs:\n' - + dhcp_logs - + "\n", - ) - - expected_string = f"DHCPREQUEST for {ip}" - asserts.assert_true( - dhcp_logs.count(expected_string) >= 1, - f'Incorrect count of DHCP Requests ("{expected_string}") in logs: ' - + dhcp_logs - + "\n", - ) - - expected_string = f"DHCPACK on {ip}" - asserts.assert_true( - dhcp_logs.count(expected_string) >= 1, - f'Incorrect count of DHCP 
Acks ("{expected_string}") in logs: ' - + dhcp_logs - + "\n", - ) - - asserts.assert_true( - self.device_can_ping(ap_params["ip"]), - f'DUT failed to ping router at {ap_params["ip"]}', - )
diff --git a/src/antlion/test_utils/fuchsia/__init__.py b/src/antlion/test_utils/fuchsia/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/test_utils/fuchsia/__init__.py +++ /dev/null
diff --git a/src/antlion/test_utils/fuchsia/utils.py b/src/antlion/test_utils/fuchsia/utils.py deleted file mode 100644 index 89bbc64..0000000 --- a/src/antlion/test_utils/fuchsia/utils.py +++ /dev/null
@@ -1,120 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -from antlion.controllers.fuchsia_lib.ssh import SSHError - - -def http_file_download_by_curl( - fd, - url, - out_path="/tmp/", - curl_loc="/bin/curl", - remove_file_after_check=True, - timeout=3600, - limit_rate=None, - additional_args=None, - retry=3, -): - """Download http file by ssh curl. - - Args: - fd: Fuchsia Device Object. - url: The url that file to be downloaded from. - out_path: Optional. Where to download file to. - out_path is /tmp by default. - curl_loc: Location of curl binary on fd. - remove_file_after_check: Whether to remove the downloaded file after - check. - timeout: timeout for file download to complete. - limit_rate: download rate in bps. None, if do not apply rate limit. - additional_args: Any additional args for curl. - retry: the retry request times provided in curl command. 
- """ - file_directory, file_name = _generate_file_directory_and_file_name(url, out_path) - file_path = os.path.join(file_directory, file_name) - curl_cmd = curl_loc - if limit_rate: - curl_cmd += f" --limit-rate {limit_rate}" - if retry: - curl_cmd += f" --retry {retry}" - if additional_args: - curl_cmd += f" {additional_args}" - curl_cmd += f" --url {url} > {file_path}" - - fd.log.info(f"Download {url} to {file_path} by ssh command {curl_cmd}") - try: - fd.ssh.run(curl_cmd, timeout_sec=timeout) - if _check_file_existence(fd, file_path): - fd.log.info(f"{url} is downloaded to {file_path} successfully") - return True - - fd.log.warning(f"Fail to download {url}") - return False - except SSHError as e: - fd.log.warning(f'Command "{curl_cmd}" failed with error {e}') - return False - except Exception as e: - fd.log.error(f"Download {url} failed with unexpected exception {e}") - return False - finally: - if remove_file_after_check: - fd.log.info(f"Remove the downloaded file {file_path}") - try: - fd.ssh.run(f"rm {file_path}") - except SSHError: - pass - - -def _generate_file_directory_and_file_name(url, out_path): - """Splits the file from the url and specifies the appropriate location of - where to store the downloaded file. - - Args: - url: A url to the file that is going to be downloaded. - out_path: The location of where to store the file that is downloaded. - - Returns: - file_directory: The directory of where to store the downloaded file. - file_name: The name of the file that is being downloaded. - """ - file_name = url.split("/")[-1] - if not out_path: - file_directory = "/tmp/" - elif not out_path.endswith("/"): - file_directory, file_name = os.path.split(out_path) - else: - file_directory = out_path - return file_directory, file_name - - -def _check_file_existence(fd, file_path): - """Check file existence by file_path. If expected_file_size - is provided, then also check if the file meet the file size requirement. 
- - Args: - fd: A fuchsia device - file_path: Where to store the file on the fuchsia device. - """ - try: - result = fd.ssh.run(f'ls -al "{file_path}"') - fd.log.debug(f"File {file_path} exists.") - return True - except SSHError as e: - if "No such file or directory" in e.result.stderr: - fd.log.debug(f"File {file_path} does not exist.") - return False - raise e
diff --git a/src/antlion/test_utils/fuchsia/wmm_test_cases.py b/src/antlion/test_utils/fuchsia/wmm_test_cases.py deleted file mode 100644 index 48eb8ce..0000000 --- a/src/antlion/test_utils/fuchsia/wmm_test_cases.py +++ /dev/null
@@ -1,1326 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Internal Traffic Differentiation -test_internal_traffic_diff_VO_VI = { - "phase_1": { - "stream_VO": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03) - ], - ), - "stream_VI": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=1.0, - validation=[dict(operator="<", phase="phase_1", stream="stream_VO")], - ), - } -} - -test_internal_traffic_diff_VO_BE = { - "phase_1": { - "stream_VO": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BE": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=1.0, - validation=[dict(operator="<", phase="phase_1", stream="stream_VO")], - ), - } -} - -test_internal_traffic_diff_VO_BK = { - "phase_1": { - "stream_VO": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BK": 
dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[dict(operator="<", phase="phase_1", stream="stream_VO")], - ), - } -} - -test_internal_traffic_diff_VI_BE = { - "phase_1": { - "stream_VI": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BE": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=1.0, - validation=[dict(operator="<", phase="phase_1", stream="stream_VI")], - ), - } -} - -test_internal_traffic_diff_VI_BK = { - "phase_1": { - "stream_VI": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BK": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[dict(operator="<", phase="phase_1", stream="stream_VI")], - ), - } -} - -test_internal_traffic_diff_BE_BK = { - "phase_1": { - "stream_BE": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03) - ], - ), - "stream_BK": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[dict(operator="<", phase="phase_1", stream="stream_BE")], - ), - } -} -# External Traffic Differentiation - -# Single station, STAUT transmits high priority -test_external_traffic_diff_staut_VO_ap_VI = { - "phase_1": { - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - 
access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03) - ], - ), - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VO_staut_to_ap") - ], - ), - } -} - -test_external_traffic_diff_staut_VO_ap_BE = { - "phase_1": { - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VO_staut_to_ap") - ], - ), - } -} - -test_external_traffic_diff_staut_VO_ap_BK = { - "phase_1": { - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VO_staut_to_ap") - ], - ), - } -} - -test_external_traffic_diff_staut_VI_ap_BE = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=1.0, - validation=[ 
- dict(operator="<", phase="phase_1", stream="stream_VI_staut_to_ap") - ], - ), - } -} - -test_external_traffic_diff_staut_VI_ap_BK = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VI_staut_to_ap") - ], - ), - } -} - -test_external_traffic_diff_staut_BE_ap_BK = { - "phase_1": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_BE_staut_to_ap") - ], - ), - } -} - -# Single station, STAUT transmits low priority -test_external_traffic_diff_staut_VI_ap_VO = { - "phase_1": { - "stream_VO_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.87, rel_tolerance=0.03) - ], - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VO_ap_to_staut") - ], - ), - } -} - -test_external_traffic_diff_staut_BE_ap_VO = { - "phase_1": { - "stream_VO_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VO", 
- max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VO_ap_to_staut") - ], - ), - } -} - -test_external_traffic_diff_staut_BK_ap_VO = { - "phase_1": { - "stream_VO_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VO_ap_to_staut") - ], - ), - } -} - -test_external_traffic_diff_staut_BE_ap_VI = { - "phase_1": { - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_VI_ap_to_staut") - ], - ), - } -} - -test_external_traffic_diff_staut_BK_ap_VI = { - "phase_1": { - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.03) - ], - ), - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", 
phase="phase_1", stream="stream_VI_ap_to_staut") - ], - ), - } -} - -test_external_traffic_diff_staut_BK_ap_BE = { - "phase_1": { - "stream_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.03) - ], - ), - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=1.0, - validation=[ - dict(operator="<", phase="phase_1", stream="stream_BE_ap_to_staut") - ], - ), - } -} - -# Dual Internal/External Traffic Differetiation - -test_dual_traffic_diff_staut_VO_VI_ap_VI = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.5, - validation=[ - dict( - operator="==", - phase="phase_1", - stream="stream_VI_ap_to_staut", - max_bw_rel_tolerance=0.15, - ) - ], - ), - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.01) - ], - ), - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=0.5, - ), - } -} - -test_dual_traffic_diff_staut_VO_BE_ap_BE = { - "phase_1": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.5, - validation=[ - dict( - operator="==", - phase="phase_1", - stream="stream_BE_ap_to_staut", - max_bw_rel_tolerance=0.15, - ) - ], - ), - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, 
rel_tolerance=0.01) - ], - ), - "stream_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=0.5, - ), - } -} - -test_dual_traffic_diff_staut_VO_BK_ap_BK = { - "phase_1": { - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.5, - validation=[ - dict( - operator="==", - phase="phase_1", - stream="stream_BK_ap_to_staut", - max_bw_rel_tolerance=0.15, - ) - ], - ), - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=0.5, - ), - } -} - -test_dual_traffic_diff_staut_VI_BE_ap_BE = { - "phase_1": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.5, - validation=[ - dict( - operator="==", - phase="phase_1", - stream="stream_BE_ap_to_staut", - max_bw_rel_tolerance=0.15, - ) - ], - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01) - ], - ), - "stream_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=0.5, - ), - } -} - -test_dual_traffic_diff_staut_VI_BK_ap_BK = { - "phase_1": { - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.5, - validation=[ - dict( - operator="==", - phase="phase_1", - 
stream="stream_BK_ap_to_staut", - max_bw_rel_tolerance=0.15, - ) - ], - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.89, rel_tolerance=0.01) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=0.5, - ), - } -} - -test_dual_traffic_diff_staut_BE_BK_ap_BK = { - "phase_1": { - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.5, - validation=[ - dict( - operator="==", - phase="phase_1", - stream="stream_BK_ap_to_staut", - max_bw_rel_tolerance=0.15, - ) - ], - ), - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.85, - validation=[ - dict(operator=">=", bandwidth_percentage=0.81, rel_tolerance=0.01) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=0.5, - ), - } -} - -# ACM Bit Conformance Tests (Single station, as WFA test below uses two) -test_acm_bit_on_VI = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.6, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator="<", - phase="phase_1", - stream="stream_BE_staut_to_ap_1", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - dict( - operator="<", - phase="phase_1", - stream="stream_BE_staut_to_ap_2", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - ], - ), - "stream_BE_staut_to_ap_1": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - 
), - "stream_BE_staut_to_ap_2": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - ), - } -} - -# AC Parameter Modificiation Tests (Single station, as WFA test below uses two) -test_ac_param_degrade_VI = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.6, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator="<", - phase="phase_1", - stream="stream_BE_staut_to_ap_1", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - dict( - operator="<", - phase="phase_1", - stream="stream_BE_staut_to_ap_2", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - ], - ), - "stream_BE_staut_to_ap_1": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - ), - "stream_BE_staut_to_ap_2": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - ), - } -} - -test_ac_param_degrade_VO = { - "phase_1": { - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.6, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator="<", - phase="phase_1", - stream="stream_BE_staut_to_ap_1", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - dict( - operator="<", - phase="phase_1", - stream="stream_BE_staut_to_ap_2", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - ], - ), - "stream_BE_staut_to_ap_1": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - ), - "stream_BE_staut_to_ap_2": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - ), - } -} - -test_ac_param_improve_BE = { 
- "phase_1": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.6, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator=">", - phase="phase_1", - stream="stream_VI_staut_to_ap_1", - bandwidth_percentage=0.869, - rel_tolerance=0.05, - ), - dict( - operator=">", - phase="phase_1", - stream="stream_VI_staut_to_ap_2", - bandwidth_percentage=0.869, - rel_tolerance=0.05, - ), - ], - ), - "stream_VI_staut_to_ap_1": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.6, - ), - "stream_VI_staut_to_ap_2": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.6, - ), - } -} - -test_ac_param_improve_BK = { - "phase_1": { - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.6, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator=">", - phase="phase_1", - stream="stream_VI_staut_to_ap_1", - bandwidth_percentage=0.869, - rel_tolerance=0.05, - ), - dict( - operator=">", - phase="phase_1", - stream="stream_VI_staut_to_ap_2", - bandwidth_percentage=0.869, - rel_tolerance=0.05, - ), - ], - ), - "stream_VI_staut_to_ap_1": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.6, - ), - "stream_VI_staut_to_ap_2": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.6, - ), - } -} -# WFA Test Plan Cases - -# Traffic Differentiation in Single BSS (Single Station) -test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE = { - "phase_1": { - "steam_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=0.45, 
- ), - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "steam_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_VI_ap_to_staut", - bandwidth_percentage=0.85, - rel_tolerance=0.01, - ) - ], - ), - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_single_station_staut_VI_BE = { - "phase_1": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "stream_BE_staut_to_ap_1": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_VI_staut_to_ap", - bandwidth_percentage=0.89, - rel_tolerance=0.01, - ) - ], - ), - "stream_BE_staut_to_ap_2": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE = { - "phase_1": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - 
receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_VI_staut_to_ap", - bandwidth_percentage=0.87, - rel_tolerance=0.01, - ) - ], - ), - "stream_BE_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK = { - "phase_1": { - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.45, - ), - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "stream_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.45, - ), - "stream_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_BE_staut_to_ap", - bandwidth_percentage=0.81, - rel_tolerance=0.01, - ) - ], - ), - "stream_BK_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_BK", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI = { 
- "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - "stream_VO_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_VO_staut_to_ap", - bandwidth_percentage=0.81, - rel_tolerance=0.01, - ) - ], - ), - "stream_VI_ap_to_staut": dict( - transmitter_str="access_point", - receiver_str="staut", - access_category="AC_VI", - max_bandwidth_percentage=0.65, - ), - }, -} - -# Traffic Differentiation in Single BSS (Two Stations) -test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE = { - "phase_1": { - "steam_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "steam_BE_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_VI_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_VI_secondary_to_ap", - bandwidth_percentage=0.90, - rel_tolerance=0.01, - ) - ], - ), - "stream_BE_secondary_to_ap": dict( - 
transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE = { - "phase_1": { - "steam_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - "stream_BE_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "steam_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="steam_VI_staut_to_ap", - bandwidth_percentage=0.88, - rel_tolerance=0.01, - ) - ], - ), - "stream_BE_secondary_to_ap_1": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - "stream_BE_secondary_to_ap_2": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK = { - "phase_1": { - "steam_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.45, - ), - "stream_BE_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "steam_BK_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.45, - ), - "stream_BE_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - 
phase="phase_1", - stream="stream_BE_secondary_to_ap", - bandwidth_percentage=0.90, - rel_tolerance=0.01, - ) - ], - ), - "stream_BK_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BK", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI = { - "phase_1": { - "steam_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - "stream_VO_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.45, - ), - }, - "phase_2": { - "steam_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.45, - ), - "stream_VO_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_VO", - max_bandwidth_percentage=0.45, - validation=[ - dict( - operator=">=", - phase="phase_1", - stream="stream_VO_secondary_to_ap", - bandwidth_percentage=0.90, - rel_tolerance=0.01, - ) - ], - ), - "stream_VI_secondary_to_ap": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.65, - ), - }, -} - -test_wfa_acm_bit_on_VI = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.65, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator="<", - phase="phase_1", - stream="stream_BE_secondary_to_ap_1", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - dict( - operator="<", - phase="phase_1", - stream="stream_BE_secondary_to_ap_2", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - ], - ), - "stream_BE_secondary_to_ap_1": dict( - transmitter_str="secondary_sta", - 
receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - "stream_BE_secondary_to_ap_2": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - } -} - -test_wfa_ac_param_degrade_VI = { - "phase_1": { - "stream_VI_staut_to_ap": dict( - transmitter_str="staut", - receiver_str="access_point", - access_category="AC_VI", - max_bandwidth_percentage=0.65, - validation=[ - # TODO(): This should technically be an "or" - dict( - operator="<", - phase="phase_1", - stream="stream_BE_secondary_to_ap_1", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - dict( - operator="<", - phase="phase_1", - stream="stream_BE_secondary_to_ap_2", - bandwidth_percentage=1.15, - rel_tolerance=0.05, - ), - ], - ), - "stream_BE_secondary_to_ap_1": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - "stream_BE_secondary_to_ap_2": dict( - transmitter_str="secondary_sta", - receiver_str="access_point", - access_category="AC_BE", - max_bandwidth_percentage=0.65, - ), - } -}
diff --git a/src/antlion/test_utils/net/__init__.py b/src/antlion/test_utils/net/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/test_utils/net/__init__.py +++ /dev/null
diff --git a/src/antlion/test_utils/net/connectivity_const.py b/src/antlion/test_utils/net/connectivity_const.py deleted file mode 100644 index 05495f0..0000000 --- a/src/antlion/test_utils/net/connectivity_const.py +++ /dev/null
@@ -1,172 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import enum - -###################################################### -# ConnectivityManager.NetworkCallback events -###################################################### -EVENT_NETWORK_CALLBACK = "NetworkCallback" - -# event types -NETWORK_CB_PRE_CHECK = "PreCheck" -NETWORK_CB_AVAILABLE = "Available" -NETWORK_CB_LOSING = "Losing" -NETWORK_CB_LOST = "Lost" -NETWORK_CB_UNAVAILABLE = "Unavailable" -NETWORK_CB_CAPABILITIES_CHANGED = "CapabilitiesChanged" -NETWORK_CB_SUSPENDED = "Suspended" -NETWORK_CB_RESUMED = "Resumed" -NETWORK_CB_LINK_PROPERTIES_CHANGED = "LinkPropertiesChanged" -NETWORK_CB_INVALID = "Invalid" - -# event data keys -NETWORK_CB_KEY_ID = "id" -NETWORK_CB_KEY_EVENT = "networkCallbackEvent" -NETWORK_CB_KEY_MAX_MS_TO_LIVE = "maxMsToLive" -NETWORK_CB_KEY_RSSI = "rssi" -NETWORK_CB_KEY_INTERFACE_NAME = "interfaceName" -NETWORK_CB_KEY_CREATE_TS = "creation_timestamp" -NETWORK_CB_KEY_CURRENT_TS = "current_timestamp" -NETWORK_CB_KEY_NETWORK_SPECIFIER = "network_specifier" -NETWORK_CB_KEY_TRANSPORT_INFO = "transport_info" - -# Constants for VPN connection status -VPN_STATE_DISCONNECTED = 0 -VPN_STATE_INITIALIZING = 1 -VPN_STATE_CONNECTING = 2 -VPN_STATE_CONNECTED = 3 -VPN_STATE_TIMEOUT = 4 -VPN_STATE_FAILED = 5 -# TODO gmoturu: determine the exact timeout value -# This is a random value as of now -VPN_TIMEOUT = 30 - -# Connectiivty 
Manager constants -TYPE_MOBILE = 0 -TYPE_WIFI = 1 - -# Network request related constants. -NETWORK_CAP_TRANSPORT_WIFI = TYPE_WIFI -NETWORK_CAP_CAPABILITY_INTERNET = 12 - -# Network request related keys. -NETWORK_CAP_TRANSPORT_TYPE_KEY = "TransportType" -NETWORK_CAP_CAPABILITY_KEY = "Capability" - -# Multipath preference constants -MULTIPATH_PREFERENCE_NONE = 0 -MULTIPATH_PREFERENCE_HANDOVER = 1 << 0 -MULTIPATH_PREFERENCE_RELIABILITY = 1 << 1 -MULTIPATH_PREFERENCE_PERFORMANCE = 1 << 2 - -# Private DNS constants -DNS_GOOGLE_HOSTNAME = "dns.google" -DNS_QUAD9_HOSTNAME = "dns.quad9.net" -DNS_CLOUDFLARE_HOSTNAME = "1dot1dot1dot1.cloudflare-dns.com" -DOH_CLOUDFLARE_HOSTNAME = "cloudflare-dns.com" -PRIVATE_DNS_MODE_OFF = "off" -PRIVATE_DNS_MODE_OPPORTUNISTIC = "opportunistic" -PRIVATE_DNS_MODE_STRICT = "hostname" - -DNS_SUPPORT_TYPE = { - DNS_GOOGLE_HOSTNAME: ["Do53", "DoT", "DoH"], - DNS_CLOUDFLARE_HOSTNAME: ["Do53", "DoT"], - DOH_CLOUDFLARE_HOSTNAME: ["DoH"], -} - -DNS_GOOGLE_ADDR_V4 = ["8.8.4.4", "8.8.8.8"] -DNS_GOOGLE_ADDR_V6 = ["2001:4860:4860::8888", "2001:4860:4860::8844"] -DNS_CLOUDFLARE_ADDR_V4 = ["1.1.1.1", "1.0.0.1"] -DOH_CLOUDFLARE_ADDR_V4 = ["104.16.248.249", "104.16.249.249"] -DOH_CLOUDFLARE_ADDR_V6 = ["2606:4700::6810:f8f9", "2606:4700::6810:f9f9"] - -# IpSec constants -SOCK_STREAM = 1 -SOCK_DGRAM = 2 -AF_INET = 2 -AF_INET6 = 10 -DIRECTION_IN = 0 -DIRECTION_OUT = 1 -MODE_TRANSPORT = 0 -MODE_TUNNEL = 1 -CRYPT_NULL = "ecb(cipher_null)" -CRYPT_AES_CBC = "cbc(aes)" -AUTH_HMAC_MD5 = "hmac(md5)" -AUTH_HMAC_SHA1 = "hmac(sha1)" -AUTH_HMAC_SHA256 = "hmac(sha256)" -AUTH_HMAC_SHA384 = "hmac(sha384)" -AUTH_HMAC_SHA512 = "hmac(sha512)" -AUTH_CRYPT_AES_GCM = "rfc4106(gcm(aes))" - - -# Constants for VpnProfile -class VpnProfile(object): - """This class contains all the possible - parameters required for VPN connection - """ - - NAME = "name" - TYPE = "type" - SERVER = "server" - USER = "username" - PWD = "password" - DNS = "dnsServers" - SEARCH_DOMAINS = "searchDomains" - 
ROUTES = "routes" - MPPE = "mppe" - L2TP_SECRET = "l2tpSecret" - IPSEC_ID = "ipsecIdentifier" - IPSEC_SECRET = "ipsecSecret" - IPSEC_USER_CERT = "ipsecUserCert" - IPSEC_CA_CERT = "ipsecCaCert" - IPSEC_SERVER_CERT = "ipsecServerCert" - - -# Enums for VPN profile types -class VpnProfileType(enum.Enum): - """Integer constant for each type of VPN""" - - PPTP = 0 - L2TP_IPSEC_PSK = 1 - L2TP_IPSEC_RSA = 2 - IPSEC_XAUTH_PSK = 3 - IPSEC_XAUTH_RSA = 4 - IPSEC_HYBRID_RSA = 5 - IKEV2_IPSEC_USER_PASS = 6 - IKEV2_IPSEC_PSK = 7 - IKEV2_IPSEC_RSA = 8 - - -# Constants for config file -class VpnReqParams(object): - """Config file parameters required for - VPN connection - """ - - vpn_server_addresses = "vpn_server_addresses" - vpn_verify_addresses = "vpn_verify_addresses" - vpn_username = "vpn_username" - vpn_password = "vpn_password" - psk_secret = "psk_secret" - client_pkcs_file_name = "client_pkcs_file_name" - cert_path_vpnserver = "cert_path_vpnserver" - cert_password = "cert_password" - pptp_mppe = "pptp_mppe" - ipsec_server_type = "ipsec_server_type" - wifi_network = "wifi_network" - vpn_identity = "vpn_identity" - vpn_server_hostname = "vpn_server_hostname"
diff --git a/src/antlion/test_utils/net/net_test_utils.py b/src/antlion/test_utils/net/net_test_utils.py deleted file mode 100644 index 4eb47ac..0000000 --- a/src/antlion/test_utils/net/net_test_utils.py +++ /dev/null
@@ -1,582 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import logging -import os -import re -import time -import urllib.request - -from antlion import signals -from antlion import utils -from antlion.controllers import adb -from antlion.controllers.adb_lib.error import AdbError -from antlion.libs.proc import job -from antlion.utils import start_standing_subprocess -from antlion.utils import stop_standing_subprocess -from antlion.test_utils.net import connectivity_const as cconst - -from mobly import asserts - -VPN_CONST = cconst.VpnProfile -VPN_TYPE = cconst.VpnProfileType -VPN_PARAMS = cconst.VpnReqParams -TCPDUMP_PATH = "/data/local/tmp/" -USB_CHARGE_MODE = "svc usb setFunctions" -USB_TETHERING_MODE = "svc usb setFunctions rndis" -ENABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 0" -DISABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 1" -DEVICE_IP_ADDRESS = "ip address" -LOCALHOST = "192.168.1.1" - -# Time to wait for radio to up and running after reboot -WAIT_TIME_AFTER_REBOOT = 10 - -GCE_SSH = "gcloud compute ssh " -GCE_SCP = "gcloud compute scp " - - -def set_chrome_browser_permissions(ad): - """Set chrome browser start with no-first-run verification. 
- - Give permission to read from and write to storage - - Args: - ad: android device object - """ - commands = [ - "pm grant com.android.chrome " "android.permission.READ_EXTERNAL_STORAGE", - "pm grant com.android.chrome " "android.permission.WRITE_EXTERNAL_STORAGE", - "rm /data/local/chrome-command-line", - "am set-debug-app --persistent com.android.chrome", - 'echo "chrome --no-default-browser-check --no-first-run ' - '--disable-fre" > /data/local/tmp/chrome-command-line', - ] - for cmd in commands: - try: - ad.adb.shell(cmd) - except AdbError: - logging.warning("adb command %s failed on %s" % (cmd, ad.serial)) - - -def verify_ping_to_vpn_ip(ad, vpn_ping_addr): - """Verify if IP behind VPN server is pingable. - - Ping should pass, if VPN is connected. - Ping should fail, if VPN is disconnected. - - Args: - ad: android device object - vpn_ping_addr: target ping addr - """ - ping_result = None - pkt_loss = "100% packet loss" - logging.info("Pinging: %s" % vpn_ping_addr) - try: - ping_result = ad.adb.shell("ping -c 3 -W 2 %s" % vpn_ping_addr) - except AdbError: - pass - return ping_result and pkt_loss not in ping_result - - -def legacy_vpn_connection_test_logic(ad, vpn_profile, vpn_ping_addr): - """Test logic for each legacy VPN connection. - - Steps: - 1. Generate profile for the VPN type - 2. Establish connection to the server - 3. Verify that connection is established using LegacyVpnInfo - 4. Verify the connection by pinging the IP behind VPN - 5. Stop the VPN connection - 6. Check the connection status - 7. 
Verify that ping to IP behind VPN fails - - Args: - ad: Android device object - vpn_profile: object contains attribute for create vpn profile - vpn_ping_addr: addr to verify vpn connection - """ - # Wait for sometime so that VPN server flushes all interfaces and - # connections after graceful termination - time.sleep(10) - - ad.adb.shell("ip xfrm state flush") - ad.log.info("Connecting to: %s", vpn_profile) - ad.droid.vpnStartLegacyVpn(vpn_profile) - time.sleep(cconst.VPN_TIMEOUT) - - connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo() - asserts.assert_equal( - connected_vpn_info["state"], - cconst.VPN_STATE_CONNECTED, - "Unable to establish VPN connection for %s" % vpn_profile, - ) - - ping_result = verify_ping_to_vpn_ip(ad, vpn_ping_addr) - ip_xfrm_state = ad.adb.shell("ip xfrm state") - match_obj = re.search(r"hmac(.*)", "%s" % ip_xfrm_state) - if match_obj: - ip_xfrm_state = format(match_obj.group(0)).split() - ad.log.info("HMAC for ESP is %s " % ip_xfrm_state[0]) - - ad.droid.vpnStopLegacyVpn() - asserts.assert_true( - ping_result, - "Ping to the internal IP failed. " "Expected to pass as VPN is connected", - ) - - connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo() - asserts.assert_true( - not connected_vpn_info, - "Unable to terminate VPN connection for %s" % vpn_profile, - ) - - -def download_load_certs( - ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path -): - """Download the certificates from VPN server and push to sdcard of DUT. 
- - Args: - ad: android device object - vpn_params: vpn params from config file - vpn_type: 1 of the 6 VPN types - vpn_server_addr: server addr to connect to - ipsec_server_type: ipsec version - strongswan or openswan - log_path: log path to download cert - - Returns: - Client cert file name on DUT's sdcard - """ - url = "http://%s%s%s" % ( - vpn_server_addr, - vpn_params["cert_path_vpnserver"], - vpn_params["client_pkcs_file_name"], - ) - logging.info("URL is: %s" % url) - if vpn_server_addr == LOCALHOST: - ad.droid.httpDownloadFile(url, "/sdcard/") - return vpn_params["client_pkcs_file_name"] - - local_cert_name = "%s_%s_%s" % ( - vpn_type.name, - ipsec_server_type, - vpn_params["client_pkcs_file_name"], - ) - local_file_path = os.path.join(log_path, local_cert_name) - try: - ret = urllib.request.urlopen(url) - with open(local_file_path, "wb") as f: - f.write(ret.read()) - except Exception: - asserts.fail("Unable to download certificate from the server") - - ad.adb.push("%s sdcard/" % local_file_path) - return local_cert_name - - -def generate_legacy_vpn_profile( - ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path -): - """Generate legacy VPN profile for a VPN. 
- - Args: - ad: android device object - vpn_params: vpn params from config file - vpn_type: 1 of the 6 VPN types - vpn_server_addr: server addr to connect to - ipsec_server_type: ipsec version - strongswan or openswan - log_path: log path to download cert - - Returns: - Vpn profile - """ - vpn_profile = { - VPN_CONST.USER: vpn_params["vpn_username"], - VPN_CONST.PWD: vpn_params["vpn_password"], - VPN_CONST.TYPE: vpn_type.value, - VPN_CONST.SERVER: vpn_server_addr, - } - vpn_profile[VPN_CONST.NAME] = "test_%s_%s" % (vpn_type.name, ipsec_server_type) - if vpn_type.name == "PPTP": - vpn_profile[VPN_CONST.NAME] = "test_%s" % vpn_type.name - - psk_set = set(["L2TP_IPSEC_PSK", "IPSEC_XAUTH_PSK"]) - rsa_set = set(["L2TP_IPSEC_RSA", "IPSEC_XAUTH_RSA", "IPSEC_HYBRID_RSA"]) - - if vpn_type.name in psk_set: - vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"] - elif vpn_type.name in rsa_set: - cert_name = download_load_certs( - ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path - ) - vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split(".")[0] - ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"]) - else: - vpn_profile[VPN_CONST.MPPE] = "mppe" - - return vpn_profile - - -def generate_ikev2_vpn_profile(ad, vpn_params, vpn_type, server_addr, log_path): - """Generate VPN profile for IKEv2 VPN. - - Args: - ad: android device object. - vpn_params: vpn params from config file. - vpn_type: ikev2 vpn type. - server_addr: vpn server addr. - log_path: log path to download cert. - - Returns: - Vpn profile. 
- """ - vpn_profile = { - VPN_CONST.TYPE: vpn_type.value, - VPN_CONST.SERVER: server_addr, - } - - if vpn_type.name == "IKEV2_IPSEC_USER_PASS": - vpn_profile[VPN_CONST.USER] = vpn_params["vpn_username"] - vpn_profile[VPN_CONST.PWD] = vpn_params["vpn_password"] - vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"] - cert_name = download_load_certs( - ad, - vpn_params, - vpn_type, - vpn_params["server_addr"], - "IKEV2_IPSEC_USER_PASS", - log_path, - ) - vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split(".")[0] - ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"]) - elif vpn_type.name == "IKEV2_IPSEC_PSK": - vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"] - vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"] - else: - vpn_profile[VPN_CONST.IPSEC_ID] = "%s@%s" % ( - vpn_params["vpn_identity"], - server_addr, - ) - logging.info("ID: %s@%s" % (vpn_params["vpn_identity"], server_addr)) - cert_name = download_load_certs( - ad, - vpn_params, - vpn_type, - vpn_params["server_addr"], - "IKEV2_IPSEC_RSA", - log_path, - ) - vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split(".")[0] - vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split(".")[0] - ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"]) - - return vpn_profile - - -def start_tcpdump(ad, test_name, interface="any"): - """Start tcpdump on all interfaces. - - Args: - ad: android device object. 
- test_name: tcpdump file name will have this - """ - ad.log.info("Starting tcpdump on all interfaces") - ad.adb.shell("killall -9 tcpdump", ignore_status=True) - ad.adb.shell("mkdir %s" % TCPDUMP_PATH, ignore_status=True) - ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True) - - file_name = "%s/tcpdump_%s_%s.pcap" % (TCPDUMP_PATH, ad.serial, test_name) - ad.log.info("tcpdump file is %s", file_name) - cmd = "adb -s {} shell tcpdump -i {} -s0 -w {}".format( - ad.serial, interface, file_name - ) - try: - return start_standing_subprocess(cmd, 5) - except Exception: - ad.log.exception("Could not start standing process %s" % repr(cmd)) - - return None - - -def stop_tcpdump( - ad, proc, test_name, pull_dump=True, adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT -): - """Stops tcpdump on any iface. - - Pulls the tcpdump file in the tcpdump dir if necessary. - - Args: - ad: android device object. - proc: need to know which pid to stop - test_name: test name to save the tcpdump file - pull_dump: pull tcpdump file or not - adb_pull_timeout: timeout for adb_pull - - Returns: - log_path of the tcpdump file - """ - ad.log.info("Stopping and pulling tcpdump if any") - if proc is None: - return None - try: - stop_standing_subprocess(proc) - except Exception as e: - ad.log.warning(e) - if pull_dump: - log_path = os.path.join(ad.device_log_path, "TCPDUMP_%s" % ad.serial) - os.makedirs(log_path, exist_ok=True) - ad.adb.pull("%s/. %s" % (TCPDUMP_PATH, log_path), timeout=adb_pull_timeout) - ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True) - file_name = "tcpdump_%s_%s.pcap" % (ad.serial, test_name) - return "%s/%s" % (log_path, file_name) - return None - - -def start_tcpdump_gce_server(ad, test_name, dest_port, gce): - """Start tcpdump on gce server. 
- - Args: - ad: android device object - test_name: test case name - dest_port: port to collect tcpdump - gce: dictionary of gce instance - - Returns: - process id and pcap file path from gce server - """ - ad.log.info("Starting tcpdump on gce server") - - # pcap file name - fname = "/tmp/%s_%s_%s_%s" % ( - test_name, - ad.model, - ad.serial, - time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())), - ) - - # start tcpdump - tcpdump_cmd = ( - "sudo bash -c 'tcpdump -i %s -w %s.pcap port %s > \ - %s.txt 2>&1 & echo $!'" - % (gce["interface"], fname, dest_port, fname) - ) - gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % ( - GCE_SSH, - gce["project"], - gce["zone"], - gce["username"], - gce["hostname"], - ) - gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd) - utils.exe_cmd(gce_ssh_cmd) - - # get process id - ps_cmd = '%s "ps aux | grep tcpdump | grep %s"' % (gcloud_ssh_cmd, fname) - tcpdump_pid = utils.exe_cmd(ps_cmd).decode("utf-8", "ignore").split() - if not tcpdump_pid: - raise signals.TestFailure("Failed to start tcpdump on gce server") - return tcpdump_pid[1], fname - - -def stop_tcpdump_gce_server(ad, tcpdump_pid, fname, gce): - """Stop and pull tcpdump file from gce server. 
- - Args: - ad: android device object - tcpdump_pid: process id for tcpdump file - fname: tcpdump file path - gce: dictionary of gce instance - - Returns: - pcap file from gce server - """ - ad.log.info("Stop and pull pcap file from gce server") - - # stop tcpdump - tcpdump_cmd = "sudo kill %s" % tcpdump_pid - gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % ( - GCE_SSH, - gce["project"], - gce["zone"], - gce["username"], - gce["hostname"], - ) - gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd) - utils.exe_cmd(gce_ssh_cmd) - - # verify tcpdump is stopped - ps_cmd = '%s "ps aux | grep tcpdump"' % gcloud_ssh_cmd - res = utils.exe_cmd(ps_cmd).decode("utf-8", "ignore") - if tcpdump_pid in res.split(): - raise signals.TestFailure("Failed to stop tcpdump on gce server") - if not fname: - return None - - # pull pcap file - gcloud_scp_cmd = "%s --project=%s --zone=%s %s@%s:" % ( - GCE_SCP, - gce["project"], - gce["zone"], - gce["username"], - gce["hostname"], - ) - pull_file = "%s%s.pcap %s/" % (gcloud_scp_cmd, fname, ad.device_log_path) - utils.exe_cmd(pull_file) - if not os.path.exists("%s/%s.pcap" % (ad.device_log_path, fname.split("/")[-1])): - raise signals.TestFailure("Failed to pull tcpdump from gce server") - - # delete pcaps - utils.exe_cmd('%s "sudo rm %s.*"' % (gcloud_ssh_cmd, fname)) - - # return pcap file - pcap_file = "%s/%s.pcap" % (ad.device_log_path, fname.split("/")[-1]) - return pcap_file - - -def is_ipaddress_ipv6(ip_address): - """Verify if the given string is a valid IPv6 address. - - Args: - ip_address: string containing the IP address - - Returns: - True: if valid ipv6 address - False: if not - """ - try: - socket.inet_pton(socket.AF_INET6, ip_address) - return True - except socket.error: - return False - - -def set_cap_net_raw_capability(): - """Set the CAP_NET_RAW capability - - To send the Scapy packets, we need to get the CAP_NET_RAW capability first. 
- """ - cap_net_raw = "sudo setcap cap_net_raw=eip $(readlink -f $(which act.py))" - utils.exe_cmd(cap_net_raw) - cap_python = "sudo setcap cap_net_raw=eip $(readlink -f $(which python))" - utils.exe_cmd(cap_python) - - -def stop_usb_tethering(ad): - """Stop USB tethering. - - Args: - ad: android device object - """ - ad.log.info("Stopping USB Tethering") - ad.stop_services() - ad.adb.shell(USB_CHARGE_MODE) - ad.adb.wait_for_device() - ad.start_services() - - -def wait_for_new_iface(old_ifaces): - """Wait for the new interface to come up. - - Args: - old_ifaces: list of old interfaces - """ - old_set = set(old_ifaces) - # Try 10 times to find a new interface with a 1s sleep every time - # (equivalent to a 9s timeout) - for _ in range(0, 10): - new_ifaces = set(get_if_list()) - old_set - asserts.assert_true( - len(new_ifaces) < 2, "Too many new interfaces after turning on " "tethering" - ) - if len(new_ifaces) == 1: - # enable the new iface before return - new_iface = new_ifaces.pop() - enable_iface(new_iface) - return new_iface - time.sleep(1) - asserts.fail("Timeout waiting for tethering interface on host") - - -def get_if_list(): - """Returns a list containing all network interfaces. - - The newest version of Scapy.get_if_list() returns the cached interfaces, - which might be out-dated, and unable to perceive the interface changes. - Use this method when need to monitoring the network interfaces changes. - Reference: https://github.com/secdev/scapy/pull/2707 - - Returns: - A list of the latest network interfaces. 
For example: - ['cvd-ebr', ..., 'eno1', 'enx4afa19a8dde1', 'lo', 'wlxd03745d68d88'] - """ - from scapy.config import conf - from scapy.compat import plain_str - - # Get ifconfig output - result = job.run([conf.prog.ifconfig]) - if result.exit_status: - raise asserts.fail( - "Failed to execute ifconfig: {}".format(plain_str(result.stderr)) - ) - - interfaces = [ - line[: line.find(":")] - for line in plain_str(result.stdout).splitlines() - if ": flags" in line.lower() - ] - return interfaces - - -def enable_hardware_offload(ad): - """Enable hardware offload using adb shell command. - - Args: - ad: Android device object - """ - ad.log.info("Enabling hardware offload.") - ad.adb.shell(ENABLE_HARDWARE_OFFLOAD, ignore_status=True) - ad.reboot() - time.sleep(WAIT_TIME_AFTER_REBOOT) - - -def disable_hardware_offload(ad): - """Disable hardware offload using adb shell command. - - Args: - ad: Android device object - """ - ad.log.info("Disabling hardware offload.") - ad.adb.shell(DISABLE_HARDWARE_OFFLOAD, ignore_status=True) - ad.reboot() - time.sleep(WAIT_TIME_AFTER_REBOOT) - - -def enable_iface(iface): - """Enable network interfaces. - - Some network interface might disabled as default, need to enable before - using it. - - Args: - iface: network interface that need to enable - """ - from scapy.compat import plain_str - - result = job.run("sudo ifconfig %s up" % (iface), ignore_status=True) - if result.exit_status: - raise asserts.fail( - "Failed to execute ifconfig: {}".format(plain_str(result.stderr)) - )
diff --git a/src/antlion/test_utils/wifi/OWNERS b/src/antlion/test_utils/wifi/OWNERS deleted file mode 100644 index 10e4214..0000000 --- a/src/antlion/test_utils/wifi/OWNERS +++ /dev/null
@@ -1,5 +0,0 @@ -bkleung@google.com -gmoturu@google.com -hsiuchangchen@google.com - -include platform/packages/modules/Wifi:/WIFI_OWNERS
diff --git a/src/antlion/test_utils/wifi/__init__.py b/src/antlion/test_utils/wifi/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/src/antlion/test_utils/wifi/__init__.py +++ /dev/null
diff --git a/src/antlion/test_utils/wifi/base_test.py b/src/antlion/test_utils/wifi/base_test.py deleted file mode 100644 index 7e97d8f..0000000 --- a/src/antlion/test_utils/wifi/base_test.py +++ /dev/null
@@ -1,1058 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" - Base Class for Defining Common WiFi Test Functionality -""" - -import copy -import os -import time - -from antlion import context -from antlion import signals -from antlion import utils -from antlion.base_test import BaseTestClass -from antlion.controllers.ap_lib import hostapd_ap_preset -from antlion.controllers.ap_lib import hostapd_bss_settings -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_security -from antlion.keys import Config -from antlion.test_utils.net import net_test_utils as nutils -from antlion.test_utils.wifi import wifi_test_utils as wutils - -from mobly import asserts -from mobly.base_test import STAGE_NAME_TEARDOWN_CLASS - -WifiEnums = wutils.WifiEnums -AP_1 = 0 -AP_2 = 1 -MAX_AP_COUNT = 2 - - -class WifiBaseTest(BaseTestClass): - def __init__(self, configs): - super().__init__(configs) - self.enable_packet_log = False - self.packet_log_2g = hostapd_constants.AP_DEFAULT_CHANNEL_2G - self.packet_log_5g = hostapd_constants.AP_DEFAULT_CHANNEL_5G - self.tcpdump_proc = [] - self.packet_log_pid = {} - - def setup_class(self): - if hasattr(self, "attenuators") and self.attenuators: - for attenuator in self.attenuators: - attenuator.set_atten(0) - opt_param = ["pixel_models", "cnss_diag_file", "country_code_file"] - self.unpack_userparams(opt_param_names=opt_param) 
- if hasattr(self, "cnss_diag_file"): - if isinstance(self.cnss_diag_file, list): - self.cnss_diag_file = self.cnss_diag_file[0] - if not os.path.isfile(self.cnss_diag_file): - self.cnss_diag_file = os.path.join( - self.user_params[Config.key_config_path.value], self.cnss_diag_file - ) - if self.enable_packet_log and hasattr(self, "packet_capture"): - self.packet_logger = self.packet_capture[0] - self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g) - self.packet_logger.configure_monitor_mode("5G", self.packet_log_5g) - if hasattr(self, "android_devices"): - for ad in self.android_devices: - wutils.wifi_test_device_init(ad) - if hasattr(self, "country_code_file"): - if isinstance(self.country_code_file, list): - self.country_code_file = self.country_code_file[0] - if not os.path.isfile(self.country_code_file): - self.country_code_file = os.path.join( - self.user_params[Config.key_config_path.value], - self.country_code_file, - ) - self.country_code = utils.load_config(self.country_code_file)[ - "country" - ] - else: - self.country_code = WifiEnums.CountryCode.US - wutils.set_wifi_country_code(ad, self.country_code) - - def setup_test(self): - if ( - hasattr(self, "android_devices") - and hasattr(self, "cnss_diag_file") - and hasattr(self, "pixel_models") - ): - wutils.start_cnss_diags( - self.android_devices, self.cnss_diag_file, self.pixel_models - ) - self.tcpdump_proc = [] - if hasattr(self, "android_devices"): - for ad in self.android_devices: - proc = nutils.start_tcpdump(ad, self.test_name) - self.tcpdump_proc.append((ad, proc)) - if hasattr(self, "packet_logger"): - self.packet_log_pid = wutils.start_pcap( - self.packet_logger, "dual", self.test_name - ) - - def teardown_test(self): - if ( - hasattr(self, "android_devices") - and hasattr(self, "cnss_diag_file") - and hasattr(self, "pixel_models") - ): - wutils.stop_cnss_diags(self.android_devices, self.pixel_models) - for proc in self.tcpdump_proc: - nutils.stop_tcpdump(proc[0], proc[1], 
self.test_name, pull_dump=False) - self.tcpdump_proc = [] - if hasattr(self, "packet_logger") and self.packet_log_pid: - wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=True) - self.packet_log_pid = {} - - def teardown_class(self): - begin_time = utils.get_current_epoch_time() - super().teardown_class() - for device in getattr(self, "fuchsia_devices", []): - device.take_bug_report(STAGE_NAME_TEARDOWN_CLASS, begin_time) - - def on_fail(self, test_name, begin_time): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.take_bug_report(test_name, begin_time) - ad.cat_adb_log(test_name, begin_time) - wutils.get_ssrdumps(ad) - if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"): - wutils.stop_cnss_diags(self.android_devices, self.pixel_models) - for ad in self.android_devices: - wutils.get_cnss_diag_log(ad) - for proc in self.tcpdump_proc: - nutils.stop_tcpdump(proc[0], proc[1], self.test_name) - self.tcpdump_proc = [] - if hasattr(self, "packet_logger") and self.packet_log_pid: - wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=False) - self.packet_log_pid = {} - - # Gets a wlan_device log and calls the generic device fail on DUT. - for device in getattr(self, "fuchsia_devices", []): - self.on_device_fail(device, test_name, begin_time) - - def on_device_fail(self, device, test_name, begin_time): - """Gets a generic device DUT bug report. - - This method takes a bug report if the device has the - 'take_bug_report_on_fail' config value, and if the flag is true. This - method also power cycles if 'hard_reboot_on_fail' is True. - - Args: - device: Generic device to gather logs from. - test_name: Name of the test that triggered this function. - begin_time: Logline format timestamp taken when the test started. 
- """ - if ( - not hasattr(device, "take_bug_report_on_fail") - or device.take_bug_report_on_fail - ): - device.take_bug_report(test_name, begin_time) - - if hasattr(device, "hard_reboot_on_fail") and device.hard_reboot_on_fail: - device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices) - - def download_ap_logs(self): - """Downloads the DHCP and hostapad logs from the access_point. - - Using the current TestClassContext and TestCaseContext this method pulls - the DHCP and hostapd logs and outputs them to the correct path. - """ - current_path = context.get_current_context().get_full_output_path() - - dhcp_log = self.access_point.get_dhcp_logs() - if dhcp_log: - dhcp_log_path = os.path.join(current_path, "dhcp_log.txt") - with open(dhcp_log_path, "w") as f: - f.write(dhcp_log) - - hostapd_logs = self.access_point.get_hostapd_logs() - for interface in hostapd_logs: - hostapd_log_path = os.path.join( - current_path, f"hostapd_log_{interface}.txt" - ) - with open(hostapd_log_path, "w") as f: - f.write(hostapd_logs[interface]) - - radvd_log = self.access_point.get_radvd_logs() - if radvd_log: - radvd_log_path = os.path.join(current_path, "radvd_log.txt") - with open(radvd_log_path, "w") as f: - f.write(radvd_log) - - def get_psk_network( - self, - mirror_ap, - reference_networks, - hidden=False, - same_ssid=False, - security_mode=hostapd_constants.WPA2_STRING, - ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G, - ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G, - passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G, - passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G, - ): - """Generates SSID and passphrase for a WPA2 network using random - generator. - - Args: - mirror_ap: Boolean, determines if both APs use the same hostapd - config or different configs. - reference_networks: List of PSK networks. - same_ssid: Boolean, determines if both bands on AP use the same - SSID. 
- ssid_length_2gecond AP Int, number of characters to use for 2G SSID. - ssid_length_5g: Int, number of characters to use for 5G SSID. - passphrase_length_2g: Int, length of password for 2G network. - passphrase_length_5g: Int, length of password for 5G network. - - Returns: A dict of 2G and 5G network lists for hostapd configuration. - - """ - network_dict_2g = {} - network_dict_5g = {} - ref_5g_security = security_mode - ref_2g_security = security_mode - - if same_ssid: - ref_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g) - ref_5g_ssid = ref_2g_ssid - - ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g) - ref_5g_passphrase = ref_2g_passphrase - - else: - ref_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g) - ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g) - - ref_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g) - ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g) - - network_dict_2g = { - "SSID": ref_2g_ssid, - "security": ref_2g_security, - "password": ref_2g_passphrase, - "hiddenSSID": hidden, - } - - network_dict_5g = { - "SSID": ref_5g_ssid, - "security": ref_5g_security, - "password": ref_5g_passphrase, - "hiddenSSID": hidden, - } - - ap = 0 - for ap in range(MAX_AP_COUNT): - reference_networks.append( - {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)} - ) - if not mirror_ap: - break - return {"2g": network_dict_2g, "5g": network_dict_5g} - - def get_open_network( - self, - mirror_ap, - open_network, - hidden=False, - same_ssid=False, - ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G, - ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G, - security_mode="none", - ): - """Generates SSIDs for a open network using a random generator. - - Args: - mirror_ap: Boolean, determines if both APs use the same hostapd - config or different configs. - open_network: List of open networks. - same_ssid: Boolean, determines if both bands on AP use the same - SSID. 
- ssid_length_2g: Int, number of characters to use for 2G SSID. - ssid_length_5g: Int, number of characters to use for 5G SSID. - security_mode: 'none' for open and 'OWE' for WPA3 OWE. - - Returns: A dict of 2G and 5G network lists for hostapd configuration. - - """ - network_dict_2g = {} - network_dict_5g = {} - - if same_ssid: - open_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g) - open_5g_ssid = open_2g_ssid - - else: - open_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g) - open_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g) - - network_dict_2g = { - "SSID": open_2g_ssid, - "security": security_mode, - "hiddenSSID": hidden, - } - - network_dict_5g = { - "SSID": open_5g_ssid, - "security": security_mode, - "hiddenSSID": hidden, - } - - ap = 0 - for ap in range(MAX_AP_COUNT): - open_network.append( - {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)} - ) - if not mirror_ap: - break - return {"2g": network_dict_2g, "5g": network_dict_5g} - - def get_wep_network( - self, - mirror_ap, - networks, - hidden=False, - same_ssid=False, - ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G, - ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G, - passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G, - passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G, - ): - """Generates SSID and passphrase for a WEP network using random - generator. - - Args: - mirror_ap: Boolean, determines if both APs use the same hostapd - config or different configs. - networks: List of WEP networks. - same_ssid: Boolean, determines if both bands on AP use the same - SSID. - ssid_length_2gecond AP Int, number of characters to use for 2G SSID. - ssid_length_5g: Int, number of characters to use for 5G SSID. - passphrase_length_2g: Int, length of password for 2G network. - passphrase_length_5g: Int, length of password for 5G network. - - Returns: A dict of 2G and 5G network lists for hostapd configuration. 
- - """ - network_dict_2g = {} - network_dict_5g = {} - ref_5g_security = hostapd_constants.WEP_STRING - ref_2g_security = hostapd_constants.WEP_STRING - - if same_ssid: - ref_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g) - ref_5g_ssid = ref_2g_ssid - - ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g) - ref_5g_passphrase = ref_2g_passphrase - - else: - ref_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g) - ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g) - - ref_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g) - ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g) - - network_dict_2g = { - "SSID": ref_2g_ssid, - "security": ref_2g_security, - "wepKeys": [ref_2g_passphrase] * 4, - "hiddenSSID": hidden, - } - - network_dict_5g = { - "SSID": ref_5g_ssid, - "security": ref_5g_security, - "wepKeys": [ref_2g_passphrase] * 4, - "hiddenSSID": hidden, - } - - ap = 0 - for ap in range(MAX_AP_COUNT): - networks.append( - {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)} - ) - if not mirror_ap: - break - return {"2g": network_dict_2g, "5g": network_dict_5g} - - def update_bssid(self, ap_instance, ap, network, band): - """Get bssid and update network dictionary. - - Args: - ap_instance: Accesspoint index that was configured. - ap: Accesspoint object corresponding to ap_instance. - network: Network dictionary. - band: Wifi networks' band. - - """ - bssid = ap.get_bssid_from_ssid(network["SSID"], band) - - if network["security"] == hostapd_constants.WPA2_STRING: - # TODO:(bamahadev) Change all occurances of reference_networks - # to wpa_networks. 
- self.reference_networks[ap_instance][band]["bssid"] = bssid - if network["security"] == hostapd_constants.WPA_STRING: - self.wpa_networks[ap_instance][band]["bssid"] = bssid - if network["security"] == hostapd_constants.WEP_STRING: - self.wep_networks[ap_instance][band]["bssid"] = bssid - if network["security"] == hostapd_constants.ENT_STRING: - if "bssid" not in self.ent_networks[ap_instance][band]: - self.ent_networks[ap_instance][band]["bssid"] = bssid - else: - self.ent_networks_pwd[ap_instance][band]["bssid"] = bssid - if network["security"] == "none": - self.open_network[ap_instance][band]["bssid"] = bssid - - def populate_bssid(self, ap_instance, ap, networks_5g, networks_2g): - """Get bssid for a given SSID and add it to the network dictionary. - - Args: - ap_instance: Accesspoint index that was configured. - ap: Accesspoint object corresponding to ap_instance. - networks_5g: List of 5g networks configured on the APs. - networks_2g: List of 2g networks configured on the APs. - - """ - - if not (networks_5g or networks_2g): - return - - for network in networks_5g: - if "channel" in network: - continue - self.update_bssid(ap_instance, ap, network, hostapd_constants.BAND_5G) - - for network in networks_2g: - if "channel" in network: - continue - self.update_bssid(ap_instance, ap, network, hostapd_constants.BAND_2G) - - def configure_openwrt_ap_and_start( - self, - channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - channel_5g_ap2=None, - channel_2g_ap2=None, - ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G, - passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G, - ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G, - passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G, - mirror_ap=False, - hidden=False, - same_ssid=False, - open_network=False, - wpa1_network=False, - wpa_network=False, - wep_network=False, - ent_network=False, - ent_network_pwd=False, - owe_network=False, - 
sae_network=False, - saemixed_network=False, - radius_conf_2g=None, - radius_conf_5g=None, - radius_conf_pwd=None, - ap_count=1, - ieee80211w=None, - ): - """Create, configure and start OpenWrt AP. - - Args: - channel_5g: 5G channel to configure. - channel_2g: 2G channel to configure. - channel_5g_ap2: 5G channel to configure on AP2. - channel_2g_ap2: 2G channel to configure on AP2. - ssid_length_2g: Int, number of characters to use for 2G SSID. - passphrase_length_2g: Int, length of password for 2G network. - ssid_length_5g: Int, number of characters to use for 5G SSID. - passphrase_length_5g: Int, length of password for 5G network. - same_ssid: Boolean, determines if both bands on AP use the same SSID. - open_network: Boolean, to check if open network should be configured. - wpa_network: Boolean, to check if wpa network should be configured. - wep_network: Boolean, to check if wep network should be configured. - ent_network: Boolean, to check if ent network should be configured. - ent_network_pwd: Boolean, to check if ent pwd network should be configured. - owe_network: Boolean, to check if owe network should be configured. - sae_network: Boolean, to check if sae network should be configured. - saemixed_network: Boolean, to check if saemixed network should be configured. - radius_conf_2g: dictionary with enterprise radius server details. - radius_conf_5g: dictionary with enterprise radius server details. - radius_conf_pwd: dictionary with enterprise radiuse server details. - ap_count: APs to configure. - ieee80211w:PMF to configure - """ - if mirror_ap and ap_count == 1: - raise ValueError("ap_count cannot be 1 if mirror_ap is True.") - if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1: - raise ValueError("ap_count cannot be 1 if channels of AP2 are provided.") - # we are creating a channel list for 2G and 5G bands. The list is of - # size 2 and this is based on the assumption that each testbed will have - # at most 2 APs. 
- if not channel_5g_ap2: - channel_5g_ap2 = channel_5g - if not channel_2g_ap2: - channel_2g_ap2 = channel_2g - channels_2g = [channel_2g, channel_2g_ap2] - channels_5g = [channel_5g, channel_5g_ap2] - - self.reference_networks = [] - self.wpa1_networks = [] - self.wpa_networks = [] - self.wep_networks = [] - self.ent_networks = [] - self.ent_networks_pwd = [] - self.open_network = [] - self.owe_networks = [] - self.sae_networks = [] - self.saemixed_networks = [] - self.bssid_map = [] - for i in range(ap_count): - network_list = [] - if wpa1_network: - wpa1_dict = self.get_psk_network( - mirror_ap, - self.wpa1_networks, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - passphrase_length_2g, - passphrase_length_5g, - ) - wpa1_dict[hostapd_constants.BAND_2G]["security"] = "psk" - wpa1_dict[hostapd_constants.BAND_5G]["security"] = "psk" - wpa1_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w - wpa1_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w - self.wpa1_networks.append(wpa1_dict) - network_list.append(wpa1_dict) - if wpa_network: - wpa_dict = self.get_psk_network( - mirror_ap, - self.reference_networks, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - passphrase_length_2g, - passphrase_length_5g, - ) - wpa_dict[hostapd_constants.BAND_2G]["security"] = "psk2" - wpa_dict[hostapd_constants.BAND_5G]["security"] = "psk2" - wpa_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w - wpa_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w - self.wpa_networks.append(wpa_dict) - network_list.append(wpa_dict) - if wep_network: - wep_dict = self.get_wep_network( - mirror_ap, - self.wep_networks, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - ) - network_list.append(wep_dict) - if ent_network: - ent_dict = self.get_open_network( - mirror_ap, - self.ent_networks, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - ) - ent_dict["2g"]["security"] = "wpa2" - ent_dict["2g"].update(radius_conf_2g) - 
ent_dict["5g"]["security"] = "wpa2" - ent_dict["5g"].update(radius_conf_5g) - network_list.append(ent_dict) - if ent_network_pwd: - ent_pwd_dict = self.get_open_network( - mirror_ap, - self.ent_networks_pwd, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - ) - ent_pwd_dict["2g"]["security"] = "wpa2" - ent_pwd_dict["2g"].update(radius_conf_pwd) - ent_pwd_dict["5g"]["security"] = "wpa2" - ent_pwd_dict["5g"].update(radius_conf_pwd) - network_list.append(ent_pwd_dict) - if open_network: - open_dict = self.get_open_network( - mirror_ap, - self.open_network, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - ) - network_list.append(open_dict) - if owe_network: - owe_dict = self.get_open_network( - mirror_ap, - self.owe_networks, - hidden, - same_ssid, - ssid_length_2g, - ssid_length_5g, - "OWE", - ) - owe_dict[hostapd_constants.BAND_2G]["security"] = "owe" - owe_dict[hostapd_constants.BAND_5G]["security"] = "owe" - network_list.append(owe_dict) - if sae_network: - sae_dict = self.get_psk_network( - mirror_ap, - self.sae_networks, - hidden, - same_ssid, - hostapd_constants.SAE_KEY_MGMT, - ssid_length_2g, - ssid_length_5g, - passphrase_length_2g, - passphrase_length_5g, - ) - sae_dict[hostapd_constants.BAND_2G]["security"] = "sae" - sae_dict[hostapd_constants.BAND_5G]["security"] = "sae" - network_list.append(sae_dict) - if saemixed_network: - saemixed_dict = self.get_psk_network( - mirror_ap, - self.saemixed_networks, - hidden, - same_ssid, - hostapd_constants.SAE_KEY_MGMT, - ssid_length_2g, - ssid_length_5g, - passphrase_length_2g, - passphrase_length_5g, - ) - saemixed_dict[hostapd_constants.BAND_2G]["security"] = "sae-mixed" - saemixed_dict[hostapd_constants.BAND_5G]["security"] = "sae-mixed" - saemixed_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w - saemixed_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w - network_list.append(saemixed_dict) - self.access_points[i].configure_ap( - network_list, channels_2g[i], 
channels_5g[i] - ) - self.access_points[i].start_ap() - self.bssid_map.append(self.access_points[i].get_bssids_for_wifi_networks()) - if mirror_ap: - self.access_points[i + 1].configure_ap( - network_list, channels_2g[i + 1], channels_5g[i + 1] - ) - self.access_points[i + 1].start_ap() - self.bssid_map.append( - self.access_points[i + 1].get_bssids_for_wifi_networks() - ) - break - - def legacy_configure_ap_and_start( - self, - channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - max_2g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_2G, - max_5g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_5G, - ap_ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G, - ap_passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G, - ap_ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G, - ap_passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G, - hidden=False, - same_ssid=False, - mirror_ap=True, - wpa_network=False, - wep_network=False, - ent_network=False, - radius_conf_2g=None, - radius_conf_5g=None, - ent_network_pwd=False, - radius_conf_pwd=None, - ap_count=1, - ): - config_count = 1 - count = 0 - - # For example, the NetworkSelector tests use 2 APs and require that - # both APs are not mirrored. 
- if not mirror_ap and ap_count == 1: - raise ValueError("ap_count cannot be 1 if mirror_ap is False.") - - if not mirror_ap: - config_count = ap_count - - self.user_params["reference_networks"] = [] - self.user_params["open_network"] = [] - if wpa_network: - self.user_params["wpa_networks"] = [] - if wep_network: - self.user_params["wep_networks"] = [] - if ent_network: - self.user_params["ent_networks"] = [] - if ent_network_pwd: - self.user_params["ent_networks_pwd"] = [] - - # kill hostapd & dhcpd if the cleanup was not successful - for i in range(len(self.access_points)): - self.log.debug("Check ap state and cleanup") - self._cleanup_hostapd_and_dhcpd(i) - - for count in range(config_count): - network_list_2g = [] - network_list_5g = [] - - orig_network_list_2g = [] - orig_network_list_5g = [] - - network_list_2g.append({"channel": channel_2g}) - network_list_5g.append({"channel": channel_5g}) - - networks_dict = self.get_psk_network( - mirror_ap, - self.user_params["reference_networks"], - hidden=hidden, - same_ssid=same_ssid, - ) - self.reference_networks = self.user_params["reference_networks"] - - network_list_2g.append(networks_dict["2g"]) - network_list_5g.append(networks_dict["5g"]) - - # When same_ssid is set, only configure one set of WPA networks. - # We cannot have more than one set because duplicate interface names - # are not allowed. - # TODO(bmahadev): Provide option to select the type of network, - # instead of defaulting to WPA. 
- if not same_ssid: - networks_dict = self.get_open_network( - mirror_ap, - self.user_params["open_network"], - hidden=hidden, - same_ssid=same_ssid, - ) - self.open_network = self.user_params["open_network"] - - network_list_2g.append(networks_dict["2g"]) - network_list_5g.append(networks_dict["5g"]) - - if wpa_network: - networks_dict = self.get_psk_network( - mirror_ap, - self.user_params["wpa_networks"], - hidden=hidden, - same_ssid=same_ssid, - security_mode=hostapd_constants.WPA_STRING, - ) - self.wpa_networks = self.user_params["wpa_networks"] - - network_list_2g.append(networks_dict["2g"]) - network_list_5g.append(networks_dict["5g"]) - - if wep_network: - networks_dict = self.get_wep_network( - mirror_ap, - self.user_params["wep_networks"], - hidden=hidden, - same_ssid=same_ssid, - ) - self.wep_networks = self.user_params["wep_networks"] - - network_list_2g.append(networks_dict["2g"]) - network_list_5g.append(networks_dict["5g"]) - - if ent_network: - networks_dict = self.get_open_network( - mirror_ap, - self.user_params["ent_networks"], - hidden=hidden, - same_ssid=same_ssid, - ) - networks_dict["2g"]["security"] = hostapd_constants.ENT_STRING - networks_dict["2g"].update(radius_conf_2g) - networks_dict["5g"]["security"] = hostapd_constants.ENT_STRING - networks_dict["5g"].update(radius_conf_5g) - self.ent_networks = self.user_params["ent_networks"] - - network_list_2g.append(networks_dict["2g"]) - network_list_5g.append(networks_dict["5g"]) - - if ent_network_pwd: - networks_dict = self.get_open_network( - mirror_ap, - self.user_params["ent_networks_pwd"], - hidden=hidden, - same_ssid=same_ssid, - ) - networks_dict["2g"]["security"] = hostapd_constants.ENT_STRING - networks_dict["2g"].update(radius_conf_pwd) - networks_dict["5g"]["security"] = hostapd_constants.ENT_STRING - networks_dict["5g"].update(radius_conf_pwd) - self.ent_networks_pwd = self.user_params["ent_networks_pwd"] - - network_list_2g.append(networks_dict["2g"]) - 
network_list_5g.append(networks_dict["5g"]) - - orig_network_list_5g = copy.copy(network_list_5g) - orig_network_list_2g = copy.copy(network_list_2g) - - if len(network_list_5g) > 1: - self.config_5g = self._generate_legacy_ap_config(network_list_5g) - if len(network_list_2g) > 1: - self.config_2g = self._generate_legacy_ap_config(network_list_2g) - - self.access_points[count].start_ap(self.config_2g) - self.access_points[count].start_ap(self.config_5g) - self.populate_bssid( - count, - self.access_points[count], - orig_network_list_5g, - orig_network_list_2g, - ) - - # Repeat configuration on the second router. - if mirror_ap and ap_count == 2: - self.access_points[AP_2].start_ap(self.config_2g) - self.access_points[AP_2].start_ap(self.config_5g) - self.populate_bssid( - AP_2, - self.access_points[AP_2], - orig_network_list_5g, - orig_network_list_2g, - ) - - def _kill_processes(self, ap, daemon): - """Kill hostapd and dhcpd daemons - - Args: - ap: AP to cleanup - daemon: process to kill - - Returns: True/False if killing process is successful - """ - self.log.info("Killing %s" % daemon) - pids = ap.ssh.run("pidof %s" % daemon, ignore_status=True) - if pids.stdout: - ap.ssh.run("kill %s" % pids.stdout, ignore_status=True) - time.sleep(3) - pids = ap.ssh.run("pidof %s" % daemon, ignore_status=True) - if pids.stdout: - return False - return True - - def _cleanup_hostapd_and_dhcpd(self, count): - """Check if AP was cleaned up properly - - Kill hostapd and dhcpd processes if cleanup was not successful in the - last run - - Args: - count: AP to check - - Returns: - New AccessPoint object if AP required cleanup - - Raises: - Error: if the AccessPoint timed out to setup - """ - ap = self.access_points[count] - phy_ifaces = ap.interfaces.get_physical_interface() - kill_hostapd = False - for iface in phy_ifaces: - if "2g_" in iface or "5g_" in iface or "xg_" in iface: - kill_hostapd = True - break - - if not kill_hostapd: - return - - self.log.debug("Cleanup AP") - if not 
self._kill_processes(ap, "hostapd") or not self._kill_processes( - ap, "dhcpd" - ): - raise ("Failed to cleanup AP") - - ap.__init__(self.user_params["AccessPoint"][count]) - - def _generate_legacy_ap_config(self, network_list): - bss_settings = [] - wlan_2g = self.access_points[AP_1].wlan_2g - wlan_5g = self.access_points[AP_1].wlan_5g - ap_settings = network_list.pop(0) - # TODO:(bmahadev) This is a bug. We should not have to pop the first - # network in the list and treat it as a separate case. Instead, - # create_ap_preset() should be able to take NULL ssid and security and - # build config based on the bss_Settings alone. - hostapd_config_settings = network_list.pop(0) - for network in network_list: - if "password" in network: - bss_settings.append( - hostapd_bss_settings.BssSettings( - name=network["SSID"], - ssid=network["SSID"], - hidden=network["hiddenSSID"], - security=hostapd_security.Security( - security_mode=network["security"], - password=network["password"], - ), - ) - ) - elif "wepKeys" in network: - bss_settings.append( - hostapd_bss_settings.BssSettings( - name=network["SSID"], - ssid=network["SSID"], - hidden=network["hiddenSSID"], - security=hostapd_security.Security( - security_mode=network["security"], - password=network["wepKeys"][0], - ), - ) - ) - elif network["security"] == hostapd_constants.ENT_STRING: - bss_settings.append( - hostapd_bss_settings.BssSettings( - name=network["SSID"], - ssid=network["SSID"], - hidden=network["hiddenSSID"], - security=hostapd_security.Security( - security_mode=network["security"], - radius_server_ip=network["radius_server_ip"], - radius_server_port=network["radius_server_port"], - radius_server_secret=network["radius_server_secret"], - ), - ) - ) - else: - bss_settings.append( - hostapd_bss_settings.BssSettings( - name=network["SSID"], - ssid=network["SSID"], - hidden=network["hiddenSSID"], - ) - ) - if "password" in hostapd_config_settings: - config = hostapd_ap_preset.create_ap_preset( - 
iface_wlan_2g=wlan_2g, - iface_wlan_5g=wlan_5g, - channel=ap_settings["channel"], - ssid=hostapd_config_settings["SSID"], - hidden=hostapd_config_settings["hiddenSSID"], - security=hostapd_security.Security( - security_mode=hostapd_config_settings["security"], - password=hostapd_config_settings["password"], - ), - bss_settings=bss_settings, - ) - elif "wepKeys" in hostapd_config_settings: - config = hostapd_ap_preset.create_ap_preset( - iface_wlan_2g=wlan_2g, - iface_wlan_5g=wlan_5g, - channel=ap_settings["channel"], - ssid=hostapd_config_settings["SSID"], - hidden=hostapd_config_settings["hiddenSSID"], - security=hostapd_security.Security( - security_mode=hostapd_config_settings["security"], - password=hostapd_config_settings["wepKeys"][0], - ), - bss_settings=bss_settings, - ) - else: - config = hostapd_ap_preset.create_ap_preset( - iface_wlan_2g=wlan_2g, - iface_wlan_5g=wlan_5g, - channel=ap_settings["channel"], - ssid=hostapd_config_settings["SSID"], - hidden=hostapd_config_settings["hiddenSSID"], - bss_settings=bss_settings, - ) - return config - - def configure_packet_capture( - self, - channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ): - """Configure packet capture for 2G and 5G bands. - - Args: - channel_5g: Channel to set the monitor mode to for 5G band. - channel_2g: Channel to set the monitor mode to for 2G band. 
- """ - self.packet_capture = self.packet_capture[0] - result = self.packet_capture.configure_monitor_mode( - hostapd_constants.BAND_2G, channel_2g - ) - if not result: - raise ValueError("Failed to configure channel for 2G band") - - result = self.packet_capture.configure_monitor_mode( - hostapd_constants.BAND_5G, channel_5g - ) - if not result: - raise ValueError("Failed to configure channel for 5G band.") - - @staticmethod - def wifi_test_wrap(fn): - def _safe_wrap_test_case(self, *args, **kwargs): - test_id = "%s:%s:%s" % ( - self.__class__.__name__, - self.test_name, - self.log_begin_time.replace(" ", "-"), - ) - self.test_id = test_id - self.result_detail = "" - tries = int(self.user_params.get("wifi_auto_rerun", 3)) - for ad in self.android_devices: - ad.log_path = self.log_path - for i in range(tries + 1): - result = True - if i > 0: - log_string = "[Test Case] RETRY:%s %s" % (i, self.test_name) - self.log.info(log_string) - self._teardown_test(self.test_name) - self._setup_test(self.test_name) - try: - result = fn(self, *args, **kwargs) - except signals.TestFailure as e: - self.log.warn("Error msg: %s" % e) - if self.result_detail: - signal.details = self.result_detail - result = False - except signals.TestSignal: - if self.result_detail: - signal.details = self.result_detail - raise - except Exception as e: - self.log.exception(e) - asserts.fail(self.result_detail) - if result is False: - if i < tries: - continue - else: - break - if result is not False: - asserts.explicit_pass(self.result_detail) - else: - asserts.fail(self.result_detail) - - return _safe_wrap_test_case
diff --git a/src/antlion/test_utils/wifi/wifi_constants.py b/src/antlion/test_utils/wifi/wifi_constants.py deleted file mode 100644 index a348f81..0000000 --- a/src/antlion/test_utils/wifi/wifi_constants.py +++ /dev/null
@@ -1,120 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Constants for Wifi related events. -WIFI_CONNECTED = "WifiNetworkConnected" -WIFI_DISCONNECTED = "WifiNetworkDisconnected" -SUPPLICANT_CON_CHANGED = "SupplicantConnectionChanged" -WIFI_STATE_CHANGED = "WifiStateChanged" -WIFI_FORGET_NW_SUCCESS = "WifiManagerForgetNetworkOnSuccess" -WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH = "WifiManagerNetworkRequestMatchCallbackOnMatch" -WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_SUCCESS = ( - "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectSuccess" -) -WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_FAILURE = ( - "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectFailure" -) -WIFI_NETWORK_CB_ON_AVAILABLE = "WifiManagerNetworkCallbackOnAvailable" -WIFI_NETWORK_CB_ON_UNAVAILABLE = "WifiManagerNetworkCallbackOnUnavailable" -WIFI_NETWORK_CB_ON_LOST = "WifiManagerNetworkCallbackOnLost" -WIFI_NETWORK_SUGGESTION_POST_CONNECTION = "WifiNetworkSuggestionPostConnection" -WIFI_SUBSYSTEM_RESTARTING = "WifiSubsystemRestarting" -WIFI_SUBSYSTEM_RESTARTED = "WifiSubsystemRestarted" - -# These constants will be used by the ACTS wifi tests. 
-CONNECT_BY_CONFIG_SUCCESS = "WifiManagerConnectByConfigOnSuccess" -CONNECT_BY_NETID_SUCCESS = "WifiManagerConnectByNetIdOnSuccess" - -# Softap related constants -SOFTAP_CALLBACK_EVENT = "WifiManagerSoftApCallback-" -# Callback Event for softap state change -# WifiManagerSoftApCallback-[callbackId]-OnStateChanged -SOFTAP_STATE_CHANGED = "-OnStateChanged" -SOFTAP_STATE_CHANGE_CALLBACK_KEY = "State" -WIFI_AP_DISABLING_STATE = 10 -WIFI_AP_DISABLED_STATE = 11 -WIFI_AP_ENABLING_STATE = 12 -WIFI_AP_ENABLED_STATE = 13 -WIFI_AP_FAILED_STATE = 14 - -SOFTAP_RANDOMIZATION_NONE = 0 -SOFTAP_RANDOMIZATION_PERSISTENT = 1 - -# Callback Event for client number change: -# WifiManagerSoftApCallback-[callbackId]-OnNumClientsChanged -SOFTAP_NUMBER_CLIENTS_CHANGED_WITH_INFO = "-OnConnectedClientsChangedWithInfo" -SOFTAP_NUMBER_CLIENTS_CHANGED = "-OnNumClientsChanged" -SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY = "NumClients" -SOFTAP_CLIENTS_MACS_CALLBACK_KEY = "MacAddresses" -# Callback Event for softap info change -SOFTAP_INFO_CHANGED = "-OnInfoChanged" -SOFTAP_INFOLIST_CHANGED = "-OnInfoListChanged" -SOFTAP_INFO_FREQUENCY_CALLBACK_KEY = "frequency" -SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY = "bandwidth" -SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY = "wifiStandard" -SOFTAP_INFO_AUTO_SHUTDOWN_CALLBACK_KEY = "autoShutdownTimeoutMillis" -SOFTAP_INFO_BSSID_CALLBACK_KEY = "bssid" -# Callback Event for softap client blocking -SOFTAP_BLOCKING_CLIENT_CONNECTING = "-OnBlockedClientConnecting" -SOFTAP_BLOCKING_CLIENT_REASON_KEY = "BlockedReason" -SOFTAP_BLOCKING_CLIENT_WIFICLIENT_KEY = "WifiClient" -SAP_CLIENT_BLOCK_REASON_CODE_BLOCKED_BY_USER = 0 -SAP_CLIENT_BLOCK_REASON_CODE_NO_MORE_STAS = 1 - -# Callback Event for softap capability -SOFTAP_CAPABILITY_CHANGED = "-OnCapabilityChanged" -SOFTAP_CAPABILITY_MAX_SUPPORTED_CLIENTS = "maxSupportedClients" -SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST = "supported2GHzChannellist" -SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST = "supported5GHzChannellist" 
-SOFTAP_CAPABILITY_6GHZ_SUPPORTED_CHANNEL_LIST = "supported6GHzChannellist" -SOFTAP_CAPABILITY_60GHZ_SUPPORTED_CHANNEL_LIST = "supported60GHzChannellist" -SOFTAP_CAPABILITY_FEATURE_ACS = "acsOffloadSupported" -SOFTAP_CAPABILITY_FEATURE_CLIENT_CONTROL = "clientForceDisconnectSupported" -SOFTAP_CAPABILITY_FEATURE_WPA3_SAE = "wpa3SaeSupported" -SOFTAP_CAPABILITY_FEATURE_IEEE80211AX = "ieee80211axSupported" -SOFTAP_CAPABILITY_FEATURE_24GHZ = "24gSupported" -SOFTAP_CAPABILITY_FEATURE_5GHZ = "5gSupported" -SOFTAP_CAPABILITY_FEATURE_6GHZ = "6gSupported" -SOFTAP_CAPABILITY_FEATURE_60GHZ = "60gSupported" - -DEFAULT_SOFTAP_TIMEOUT_S = 600 # 10 minutes - -# AP related constants -AP_MAIN = "main_AP" -AP_AUX = "aux_AP" -SSID = "SSID" - -# cnss_diag property related constants -DEVICES_USING_LEGACY_PROP = ["sailfish", "marlin", "walleye", "taimen", "muskie"] -CNSS_DIAG_PROP = "persist.vendor.sys.cnss.diag_txt" -LEGACY_CNSS_DIAG_PROP = "persist.sys.cnss.diag_txt" - -# Delay before registering the match callback. -NETWORK_REQUEST_CB_REGISTER_DELAY_SEC = 2 - -# Constants for JSONObject representation of CoexUnsafeChannel -COEX_BAND = "band" -COEX_BAND_24_GHZ = "24_GHZ" -COEX_BAND_5_GHZ = "5_GHZ" -COEX_CHANNEL = "channel" -COEX_POWER_CAP_DBM = "powerCapDbm" - -# Constants for bundle keys for CoexCallback#onCoexUnsafeChannelsChanged -KEY_COEX_UNSAFE_CHANNELS = "KEY_COEX_UNSAFE_CHANNELS" -KEY_COEX_RESTRICTIONS = "KEY_COEX_RESTRICTIONS" - -# WiFi standards -WIFI_STANDARD_11AX = 6
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py deleted file mode 100644 index 567077e..0000000 --- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py +++ /dev/null
@@ -1,770 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import importlib -import ipaddress -import logging -import numpy -import re -import time -from concurrent.futures import ThreadPoolExecutor - -from antlion import utils -from antlion.controllers.android_device import AndroidDevice -from antlion.controllers.utils_lib import ssh -from antlion.test_utils.wifi import wifi_test_utils as wutils -from antlion.test_utils.wifi.wifi_performance_test_utils import ping_utils -from antlion.test_utils.wifi.wifi_performance_test_utils import qcom_utils -from antlion.test_utils.wifi.wifi_performance_test_utils import brcm_utils - -from mobly import asserts - -SHORT_SLEEP = 1 -MED_SLEEP = 6 -CHANNELS_6GHz = ["6g{}".format(4 * x + 1) for x in range(59)] -BAND_TO_CHANNEL_MAP = { - "2.4GHz": list(range(1, 14)), - "UNII-1": [36, 40, 44, 48], - "UNII-2": [52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 140], - "UNII-3": [149, 153, 157, 161, 165], - "6GHz": CHANNELS_6GHz, -} -CHANNEL_TO_BAND_MAP = { - channel: band - for band, channels in BAND_TO_CHANNEL_MAP.items() - for channel in channels -} - - -# Decorators -def nonblocking(f): - """Creates a decorator transforming function calls to non-blocking""" - - def wrap(*args, **kwargs): - executor = ThreadPoolExecutor(max_workers=1) - thread_future = executor.submit(f, *args, **kwargs) - # Ensure resources are freed up when 
executor ruturns or raises - executor.shutdown(wait=False) - return thread_future - - return wrap - - -def detect_wifi_platform(dut): - if hasattr(dut, "wifi_platform"): - return dut.wifi_platform - qcom_check = len(dut.get_file_names("/vendor/firmware/wlan/qca_cld/")) - if qcom_check: - dut.wifi_platform = "qcom" - else: - dut.wifi_platform = "brcm" - return dut.wifi_platform - - -def detect_wifi_decorator(f): - def wrap(*args, **kwargs): - if "dut" in kwargs: - dut = kwargs["dut"] - else: - dut = next(arg for arg in args if type(arg) == AndroidDevice) - dut_package = ( - "acts_contrib.test_utils.wifi.wifi_performance_test_utils.{}_utils".format( - detect_wifi_platform(dut) - ) - ) - dut_package = importlib.import_module(dut_package) - f_decorated = getattr(dut_package, f.__name__, lambda: None) - return f_decorated(*args, **kwargs) - - return wrap - - -# JSON serializer -def serialize_dict(input_dict): - """Function to serialize dicts to enable JSON output""" - output_dict = collections.OrderedDict() - for key, value in input_dict.items(): - output_dict[_serialize_value(key)] = _serialize_value(value) - return output_dict - - -def _serialize_value(value): - """Function to recursively serialize dict entries to enable JSON output""" - if isinstance(value, tuple): - return str(value) - if isinstance(value, numpy.int64): - return int(value) - if isinstance(value, numpy.float64): - return float(value) - if isinstance(value, list): - return [_serialize_value(x) for x in value] - if isinstance(value, numpy.ndarray): - return [_serialize_value(x) for x in value] - elif isinstance(value, dict): - return serialize_dict(value) - elif type(value) in (float, int, bool, str): - return value - else: - return "Non-serializable object" - - -def extract_sub_dict(full_dict, fields): - sub_dict = collections.OrderedDict((field, full_dict[field]) for field in fields) - return sub_dict - - -# Miscellaneous Wifi Utilities -def check_skip_conditions(testcase_params, dut, access_point, 
ota_chamber=None): - """Checks if test should be skipped.""" - # Check battery level before test - if not health_check(dut, 10): - asserts.skip("DUT battery level too low.") - if not access_point.band_lookup_by_channel(testcase_params["channel"]): - asserts.skip("AP does not support requested channel.") - if ( - ota_chamber - and CHANNEL_TO_BAND_MAP[testcase_params["channel"]] - not in ota_chamber.SUPPORTED_BANDS - ): - asserts.skip("OTA chamber does not support requested channel.") - # Check if 6GHz is supported by checking capabilities in the US. - if not dut.droid.wifiCheckState(): - wutils.wifi_toggle_state(dut, True) - iw_list = dut.adb.shell("iw list") - supports_6ghz = "6135 MHz" in iw_list - supports_160mhz = "Supported Channel Width: 160 MHz" in iw_list - if testcase_params.get("bandwidth", 20) == 160 and not supports_160mhz: - asserts.skip("DUT does not support 160 MHz networks.") - if testcase_params.get("channel", 6) in CHANNELS_6GHz and not supports_6ghz: - asserts.skip("DUT does not support 6 GHz band.") - - -def validate_network(dut, ssid): - """Check that DUT has a valid internet connection through expected SSID - - Args: - dut: android device of interest - ssid: expected ssid - """ - try: - connected = wutils.validate_connection(dut, wait_time=3) is not None - current_network = dut.droid.wifiGetConnectionInfo() - except: - connected = False - current_network = None - if connected and current_network["SSID"] == ssid: - return True - else: - return False - - -def get_server_address(ssh_connection, dut_ip, subnet_mask): - """Get server address on a specific subnet, - - This function retrieves the LAN or WAN IP of a remote machine used in - testing. If subnet_mask is set to 'public' it returns a machines global ip, - else it returns the ip belonging to the dut local network given the dut's - ip and subnet mask. 
- - Args: - ssh_connection: object representing server for which we want an ip - dut_ip: string in ip address format, i.e., xxx.xxx.xxx.xxx - subnet_mask: string representing subnet mask (public for global ip) - """ - ifconfig_out = ssh_connection.run("ifconfig").stdout - ip_list = re.findall("inet (?:addr:)?(\d+.\d+.\d+.\d+)", ifconfig_out) - ip_list = [ipaddress.ip_address(ip) for ip in ip_list] - - if subnet_mask == "public": - for ip in ip_list: - # is_global is not used to allow for CGNAT ips in 100.x.y.z range - if not ip.is_private: - return str(ip) - else: - dut_network = ipaddress.ip_network( - "{}/{}".format(dut_ip, subnet_mask), strict=False - ) - for ip in ip_list: - if ip in dut_network: - return str(ip) - logging.error("No IP address found in requested subnet") - - -# Ping utilities -def get_ping_stats(src_device, dest_address, ping_duration, ping_interval, ping_size): - """Run ping to or from the DUT. - - The function computes either pings the DUT or pings a remote ip from - DUT. 
- - Args: - src_device: object representing device to ping from - dest_address: ip address to ping - ping_duration: timeout to set on the ping process (in seconds) - ping_interval: time between pings (in seconds) - ping_size: size of ping packet payload - Returns: - ping_result: dict containing ping results and other meta data - """ - ping_count = int(ping_duration / ping_interval) - ping_deadline = int(ping_count * ping_interval) + 1 - ping_cmd_linux = "ping -c {} -w {} -i {} -s {} -D".format( - ping_count, - ping_deadline, - ping_interval, - ping_size, - ) - - ping_cmd_macos = "ping -c {} -t {} -i {} -s {}".format( - ping_count, - ping_deadline, - ping_interval, - ping_size, - ) - - if isinstance(src_device, AndroidDevice): - ping_cmd = "{} {}".format(ping_cmd_linux, dest_address) - ping_output = src_device.adb.shell( - ping_cmd, timeout=ping_deadline + SHORT_SLEEP, ignore_status=True - ) - elif isinstance(src_device, ssh.connection.SshConnection): - platform = src_device.run("uname").stdout - if "linux" in platform.lower(): - ping_cmd = "sudo {} {}".format(ping_cmd_linux, dest_address) - elif "darwin" in platform.lower(): - ping_cmd = "sudo {} {}| while IFS= read -r line; do printf '[%s] %s\n' \"$(gdate '+%s.%N')\" \"$line\"; done".format( - ping_cmd_macos, dest_address - ) - ping_output = src_device.run( - ping_cmd, timeout=ping_deadline + SHORT_SLEEP, ignore_status=True - ).stdout - else: - raise TypeError( - "Unable to ping using src_device of type %s." 
% type(src_device) - ) - return ping_utils.PingResult(ping_output.splitlines()) - - -@nonblocking -def get_ping_stats_nb( - src_device, dest_address, ping_duration, ping_interval, ping_size -): - return get_ping_stats( - src_device, dest_address, ping_duration, ping_interval, ping_size - ) - - -# Iperf utilities -@nonblocking -def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag, timeout): - return iperf_client.start(iperf_server_address, iperf_args, tag, timeout) - - -def get_iperf_arg_string( - duration, - reverse_direction, - interval=1, - traffic_type="TCP", - socket_size=None, - num_processes=1, - udp_throughput="1000M", - ipv6=False, -): - """Function to format iperf client arguments. - - This function takes in iperf client parameters and returns a properly - formatter iperf arg string to be used in throughput tests. - - Args: - duration: iperf duration in seconds - reverse_direction: boolean controlling the -R flag for iperf clients - interval: iperf print interval - traffic_type: string specifying TCP or UDP traffic - socket_size: string specifying TCP window or socket buffer, e.g., 2M - num_processes: int specifying number of iperf processes - udp_throughput: string specifying TX throughput in UDP tests, e.g. 
100M - ipv6: boolean controlling the use of IP V6 - Returns: - iperf_args: string of formatted iperf args - """ - iperf_args = "-i {} -t {} -J ".format(interval, duration) - if ipv6: - iperf_args = iperf_args + "-6 " - if traffic_type.upper() == "UDP": - iperf_args = iperf_args + "-u -b {} -l 1470 -P {} ".format( - udp_throughput, num_processes - ) - elif traffic_type.upper() == "TCP": - iperf_args = iperf_args + "-P {} ".format(num_processes) - if socket_size: - iperf_args = iperf_args + "-w {} ".format(socket_size) - if reverse_direction: - iperf_args = iperf_args + " -R" - return iperf_args - - -# Attenuator Utilities -def atten_by_label(atten_list, path_label, atten_level): - """Attenuate signals according to their path label. - - Args: - atten_list: list of attenuators to iterate over - path_label: path label on which to set desired attenuation - atten_level: attenuation desired on path - """ - for atten in atten_list: - if path_label in atten.path: - atten.set_atten(atten_level, retry=True) - - -def get_atten_for_target_rssi(target_rssi, attenuators, dut, ping_server): - """Function to estimate attenuation to hit a target RSSI. - - This function estimates a constant attenuation setting on all atennuation - ports to hit a target RSSI. The estimate is not meant to be exact or - guaranteed. - - Args: - target_rssi: rssi of interest - attenuators: list of attenuator ports - dut: android device object assumed connected to a wifi network. 
- ping_server: ssh connection object to ping server - Returns: - target_atten: attenuation setting to achieve target_rssi - """ - logging.info("Searching attenuation for RSSI = {}dB".format(target_rssi)) - # Set attenuator to 0 dB - for atten in attenuators: - atten.set_atten(0, strict=False, retry=True) - # Start ping traffic - dut_ip = dut.droid.connectivityGetIPv4Addresses("wlan0")[0] - # Measure starting RSSI - ping_future = get_ping_stats_nb( - src_device=ping_server, - dest_address=dut_ip, - ping_duration=1.5, - ping_interval=0.02, - ping_size=64, - ) - current_rssi = get_connected_rssi( - dut, - num_measurements=4, - polling_frequency=0.25, - first_measurement_delay=0.5, - disconnect_warning=1, - ignore_samples=1, - ) - current_rssi = current_rssi["signal_poll_rssi"]["mean"] - ping_future.result() - target_atten = 0 - logging.debug( - "RSSI @ {0:.2f}dB attenuation = {1:.2f}".format(target_atten, current_rssi) - ) - within_range = 0 - for idx in range(20): - atten_delta = max(min(current_rssi - target_rssi, 20), -20) - target_atten = int((target_atten + atten_delta) * 4) / 4 - if target_atten < 0: - return 0 - if target_atten > attenuators[0].get_max_atten(): - return attenuators[0].get_max_atten() - for atten in attenuators: - atten.set_atten(target_atten, strict=False, retry=True) - ping_future = get_ping_stats_nb( - src_device=ping_server, - dest_address=dut_ip, - ping_duration=1.5, - ping_interval=0.02, - ping_size=64, - ) - current_rssi = get_connected_rssi( - dut, - num_measurements=4, - polling_frequency=0.25, - first_measurement_delay=0.5, - disconnect_warning=1, - ignore_samples=1, - ) - current_rssi = current_rssi["signal_poll_rssi"]["mean"] - ping_future.result() - logging.info( - "RSSI @ {0:.2f}dB attenuation = {1:.2f}".format(target_atten, current_rssi) - ) - if abs(current_rssi - target_rssi) < 1: - if within_range: - logging.info( - "Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}." 
- "Attenuation: {2:.2f}, Iterations = {3:.2f}".format( - current_rssi, target_rssi, target_atten, idx - ) - ) - return target_atten - else: - within_range = True - else: - within_range = False - return target_atten - - -def get_current_atten_dut_chain_map(attenuators, dut, ping_server, ping_from_dut=False): - """Function to detect mapping between attenuator ports and DUT chains. - - This function detects the mapping between attenuator ports and DUT chains - in cases where DUT chains are connected to only one attenuator port. The - function assumes the DUT is already connected to a wifi network. The - function starts by measuring per chain RSSI at 0 attenuation, then - attenuates one port at a time looking for the chain that reports a lower - RSSI. - - Args: - attenuators: list of attenuator ports - dut: android device object assumed connected to a wifi network. - ping_server: ssh connection object to ping server - ping_from_dut: boolean controlling whether to ping from or to dut - Returns: - chain_map: list of dut chains, one entry per attenuator port - """ - # Set attenuator to 0 dB - for atten in attenuators: - atten.set_atten(0, strict=False, retry=True) - # Start ping traffic - dut_ip = dut.droid.connectivityGetIPv4Addresses("wlan0")[0] - if ping_from_dut: - ping_future = get_ping_stats_nb( - dut, ping_server._settings.hostname, 11, 0.02, 64 - ) - else: - ping_future = get_ping_stats_nb(ping_server, dut_ip, 11, 0.02, 64) - # Measure starting RSSI - base_rssi = get_connected_rssi(dut, 4, 0.25, 1) - chain0_base_rssi = base_rssi["chain_0_rssi"]["mean"] - chain1_base_rssi = base_rssi["chain_1_rssi"]["mean"] - if chain0_base_rssi < -70 or chain1_base_rssi < -70: - logging.warning("RSSI might be too low to get reliable chain map.") - # Compile chain map by attenuating one path at a time and seeing which - # chain's RSSI degrades - chain_map = [] - for test_atten in attenuators: - # Set one attenuator to 30 dB down - test_atten.set_atten(30, strict=False, retry=True) 
- # Get new RSSI - test_rssi = get_connected_rssi(dut, 4, 0.25, 1) - # Assign attenuator to path that has lower RSSI - if ( - chain0_base_rssi > -70 - and chain0_base_rssi - test_rssi["chain_0_rssi"]["mean"] > 10 - ): - chain_map.append("DUT-Chain-0") - elif ( - chain1_base_rssi > -70 - and chain1_base_rssi - test_rssi["chain_1_rssi"]["mean"] > 10 - ): - chain_map.append("DUT-Chain-1") - else: - chain_map.append(None) - # Reset attenuator to 0 - test_atten.set_atten(0, strict=False, retry=True) - ping_future.result() - logging.debug("Chain Map: {}".format(chain_map)) - return chain_map - - -def get_full_rf_connection_map( - attenuators, dut, ping_server, networks, ping_from_dut=False -): - """Function to detect per-network connections between attenuator and DUT. - - This function detects the mapping between attenuator ports and DUT chains - on all networks in its arguments. The function connects the DUT to each - network then calls get_current_atten_dut_chain_map to get the connection - map on the current network. The function outputs the results in two formats - to enable easy access when users are interested in indexing by network or - attenuator port. - - Args: - attenuators: list of attenuator ports - dut: android device object assumed connected to a wifi network. - ping_server: ssh connection object to ping server - networks: dict of network IDs and configs - Returns: - rf_map_by_network: dict of RF connections indexed by network. 
- rf_map_by_atten: list of RF connections indexed by attenuator - """ - for atten in attenuators: - atten.set_atten(0, strict=False, retry=True) - - rf_map_by_network = collections.OrderedDict() - rf_map_by_atten = [[] for atten in attenuators] - for net_id, net_config in networks.items(): - wutils.reset_wifi(dut) - wutils.wifi_connect( - dut, - net_config, - num_of_tries=1, - assert_on_fail=False, - check_connectivity=False, - ) - rf_map_by_network[net_id] = get_current_atten_dut_chain_map( - attenuators, dut, ping_server, ping_from_dut - ) - for idx, chain in enumerate(rf_map_by_network[net_id]): - if chain: - rf_map_by_atten[idx].append({"network": net_id, "dut_chain": chain}) - logging.debug("RF Map (by Network): {}".format(rf_map_by_network)) - logging.debug("RF Map (by Atten): {}".format(rf_map_by_atten)) - - return rf_map_by_network, rf_map_by_atten - - -# Generic device utils -def get_dut_temperature(dut): - """Function to get dut temperature. - - The function fetches and returns the reading from the temperature sensor - used for skin temperature and thermal throttling. - - Args: - dut: AndroidDevice of interest - Returns: - temperature: device temperature. 0 if temperature could not be read - """ - candidate_zones = [ - "/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp", - "/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp", - "/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp", - "/sys/devices/virtual/thermal/tz-by-name/back_therm/temp", - "/dev/thermal/tz-by-name/quiet_therm/temp", - ] - for zone in candidate_zones: - try: - temperature = int(dut.adb.shell("cat {}".format(zone))) - break - except: - temperature = 0 - if temperature == 0: - logging.debug("Could not check DUT temperature.") - elif temperature > 100: - temperature = temperature / 1000 - return temperature - - -def wait_for_dut_cooldown(dut, target_temp=50, timeout=300): - """Function to wait for a DUT to cool down. 
- - Args: - dut: AndroidDevice of interest - target_temp: target cooldown temperature - timeout: maxt time to wait for cooldown - """ - start_time = time.time() - while time.time() - start_time < timeout: - temperature = get_dut_temperature(dut) - if temperature < target_temp: - break - time.sleep(SHORT_SLEEP) - elapsed_time = time.time() - start_time - logging.debug( - "DUT Final Temperature: {}C. Cooldown duration: {}".format( - temperature, elapsed_time - ) - ) - - -def health_check(dut, batt_thresh=5, temp_threshold=53, cooldown=1): - """Function to check health status of a DUT. - - The function checks both battery levels and temperature to avoid DUT - powering off during the test. - - Args: - dut: AndroidDevice of interest - batt_thresh: battery level threshold - temp_threshold: temperature threshold - cooldown: flag to wait for DUT to cool down when overheating - Returns: - health_check: boolean confirming device is healthy - """ - health_check = True - battery_level = utils.get_battery_level(dut) - if battery_level < batt_thresh: - logging.warning("Battery level low ({}%)".format(battery_level)) - health_check = False - else: - logging.debug("Battery level = {}%".format(battery_level)) - - temperature = get_dut_temperature(dut) - if temperature > temp_threshold: - if cooldown: - logging.warning("Waiting for DUT to cooldown. 
({} C)".format(temperature)) - wait_for_dut_cooldown(dut, target_temp=temp_threshold - 5) - else: - logging.warning("DUT Overheating ({} C)".format(temperature)) - health_check = False - else: - logging.debug("DUT Temperature = {} C".format(temperature)) - return health_check - - -# Wifi Device Utils -def empty_rssi_result(): - return collections.OrderedDict( - [("data", []), ("mean", float("nan")), ("stdev", float("nan"))] - ) - - -@nonblocking -def get_connected_rssi_nb( - dut, - num_measurements=1, - polling_frequency=SHORT_SLEEP, - first_measurement_delay=0, - disconnect_warning=True, - ignore_samples=0, - interface="wlan0", -): - return get_connected_rssi( - dut, - num_measurements, - polling_frequency, - first_measurement_delay, - disconnect_warning, - ignore_samples, - interface, - ) - - -@detect_wifi_decorator -def get_connected_rssi( - dut, - num_measurements=1, - polling_frequency=SHORT_SLEEP, - first_measurement_delay=0, - disconnect_warning=True, - ignore_samples=0, - interface="wlan0", -): - """Gets all RSSI values reported for the connected access point/BSSID. - - Args: - dut: android device object from which to get RSSI - num_measurements: number of scans done, and RSSIs collected - polling_frequency: time to wait between RSSI measurements - disconnect_warning: boolean controlling disconnection logging messages - ignore_samples: number of leading samples to ignore - Returns: - connected_rssi: dict containing the measurements results for - all reported RSSI values (signal_poll, per chain, etc.) and their - statistics - """ - - -@nonblocking -def get_scan_rssi_nb(dut, tracked_bssids, num_measurements=1): - return get_scan_rssi(dut, tracked_bssids, num_measurements) - - -@detect_wifi_decorator -def get_scan_rssi(dut, tracked_bssids, num_measurements=1): - """Gets scan RSSI for specified BSSIDs. 
- - Args: - dut: android device object from which to get RSSI - tracked_bssids: array of BSSIDs to gather RSSI data for - num_measurements: number of scans done, and RSSIs collected - Returns: - scan_rssi: dict containing the measurement results as well as the - statistics of the scan RSSI for all BSSIDs in tracked_bssids - """ - - -@detect_wifi_decorator -def get_sw_signature(dut): - """Function that checks the signature for wifi firmware and config files. - - Returns: - bdf_signature: signature consisting of last three digits of bdf cksums - fw_signature: floating point firmware version, i.e., major.minor - """ - - -@detect_wifi_decorator -def get_country_code(dut): - """Function that returns the current wifi country code.""" - - -@detect_wifi_decorator -def push_config(dut, config_file): - """Function to push Wifi BDF files - - This function checks for existing wifi bdf files and over writes them all, - for simplicity, with the bdf file provided in the arguments. The dut is - rebooted for the bdf file to take effect - - Args: - dut: dut to push bdf file to - config_file: path to bdf_file to push - """ - - -@detect_wifi_decorator -def start_wifi_logging(dut): - """Function to start collecting wifi-related logs""" - - -@detect_wifi_decorator -def stop_wifi_logging(dut): - """Function to start collecting wifi-related logs""" - - -@detect_wifi_decorator -def push_firmware(dut, firmware_files): - """Function to push Wifi firmware files - - Args: - dut: dut to push bdf file to - firmware_files: path to wlanmdsp.mbn file - datamsc_file: path to Data.msc file - """ - - -@detect_wifi_decorator -def disable_beamforming(dut): - """Function to disable beamforming.""" - - -@detect_wifi_decorator -def set_nss_capability(dut, nss): - """Function to set number of spatial streams supported.""" - - -@detect_wifi_decorator -def set_chain_mask(dut, chain_mask): - """Function to set DUT chain mask. 
- - Args: - dut: android device - chain_mask: desired chain mask in [0, 1, '2x2'] - """ - - -# Link layer stats utilities -class LinkLayerStats: - def __new__(self, dut, llstats_enabled=True): - if detect_wifi_platform(dut) == "qcom": - return qcom_utils.LinkLayerStats(dut, llstats_enabled) - else: - return brcm_utils.LinkLayerStats(dut, llstats_enabled)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py deleted file mode 100644 index 0c9aec3..0000000 --- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py +++ /dev/null
@@ -1,389 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import bokeh, bokeh.plotting, bokeh.io -import collections -import itertools -import json -import math - - -# Plotting Utilities -class BokehFigure: - """Class enabling simplified Bokeh plotting.""" - - COLORS = [ - "black", - "blue", - "blueviolet", - "brown", - "burlywood", - "cadetblue", - "cornflowerblue", - "crimson", - "cyan", - "darkblue", - "darkgreen", - "darkmagenta", - "darkorange", - "darkred", - "deepskyblue", - "goldenrod", - "green", - "grey", - "indigo", - "navy", - "olive", - "orange", - "red", - "salmon", - "teal", - "yellow", - ] - MARKERS = [ - "asterisk", - "circle", - "circle_cross", - "circle_x", - "cross", - "diamond", - "diamond_cross", - "hex", - "inverted_triangle", - "square", - "square_x", - "square_cross", - "triangle", - "x", - ] - - TOOLS = "box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save" - - def __init__( - self, - title=None, - x_label=None, - primary_y_label=None, - secondary_y_label=None, - height=700, - width=1100, - title_size="15pt", - axis_label_size="12pt", - legend_label_size="12pt", - axis_tick_label_size="12pt", - x_axis_type="auto", - sizing_mode="scale_both", - json_file=None, - ): - if json_file: - self.load_from_json(json_file) - else: - self.figure_data = [] - self.fig_property = { - "title": title, - "x_label": x_label, - "primary_y_label": primary_y_label, - 
"secondary_y_label": secondary_y_label, - "num_lines": 0, - "height": height, - "width": width, - "title_size": title_size, - "axis_label_size": axis_label_size, - "legend_label_size": legend_label_size, - "axis_tick_label_size": axis_tick_label_size, - "x_axis_type": x_axis_type, - "sizing_mode": sizing_mode, - } - - def init_plot(self): - self.plot = bokeh.plotting.figure( - sizing_mode=self.fig_property["sizing_mode"], - plot_width=self.fig_property["width"], - plot_height=self.fig_property["height"], - title=self.fig_property["title"], - tools=self.TOOLS, - x_axis_type=self.fig_property["x_axis_type"], - output_backend="webgl", - ) - tooltips = [ - ("index", "$index"), - ("(x,y)", "($x, $y)"), - ] - hover_set = [] - for line in self.figure_data: - hover_set.extend(line["hover_text"].keys()) - hover_set = set(hover_set) - for item in hover_set: - tooltips.append((item, "@{}".format(item))) - self.plot.hover.tooltips = tooltips - self.plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions="width")) - self.plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions="height")) - - def _filter_line(self, x_data, y_data, hover_text=None): - """Function to remove NaN points from bokeh plots.""" - x_data_filtered = [] - y_data_filtered = [] - hover_text_filtered = {} - for idx, xy in enumerate( - itertools.zip_longest(x_data, y_data, fillvalue=float("nan")) - ): - if not math.isnan(xy[1]): - x_data_filtered.append(xy[0]) - y_data_filtered.append(xy[1]) - if hover_text: - for key, value in hover_text.items(): - hover_text_filtered.setdefault(key, []) - hover_text_filtered[key].append( - value[idx] if len(value) > idx else "" - ) - return x_data_filtered, y_data_filtered, hover_text_filtered - - def add_line( - self, - x_data, - y_data, - legend, - hover_text=None, - color=None, - width=3, - style="solid", - marker=None, - marker_size=10, - shaded_region=None, - y_axis="default", - ): - """Function to add line to existing BokehFigure. 
- - Args: - x_data: list containing x-axis values for line - y_data: list containing y_axis values for line - legend: string containing line title - hover_text: text to display when hovering over lines - color: string describing line color - width: integer line width - style: string describing line style, e.g, solid or dashed - marker: string specifying line marker, e.g., cross - shaded region: data describing shaded region to plot - y_axis: identifier for y-axis to plot line against - """ - if y_axis not in ["default", "secondary"]: - raise ValueError("y_axis must be default or secondary") - if color == None: - color = self.COLORS[self.fig_property["num_lines"] % len(self.COLORS)] - if style == "dashed": - style = [5, 5] - if isinstance(hover_text, list): - hover_text = {"info": hover_text} - x_data_filter, y_data_filter, hover_text_filter = self._filter_line( - x_data, y_data, hover_text - ) - self.figure_data.append( - { - "x_data": x_data_filter, - "y_data": y_data_filter, - "legend": legend, - "hover_text": hover_text_filter, - "color": color, - "width": width, - "style": style, - "marker": marker, - "marker_size": marker_size, - "shaded_region": shaded_region, - "y_axis": y_axis, - } - ) - self.fig_property["num_lines"] += 1 - - def add_scatter( - self, - x_data, - y_data, - legend, - hover_text=None, - color=None, - marker=None, - marker_size=10, - y_axis="default", - ): - """Function to add line to existing BokehFigure. 
- - Args: - x_data: list containing x-axis values for line - y_data: list containing y_axis values for line - legend: string containing line title - hover_text: text to display when hovering over lines - color: string describing line color - marker: string specifying marker, e.g., cross - y_axis: identifier for y-axis to plot line against - """ - if y_axis not in ["default", "secondary"]: - raise ValueError("y_axis must be default or secondary") - if color == None: - color = self.COLORS[self.fig_property["num_lines"] % len(self.COLORS)] - if marker == None: - marker = self.MARKERS[self.fig_property["num_lines"] % len(self.MARKERS)] - self.figure_data.append( - { - "x_data": x_data, - "y_data": y_data, - "legend": legend, - "hover_text": hover_text, - "color": color, - "width": 0, - "style": "solid", - "marker": marker, - "marker_size": marker_size, - "shaded_region": None, - "y_axis": y_axis, - } - ) - self.fig_property["num_lines"] += 1 - - def generate_figure(self, output_file=None, save_json=True): - """Function to generate and save BokehFigure. 
- - Args: - output_file: string specifying output file path - """ - self.init_plot() - two_axes = False - for line in self.figure_data: - data_dict = {"x": line["x_data"], "y": line["y_data"]} - for key, value in line["hover_text"].items(): - data_dict[key] = value - source = bokeh.models.ColumnDataSource(data=data_dict) - if line["width"] > 0: - self.plot.line( - x="x", - y="y", - legend_label=line["legend"], - line_width=line["width"], - color=line["color"], - line_dash=line["style"], - name=line["y_axis"], - y_range_name=line["y_axis"], - source=source, - ) - if line["shaded_region"]: - band_x = line["shaded_region"]["x_vector"] - band_x.extend(line["shaded_region"]["x_vector"][::-1]) - band_y = line["shaded_region"]["lower_limit"] - band_y.extend(line["shaded_region"]["upper_limit"][::-1]) - self.plot.patch( - band_x, band_y, color="#7570B3", line_alpha=0.1, fill_alpha=0.1 - ) - if line["marker"] in self.MARKERS: - marker_func = getattr(self.plot, line["marker"]) - marker_func( - x="x", - y="y", - size=line["marker_size"], - legend_label=line["legend"], - line_color=line["color"], - fill_color=line["color"], - name=line["y_axis"], - y_range_name=line["y_axis"], - source=source, - ) - if line["y_axis"] == "secondary": - two_axes = True - - # x-axis formatting - self.plot.xaxis.axis_label = self.fig_property["x_label"] - self.plot.x_range.range_padding = 0 - self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[ - "axis_label_size" - ] - self.plot.xaxis.major_label_text_font_size = self.fig_property[ - "axis_tick_label_size" - ] - # y-axis formatting - self.plot.yaxis[0].axis_label = self.fig_property["primary_y_label"] - self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[ - "axis_label_size" - ] - self.plot.yaxis.major_label_text_font_size = self.fig_property[ - "axis_tick_label_size" - ] - self.plot.y_range = bokeh.models.DataRange1d(names=["default"]) - if two_axes and "secondary" not in self.plot.extra_y_ranges: - 
self.plot.extra_y_ranges = { - "secondary": bokeh.models.DataRange1d(names=["secondary"]) - } - self.plot.add_layout( - bokeh.models.LinearAxis( - y_range_name="secondary", - axis_label=self.fig_property["secondary_y_label"], - axis_label_text_font_size=self.fig_property["axis_label_size"], - ), - "right", - ) - # plot formatting - self.plot.legend.location = "top_right" - self.plot.legend.click_policy = "hide" - self.plot.title.text_font_size = self.fig_property["title_size"] - self.plot.legend.label_text_font_size = self.fig_property["legend_label_size"] - - if output_file is not None: - self.save_figure(output_file, save_json) - return self.plot - - def load_from_json(self, file_path): - with open(file_path, "r") as json_file: - fig_dict = json.load(json_file) - self.fig_property = fig_dict["fig_property"] - self.figure_data = fig_dict["figure_data"] - - def _save_figure_json(self, output_file): - """Function to save a json format of a figure""" - figure_dict = collections.OrderedDict( - fig_property=self.fig_property, figure_data=self.figure_data - ) - output_file = output_file.replace(".html", "_plot_data.json") - with open(output_file, "w") as outfile: - json.dump(figure_dict, outfile, indent=4) - - def save_figure(self, output_file, save_json=True): - """Function to save BokehFigure. - - Args: - output_file: string specifying output file path - save_json: flag controlling json outputs - """ - if save_json: - self._save_figure_json(output_file) - bokeh.io.output_file(output_file) - bokeh.io.save(self.plot) - - @staticmethod - def save_figures(figure_array, output_file_path, save_json=True): - """Function to save list of BokehFigures in one file. 
- - Args: - figure_array: list of BokehFigure object to be plotted - output_file: string specifying output file path - """ - for idx, figure in enumerate(figure_array): - figure.generate_figure() - if save_json: - json_file_path = output_file_path.replace( - ".html", "{}-plot_data.json".format(idx) - ) - figure._save_figure_json(json_file_path) - plot_array = [figure.plot for figure in figure_array] - all_plots = bokeh.layouts.column(children=plot_array, sizing_mode="scale_width") - bokeh.plotting.output_file(output_file_path) - bokeh.plotting.save(all_plots)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py deleted file mode 100644 index 2c0dc4c..0000000 --- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py +++ /dev/null
@@ -1,734 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import hashlib -import itertools -import logging -import math -import numpy -import re -import statistics -import time - -VERY_SHORT_SLEEP = 0.5 -SHORT_SLEEP = 1 -MED_SLEEP = 6 -DISCONNECTION_MESSAGE_BRCM = "driver adapter not found" -RSSI_ERROR_VAL = float("nan") -RATE_TABLE = { - "HT": { - 1: { - 20: [7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2], - 40: [15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0], - }, - 2: { - 20: [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 14.4, - 28.8, - 43.4, - 57.8, - 86.8, - 115.6, - 130, - 144.4, - ], - 40: [0, 0, 0, 0, 0, 0, 0, 0, 30, 60, 90, 120, 180, 240, 270, 300], - }, - }, - "VHT": { - 1: { - 20: [ - 7.2, - 14.4, - 21.7, - 28.9, - 43.4, - 57.8, - 65.0, - 72.2, - 86.7, - 96.2, - 129.0, - 143.4, - ], - 40: [ - 15.0, - 30.0, - 45.0, - 60.0, - 90.0, - 120.0, - 135.0, - 150.0, - 180.0, - 200.0, - 258, - 286.8, - ], - 80: [ - 32.5, - 65.0, - 97.5, - 130.0, - 195.0, - 260.0, - 292.5, - 325.0, - 390.0, - 433.3, - 540.4, - 600.4, - ], - 160: [ - 65.0, - 130.0, - 195.0, - 260.0, - 390.0, - 520.0, - 585.0, - 650.0, - 780.0, - 1080.8, - 1200.8, - ], - }, - 2: { - 20: [ - 14.4, - 28.8, - 43.4, - 57.8, - 86.8, - 115.6, - 130, - 144.4, - 173.4, - 192.4, - 258, - 286.8, - ], - 40: [30, 60, 90, 120, 180, 240, 270, 300, 360, 400, 516, 573.6], - 80: [65, 130, 195, 260, 390, 520, 585, 650, 780, 
866.6, 1080.8, 1200.8], - 160: [130, 260, 390, 520, 780, 1040, 1170, 1300, 1560, 2161.6, 2401.6], - }, - }, - "HE": { - 1: { - 20: [ - 8.6, - 17.2, - 25.8, - 34.4, - 51.6, - 68.8, - 77.4, - 86.0, - 103.2, - 114.7, - 129.0, - 143.4, - ], - 40: [ - 17.2, - 34.4, - 51.6, - 68.8, - 103.2, - 137.6, - 154.8, - 172, - 206.4, - 229.4, - 258, - 286.8, - ], - 80: [ - 36.0, - 72.1, - 108.1, - 144.1, - 216.2, - 288.2, - 324.3, - 360.3, - 432.4, - 480.4, - 540.4, - 600.4, - ], - 160: [ - 72, - 144.2, - 216.2, - 288.2, - 432.4, - 576.4, - 648.6, - 720.6, - 864.8, - 960.8, - 1080.8, - 1200.8, - ], - }, - 2: { - 20: [ - 17.2, - 34.4, - 51.6, - 68.8, - 103.2, - 137.6, - 154.8, - 172, - 206.4, - 229.4, - 258, - 286.8, - ], - 40: [ - 34.4, - 68.8, - 103.2, - 137.6, - 206.4, - 275.2, - 309.6, - 344, - 412.8, - 458.8, - 516, - 573.6, - ], - 80: [ - 72, - 144.2, - 216.2, - 288.2, - 432.4, - 576.4, - 648.6, - 720.6, - 864.8, - 960.8, - 1080.8, - 1200.8, - ], - 160: [ - 144, - 288.4, - 432.4, - 576.4, - 864.8, - 1152.8, - 1297.2, - 1441.2, - 1729.6, - 1921.6, - 2161.6, - 2401.6, - ], - }, - }, -} - - -# Rssi Utilities -def empty_rssi_result(): - return collections.OrderedDict([("data", []), ("mean", None), ("stdev", None)]) - - -def get_connected_rssi( - dut, - num_measurements=1, - polling_frequency=SHORT_SLEEP, - first_measurement_delay=0, - disconnect_warning=True, - ignore_samples=0, - interface="wlan0", -): - # yapf: disable - connected_rssi = collections.OrderedDict( - [('time_stamp', []), - ('bssid', []), ('ssid', []), ('frequency', []), - ('signal_poll_rssi', empty_rssi_result()), - ('signal_poll_avg_rssi', empty_rssi_result()), - ('chain_0_rssi', empty_rssi_result()), - ('chain_1_rssi', empty_rssi_result())]) - - # yapf: enable - previous_bssid = "disconnected" - t0 = time.time() - time.sleep(first_measurement_delay) - for idx in range(num_measurements): - measurement_start_time = time.time() - connected_rssi["time_stamp"].append(measurement_start_time - t0) - # Get signal poll 
RSSI - try: - status_output = dut.adb.shell("wpa_cli -i {} status".format(interface)) - except: - status_output = "" - match = re.search("bssid=.*", status_output) - if match: - current_bssid = match.group(0).split("=")[1] - connected_rssi["bssid"].append(current_bssid) - else: - current_bssid = "disconnected" - connected_rssi["bssid"].append(current_bssid) - if disconnect_warning and previous_bssid != "disconnected": - logging.warning("WIFI DISCONNECT DETECTED!") - - previous_bssid = current_bssid - match = re.search("\s+ssid=.*", status_output) - if match: - ssid = match.group(0).split("=")[1] - connected_rssi["ssid"].append(ssid) - else: - connected_rssi["ssid"].append("disconnected") - - # TODO: SEARCH MAP ; PICK CENTER CHANNEL - match = re.search("\s+freq=.*", status_output) - if match: - frequency = int(match.group(0).split("=")[1]) - connected_rssi["frequency"].append(frequency) - else: - connected_rssi["frequency"].append(RSSI_ERROR_VAL) - - if interface == "wlan0": - try: - per_chain_rssi = dut.adb.shell("wl phy_rssi_ant") - chain_0_rssi = re.search( - r"rssi\[0\]\s(?P<chain_0_rssi>[0-9\-]*)", per_chain_rssi - ) - if chain_0_rssi: - chain_0_rssi = int(chain_0_rssi.group("chain_0_rssi")) - else: - chain_0_rssi = -float("inf") - chain_1_rssi = re.search( - r"rssi\[1\]\s(?P<chain_1_rssi>[0-9\-]*)", per_chain_rssi - ) - if chain_1_rssi: - chain_1_rssi = int(chain_1_rssi.group("chain_1_rssi")) - else: - chain_1_rssi = -float("inf") - except: - chain_0_rssi = RSSI_ERROR_VAL - chain_1_rssi = RSSI_ERROR_VAL - connected_rssi["chain_0_rssi"]["data"].append(chain_0_rssi) - connected_rssi["chain_1_rssi"]["data"].append(chain_1_rssi) - combined_rssi = math.pow(10, chain_0_rssi / 10) + math.pow( - 10, chain_1_rssi / 10 - ) - combined_rssi = 10 * math.log10(combined_rssi) - connected_rssi["signal_poll_rssi"]["data"].append(combined_rssi) - connected_rssi["signal_poll_avg_rssi"]["data"].append(combined_rssi) - else: - try: - signal_poll_output = dut.adb.shell( - "wpa_cli 
-i {} signal_poll".format(interface) - ) - except: - signal_poll_output = "" - match = re.search("RSSI=.*", signal_poll_output) - if match: - temp_rssi = int(match.group(0).split("=")[1]) - if temp_rssi == -9999 or temp_rssi == 0: - connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL) - else: - connected_rssi["signal_poll_rssi"]["data"].append(temp_rssi) - else: - connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL) - connected_rssi["chain_0_rssi"]["data"].append(RSSI_ERROR_VAL) - connected_rssi["chain_1_rssi"]["data"].append(RSSI_ERROR_VAL) - measurement_elapsed_time = time.time() - measurement_start_time - time.sleep(max(0, polling_frequency - measurement_elapsed_time)) - - # Statistics, Statistics - for key, val in connected_rssi.copy().items(): - if "data" not in val: - continue - filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)] - if len(filtered_rssi_values) > ignore_samples: - filtered_rssi_values = filtered_rssi_values[ignore_samples:] - if filtered_rssi_values: - connected_rssi[key]["mean"] = statistics.mean(filtered_rssi_values) - if len(filtered_rssi_values) > 1: - connected_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values) - else: - connected_rssi[key]["stdev"] = 0 - else: - connected_rssi[key]["mean"] = RSSI_ERROR_VAL - connected_rssi[key]["stdev"] = RSSI_ERROR_VAL - - return connected_rssi - - -def get_scan_rssi(dut, tracked_bssids, num_measurements=1): - scan_rssi = collections.OrderedDict() - for bssid in tracked_bssids: - scan_rssi[bssid] = empty_rssi_result() - for idx in range(num_measurements): - scan_output = dut.adb.shell("cmd wifi start-scan") - time.sleep(MED_SLEEP) - scan_output = dut.adb.shell("cmd wifi list-scan-results") - for bssid in tracked_bssids: - bssid_result = re.search(bssid + ".*", scan_output, flags=re.IGNORECASE) - if bssid_result: - bssid_result = bssid_result.group(0).split() - scan_rssi[bssid]["data"].append(int(bssid_result[2])) - else: - 
scan_rssi[bssid]["data"].append(RSSI_ERROR_VAL) - # Compute mean RSSIs. Only average valid readings. - # Output RSSI_ERROR_VAL if no readings found. - for key, val in scan_rssi.items(): - filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)] - if filtered_rssi_values: - scan_rssi[key]["mean"] = statistics.mean(filtered_rssi_values) - if len(filtered_rssi_values) > 1: - scan_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values) - else: - scan_rssi[key]["stdev"] = 0 - else: - scan_rssi[key]["mean"] = RSSI_ERROR_VAL - scan_rssi[key]["stdev"] = RSSI_ERROR_VAL - return scan_rssi - - -def get_sw_signature(dut): - bdf_output = dut.adb.shell("cksum /vendor/firmware/bcmdhd*") - logging.debug("BDF Checksum output: {}".format(bdf_output)) - bdf_signature = ( - sum([int(line.split(" ")[0]) for line in bdf_output.splitlines()]) % 1000 - ) - - fw_version = dut.adb.shell("getprop vendor.wlan.firmware.version") - driver_version = dut.adb.shell("getprop vendor.wlan.driver.version") - logging.debug( - "Firmware version : {}. 
Driver version: {}".format(fw_version, driver_version) - ) - fw_signature = "{}+{}".format(fw_version, driver_version) - fw_signature = int(hashlib.md5(fw_signature.encode()).hexdigest(), 16) % 1000 - serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000 - return { - "config_signature": bdf_signature, - "fw_signature": fw_signature, - "serial_hash": serial_hash, - } - - -def get_country_code(dut): - try: - country_code = dut.adb.shell("wl country").split(" ")[0] - except: - country_code = "XZ" - if country_code == "XZ": - country_code = "WW" - logging.debug("Country code: {}".format(country_code)) - return country_code - - -def push_config(dut, config_file): - config_files_list = dut.adb.shell("ls /vendor/etc/*.cal").splitlines() - for dst_file in config_files_list: - dut.push_system_file(config_file, dst_file) - dut.reboot() - - -def start_wifi_logging(dut): - pass - - -def stop_wifi_logging(dut): - pass - - -def push_firmware(dut, firmware_files): - """Function to push Wifi firmware files - - Args: - dut: dut to push bdf file to - firmware_files: path to wlanmdsp.mbn file - datamsc_file: path to Data.msc file - """ - for file in firmware_files: - dut.push_system_file(file, "/vendor/firmware/") - dut.reboot() - - -def disable_beamforming(dut): - dut.adb.shell("wl txbf 0") - - -def set_nss_capability(dut, nss): - dut.adb.shell("wl he omi -r {} -t {}".format(nss, nss)) - - -def set_chain_mask(dut, chain): - if chain == "2x2": - chain = 3 - else: - chain = chain + 1 - # Get current chain mask - try: - curr_tx_chain = int(dut.adb.shell("wl txchain")) - curr_rx_chain = int(dut.adb.shell("wl rxchain")) - except: - curr_tx_chain = -1 - curr_rx_chain = -1 - if curr_tx_chain == chain and curr_rx_chain == chain: - return - # Set chain mask if needed - dut.adb.shell("wl down") - time.sleep(VERY_SHORT_SLEEP) - dut.adb.shell("wl txchain 0x{}".format(chain)) - dut.adb.shell("wl rxchain 0x{}".format(chain)) - dut.adb.shell("wl up") - - -class LinkLayerStats: 
- LLSTATS_CMD = "wl dump ampdu; wl counters;" - LL_STATS_CLEAR_CMD = "wl dump_clear ampdu; wl reset_cnts;" - BW_REGEX = re.compile(r"Chanspec:.+ (?P<bandwidth>[0-9]+)MHz") - MCS_REGEX = re.compile(r"(?P<count>[0-9]+)\((?P<percent>[0-9]+)%\)") - RX_REGEX = re.compile( - r"RX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)" - "\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)" - ) - TX_REGEX = re.compile( - r"TX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)" - "\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)" - ) - TX_PER_REGEX = re.compile( - r"(?P<mode>\S+) PER\s+:\s*(?P<nss1>[0-9, ,(,),%]*)" - "\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)" - ) - RX_FCS_REGEX = re.compile( - r"rxbadfcs (?P<rx_bad_fcs>[0-9]*).+\n.+goodfcs (?P<rx_good_fcs>[0-9]*)" - ) - RX_AGG_REGEX = re.compile(r"rxmpduperampdu (?P<aggregation>[0-9]*)") - TX_AGG_REGEX = re.compile(r" mpduperampdu (?P<aggregation>[0-9]*)") - TX_AGG_STOP_REGEX = re.compile( - r"agg stop reason: tot_agg_tried (?P<agg_tried>[0-9]+) agg_txcancel (?P<agg_canceled>[0-9]+) (?P<agg_stop_reason>.+)" - ) - TX_AGG_STOP_REASON_REGEX = re.compile( - r"(?P<reason>\w+) [0-9]+ \((?P<value>[0-9]+%)\)" - ) - MCS_ID = collections.namedtuple( - "mcs_id", ["mode", "num_streams", "bandwidth", "mcs", "gi"] - ) - MODE_MAP = {"0": "11a/g", "1": "11b", "2": "11n", "3": "11ac"} - BW_MAP = {"0": 20, "1": 40, "2": 80} - - def __init__(self, dut, llstats_enabled=True): - self.dut = dut - self.llstats_enabled = llstats_enabled - self.llstats_cumulative = self._empty_llstats() - self.llstats_incremental = self._empty_llstats() - - def update_stats(self): - if self.llstats_enabled: - try: - llstats_output = self.dut.adb.shell(self.LLSTATS_CMD, timeout=1) - self.dut.adb.shell_nb(self.LL_STATS_CLEAR_CMD) - - wl_join = self.dut.adb.shell("wl status") - self.bandwidth = int( - re.search(self.BW_REGEX, wl_join).group("bandwidth") - ) - except: - llstats_output = "" - else: - llstats_output = "" - self._update_stats(llstats_output) - - def reset_stats(self): - self.llstats_cumulative = 
self._empty_llstats() - self.llstats_incremental = self._empty_llstats() - - def _empty_llstats(self): - return collections.OrderedDict( - mcs_stats=collections.OrderedDict(), - mpdu_stats=collections.OrderedDict(), - summary=collections.OrderedDict(), - ) - - def _empty_mcs_stat(self): - return collections.OrderedDict( - txmpdu=0, rxmpdu=0, mpdu_lost=0, retries=0, retries_short=0, retries_long=0 - ) - - def _mcs_id_to_string(self, mcs_id): - mcs_string = "{} Nss{} MCS{} GI{}".format( - mcs_id.mode, mcs_id.num_streams, mcs_id.mcs, mcs_id.gi - ) - return mcs_string - - def _parse_mcs_stats(self, llstats_output): - llstats_dict = {} - # Look for per-peer stats - match = re.search(self.RX_REGEX, llstats_output) - if not match: - self.reset_stats() - return collections.OrderedDict() - # Find and process all matches for per stream stats - rx_match_iter = re.finditer(self.RX_REGEX, llstats_output) - tx_match_iter = re.finditer(self.TX_REGEX, llstats_output) - tx_per_match_iter = re.finditer(self.TX_PER_REGEX, llstats_output) - for rx_match, tx_match, tx_per_match in zip( - rx_match_iter, tx_match_iter, tx_per_match_iter - ): - mode = rx_match.group("mode") - mode = "HT" if mode == "MCS" else mode - for nss in [1, 2]: - rx_mcs_iter = re.finditer(self.MCS_REGEX, rx_match.group(nss + 1)) - tx_mcs_iter = re.finditer(self.MCS_REGEX, tx_match.group(nss + 1)) - tx_per_iter = re.finditer(self.MCS_REGEX, tx_per_match.group(nss + 1)) - for mcs, (rx_mcs_stats, tx_mcs_stats, tx_per_mcs_stats) in enumerate( - itertools.zip_longest(rx_mcs_iter, tx_mcs_iter, tx_per_iter) - ): - current_mcs = self.MCS_ID( - mode, - nss, - self.bandwidth, - mcs + int(8 * (mode == "HT") * (nss - 1)), - 0, - ) - current_stats = collections.OrderedDict( - txmpdu=int(tx_mcs_stats.group("count")) if tx_mcs_stats else 0, - rxmpdu=int(rx_mcs_stats.group("count")) if rx_mcs_stats else 0, - mpdu_lost=0, - retries=tx_per_mcs_stats.group("count") - if tx_per_mcs_stats - else 0, - retries_short=0, - retries_long=0, 
- mcs_id=current_mcs, - ) - llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats - return llstats_dict - - def _parse_mpdu_stats(self, llstats_output): - rx_agg_match = re.search(self.RX_AGG_REGEX, llstats_output) - tx_agg_match = re.search(self.TX_AGG_REGEX, llstats_output) - tx_agg_stop_match = re.search(self.TX_AGG_STOP_REGEX, llstats_output) - rx_fcs_match = re.search(self.RX_FCS_REGEX, llstats_output) - - if rx_agg_match and tx_agg_match and tx_agg_stop_match and rx_fcs_match: - agg_stop_dict = collections.OrderedDict( - rx_aggregation=int(rx_agg_match.group("aggregation")), - tx_aggregation=int(tx_agg_match.group("aggregation")), - tx_agg_tried=int(tx_agg_stop_match.group("agg_tried")), - tx_agg_canceled=int(tx_agg_stop_match.group("agg_canceled")), - rx_good_fcs=int(rx_fcs_match.group("rx_good_fcs")), - rx_bad_fcs=int(rx_fcs_match.group("rx_bad_fcs")), - agg_stop_reason=collections.OrderedDict(), - ) - agg_reason_match = re.finditer( - self.TX_AGG_STOP_REASON_REGEX, - tx_agg_stop_match.group("agg_stop_reason"), - ) - for reason_match in agg_reason_match: - agg_stop_dict["agg_stop_reason"][ - reason_match.group("reason") - ] = reason_match.group("value") - - else: - agg_stop_dict = collections.OrderedDict( - rx_aggregation=0, - tx_aggregation=0, - tx_agg_tried=0, - tx_agg_canceled=0, - rx_good_fcs=0, - rx_bad_fcs=0, - agg_stop_reason=None, - ) - return agg_stop_dict - - def _generate_stats_summary(self, llstats_dict): - llstats_summary = collections.OrderedDict( - common_tx_mcs=None, - common_tx_mcs_count=0, - common_tx_mcs_freq=0, - common_rx_mcs=None, - common_rx_mcs_count=0, - common_rx_mcs_freq=0, - rx_per=float("nan"), - ) - mcs_ids = [] - tx_mpdu = [] - rx_mpdu = [] - phy_rates = [] - for mcs_str, mcs_stats in llstats_dict["mcs_stats"].items(): - mcs_id = mcs_stats["mcs_id"] - mcs_ids.append(mcs_str) - tx_mpdu.append(mcs_stats["txmpdu"]) - rx_mpdu.append(mcs_stats["rxmpdu"]) - phy_rates.append( - 
RATE_TABLE[mcs_id.mode][mcs_id.num_streams][mcs_id.bandwidth][ - mcs_id.mcs - ] - ) - if len(tx_mpdu) == 0 or len(rx_mpdu) == 0: - return llstats_summary - llstats_summary["common_tx_mcs"] = mcs_ids[numpy.argmax(tx_mpdu)] - llstats_summary["common_tx_mcs_count"] = numpy.max(tx_mpdu) - llstats_summary["common_rx_mcs"] = mcs_ids[numpy.argmax(rx_mpdu)] - llstats_summary["common_rx_mcs_count"] = numpy.max(rx_mpdu) - if sum(tx_mpdu) and sum(rx_mpdu): - llstats_summary["mean_tx_phy_rate"] = numpy.average( - phy_rates, weights=tx_mpdu - ) - llstats_summary["mean_rx_phy_rate"] = numpy.average( - phy_rates, weights=rx_mpdu - ) - llstats_summary["common_tx_mcs_freq"] = llstats_summary[ - "common_tx_mcs_count" - ] / sum(tx_mpdu) - llstats_summary["common_rx_mcs_freq"] = llstats_summary[ - "common_rx_mcs_count" - ] / sum(rx_mpdu) - total_rx_frames = ( - llstats_dict["mpdu_stats"]["rx_good_fcs"] - + llstats_dict["mpdu_stats"]["rx_bad_fcs"] - ) - if total_rx_frames: - llstats_summary["rx_per"] = ( - llstats_dict["mpdu_stats"]["rx_bad_fcs"] / (total_rx_frames) - ) * 100 - return llstats_summary - - def _update_stats(self, llstats_output): - self.llstats_cumulative = self._empty_llstats() - self.llstats_incremental = self._empty_llstats() - self.llstats_incremental["raw_output"] = llstats_output - self.llstats_incremental["mcs_stats"] = self._parse_mcs_stats(llstats_output) - self.llstats_incremental["mpdu_stats"] = self._parse_mpdu_stats(llstats_output) - self.llstats_incremental["summary"] = self._generate_stats_summary( - self.llstats_incremental - ) - self.llstats_cumulative["summary"] = self._generate_stats_summary( - self.llstats_cumulative - )
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py deleted file mode 100644 index 8d0dff5..0000000 --- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py +++ /dev/null
@@ -1,132 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re - -RTT_REGEX = re.compile(r"^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)") -LOSS_REGEX = re.compile(r"(?P<loss>\S+)% packet loss") - - -class PingResult(object): - """An object that contains the results of running ping command. - - Attributes: - connected: True if a connection was made. False otherwise. - packet_loss_percentage: The total percentage of packets lost. - transmission_times: The list of PingTransmissionTimes containing the - timestamps gathered for transmitted packets. - rtts: An list-like object enumerating all round-trip-times of - transmitted packets. - timestamps: A list-like object enumerating the beginning timestamps of - each packet transmission. - ping_interarrivals: A list-like object enumerating the amount of time - between the beginning of each subsequent transmission. 
- """ - - def __init__(self, ping_output): - self.packet_loss_percentage = 100 - self.transmission_times = [] - - self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt) - self.timestamps = _ListWrap( - self.transmission_times, lambda entry: entry.timestamp - ) - self.ping_interarrivals = _PingInterarrivals(self.transmission_times) - - self.start_time = 0 - for line in ping_output: - if "loss" in line: - match = re.search(LOSS_REGEX, line) - self.packet_loss_percentage = float(match.group("loss")) - if "time=" in line: - match = re.search(RTT_REGEX, line) - if self.start_time == 0: - self.start_time = float(match.group("timestamp")) - self.transmission_times.append( - PingTransmissionTimes( - float(match.group("timestamp")) - self.start_time, - float(match.group("rtt")), - ) - ) - self.connected = len(ping_output) > 1 and self.packet_loss_percentage < 100 - - def __getitem__(self, item): - if item == "rtt": - return self.rtts - if item == "connected": - return self.connected - if item == "packet_loss_percentage": - return self.packet_loss_percentage - raise ValueError("Invalid key. Please use an attribute instead.") - - def as_dict(self): - return { - "connected": 1 if self.connected else 0, - "rtt": list(self.rtts), - "time_stamp": list(self.timestamps), - "ping_interarrivals": list(self.ping_interarrivals), - "packet_loss_percentage": self.packet_loss_percentage, - } - - -class PingTransmissionTimes(object): - """A class that holds the timestamps for a packet sent via the ping command. - - Attributes: - rtt: The round trip time for the packet sent. - timestamp: The timestamp the packet started its trip. 
- """ - - def __init__(self, timestamp, rtt): - self.rtt = rtt - self.timestamp = timestamp - - -class _ListWrap(object): - """A convenient helper class for treating list iterators as native lists.""" - - def __init__(self, wrapped_list, func): - self.__wrapped_list = wrapped_list - self.__func = func - - def __getitem__(self, key): - return self.__func(self.__wrapped_list[key]) - - def __iter__(self): - for item in self.__wrapped_list: - yield self.__func(item) - - def __len__(self): - return len(self.__wrapped_list) - - -class _PingInterarrivals(object): - """A helper class for treating ping interarrivals as a native list.""" - - def __init__(self, ping_entries): - self.__ping_entries = ping_entries - - def __getitem__(self, key): - return ( - self.__ping_entries[key + 1].timestamp - self.__ping_entries[key].timestamp - ) - - def __iter__(self): - for index in range(len(self.__ping_entries) - 1): - yield self[index] - - def __len__(self): - return max(0, len(self.__ping_entries) - 1)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py deleted file mode 100644 index 2f50cf1..0000000 --- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py +++ /dev/null
@@ -1,470 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import hashlib -import logging -import math -import os -import re -import statistics -import time - -from mobly import asserts - -SHORT_SLEEP = 1 -MED_SLEEP = 6 -STATION_DUMP = "iw {} station dump" -SCAN = "wpa_cli scan" -SCAN_RESULTS = "wpa_cli scan_results" -SIGNAL_POLL = "wpa_cli signal_poll" -WPA_CLI_STATUS = "wpa_cli status" -RSSI_ERROR_VAL = float("nan") -FW_REGEX = re.compile(r"FW:(?P<firmware>\S+) HW:") - - -# Rssi Utilities -def empty_rssi_result(): - return collections.OrderedDict([("data", []), ("mean", None), ("stdev", None)]) - - -def get_connected_rssi( - dut, - num_measurements=1, - polling_frequency=SHORT_SLEEP, - first_measurement_delay=0, - disconnect_warning=True, - ignore_samples=0, - interface="wlan0", -): - # yapf: disable - connected_rssi = collections.OrderedDict( - [('time_stamp', []), - ('bssid', []), ('ssid', []), ('frequency', []), - ('signal_poll_rssi', empty_rssi_result()), - ('signal_poll_avg_rssi', empty_rssi_result()), - ('chain_0_rssi', empty_rssi_result()), - ('chain_1_rssi', empty_rssi_result())]) - # yapf: enable - previous_bssid = "disconnected" - t0 = time.time() - time.sleep(first_measurement_delay) - for idx in range(num_measurements): - measurement_start_time = time.time() - connected_rssi["time_stamp"].append(measurement_start_time - t0) - # Get signal poll RSSI - try: - 
status_output = dut.adb.shell("wpa_cli -i {} status".format(interface)) - except: - status_output = "" - match = re.search("bssid=.*", status_output) - if match: - current_bssid = match.group(0).split("=")[1] - connected_rssi["bssid"].append(current_bssid) - else: - current_bssid = "disconnected" - connected_rssi["bssid"].append(current_bssid) - if disconnect_warning and previous_bssid != "disconnected": - logging.warning("WIFI DISCONNECT DETECTED!") - previous_bssid = current_bssid - match = re.search("\s+ssid=.*", status_output) - if match: - ssid = match.group(0).split("=")[1] - connected_rssi["ssid"].append(ssid) - else: - connected_rssi["ssid"].append("disconnected") - try: - signal_poll_output = dut.adb.shell( - "wpa_cli -i {} signal_poll".format(interface) - ) - except: - signal_poll_output = "" - match = re.search("FREQUENCY=.*", signal_poll_output) - if match: - frequency = int(match.group(0).split("=")[1]) - connected_rssi["frequency"].append(frequency) - else: - connected_rssi["frequency"].append(RSSI_ERROR_VAL) - match = re.search("RSSI=.*", signal_poll_output) - if match: - temp_rssi = int(match.group(0).split("=")[1]) - if temp_rssi == -9999 or temp_rssi == 0: - connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL) - else: - connected_rssi["signal_poll_rssi"]["data"].append(temp_rssi) - else: - connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL) - match = re.search("AVG_RSSI=.*", signal_poll_output) - if match: - connected_rssi["signal_poll_avg_rssi"]["data"].append( - int(match.group(0).split("=")[1]) - ) - else: - connected_rssi["signal_poll_avg_rssi"]["data"].append(RSSI_ERROR_VAL) - - # Get per chain RSSI - try: - per_chain_rssi = dut.adb.shell(STATION_DUMP.format(interface)) - except: - per_chain_rssi = "" - match = re.search(".*signal avg:.*", per_chain_rssi) - if match: - per_chain_rssi = per_chain_rssi[ - per_chain_rssi.find("[") + 1 : per_chain_rssi.find("]") - ] - per_chain_rssi = per_chain_rssi.split(", ") - 
connected_rssi["chain_0_rssi"]["data"].append(int(per_chain_rssi[0])) - connected_rssi["chain_1_rssi"]["data"].append(int(per_chain_rssi[1])) - else: - connected_rssi["chain_0_rssi"]["data"].append(RSSI_ERROR_VAL) - connected_rssi["chain_1_rssi"]["data"].append(RSSI_ERROR_VAL) - measurement_elapsed_time = time.time() - measurement_start_time - time.sleep(max(0, polling_frequency - measurement_elapsed_time)) - - # Compute mean RSSIs. Only average valid readings. - # Output RSSI_ERROR_VAL if no valid connected readings found. - for key, val in connected_rssi.copy().items(): - if "data" not in val: - continue - filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)] - if len(filtered_rssi_values) > ignore_samples: - filtered_rssi_values = filtered_rssi_values[ignore_samples:] - if filtered_rssi_values: - connected_rssi[key]["mean"] = statistics.mean(filtered_rssi_values) - if len(filtered_rssi_values) > 1: - connected_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values) - else: - connected_rssi[key]["stdev"] = 0 - else: - connected_rssi[key]["mean"] = RSSI_ERROR_VAL - connected_rssi[key]["stdev"] = RSSI_ERROR_VAL - return connected_rssi - - -def get_scan_rssi(dut, tracked_bssids, num_measurements=1): - scan_rssi = collections.OrderedDict() - for bssid in tracked_bssids: - scan_rssi[bssid] = empty_rssi_result() - for idx in range(num_measurements): - scan_output = dut.adb.shell(SCAN) - time.sleep(MED_SLEEP) - scan_output = dut.adb.shell(SCAN_RESULTS) - for bssid in tracked_bssids: - bssid_result = re.search(bssid + ".*", scan_output, flags=re.IGNORECASE) - if bssid_result: - bssid_result = bssid_result.group(0).split("\t") - scan_rssi[bssid]["data"].append(int(bssid_result[2])) - else: - scan_rssi[bssid]["data"].append(RSSI_ERROR_VAL) - # Compute mean RSSIs. Only average valid readings. - # Output RSSI_ERROR_VAL if no readings found. 
- for key, val in scan_rssi.items(): - filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)] - if filtered_rssi_values: - scan_rssi[key]["mean"] = statistics.mean(filtered_rssi_values) - if len(filtered_rssi_values) > 1: - scan_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values) - else: - scan_rssi[key]["stdev"] = 0 - else: - scan_rssi[key]["mean"] = RSSI_ERROR_VAL - scan_rssi[key]["stdev"] = RSSI_ERROR_VAL - return scan_rssi - - -def get_sw_signature(dut): - bdf_output = dut.adb.shell("cksum /vendor/firmware/bdwlan*") - logging.debug("BDF Checksum output: {}".format(bdf_output)) - bdf_signature = ( - sum([int(line.split(" ")[0]) for line in bdf_output.splitlines()]) % 1000 - ) - - fw_output = dut.adb.shell("halutil -logger -get fw") - logging.debug("Firmware version output: {}".format(fw_output)) - fw_version = re.search(FW_REGEX, fw_output).group("firmware") - fw_signature = fw_version.split(".")[-3:-1] - fw_signature = float(".".join(fw_signature)) - serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000 - return { - "config_signature": bdf_signature, - "fw_signature": fw_signature, - "serial_hash": serial_hash, - } - - -def get_country_code(dut): - country_code = dut.adb.shell("iw reg get | grep country | head -1") - country_code = country_code.split(":")[0].split(" ")[1] - if country_code == "00": - country_code = "WW" - return country_code - - -def push_config(dut, config_file): - config_files_list = dut.adb.shell("ls /vendor/firmware/bdwlan*").splitlines() - for dst_file in config_files_list: - dut.push_system_file(config_file, dst_file) - dut.reboot() - - -def start_wifi_logging(dut): - dut.droid.wifiEnableVerboseLogging(1) - msg = "Failed to enable WiFi verbose logging." 
- asserts.assert_equal(dut.droid.wifiGetVerboseLoggingLevel(), 1, msg) - logging.info("Starting CNSS logs") - dut.adb.shell( - "find /data/vendor/wifi/wlan_logs/ -type f -delete", ignore_status=True - ) - dut.adb.shell_nb("cnss_diag -f -s") - - -def stop_wifi_logging(dut): - logging.info("Stopping CNSS logs") - dut.adb.shell("killall cnss_diag") - logs = dut.get_file_names("/data/vendor/wifi/wlan_logs/") - if logs: - dut.log.info("Pulling cnss_diag logs %s", logs) - log_path = os.path.join(dut.device_log_path, "CNSS_DIAG_%s" % dut.serial) - os.makedirs(log_path, exist_ok=True) - dut.pull_files(logs, log_path) - - -def push_firmware(dut, firmware_files): - """Function to push Wifi firmware files - - Args: - dut: dut to push bdf file to - firmware_files: path to wlanmdsp.mbn file - datamsc_file: path to Data.msc file - """ - for file in firmware_files: - dut.push_system_file(file, "/vendor/firmware/") - dut.reboot() - - -def _set_ini_fields(ini_file_path, ini_field_dict): - template_regex = r"^{}=[0-9,.x-]+" - with open(ini_file_path, "r") as f: - ini_lines = f.read().splitlines() - for idx, line in enumerate(ini_lines): - for field_name, field_value in ini_field_dict.items(): - line_regex = re.compile(template_regex.format(field_name)) - if re.match(line_regex, line): - ini_lines[idx] = "{}={}".format(field_name, field_value) - print(ini_lines[idx]) - with open(ini_file_path, "w") as f: - f.write("\n".join(ini_lines) + "\n") - - -def _edit_dut_ini(dut, ini_fields): - """Function to edit Wifi ini files.""" - dut_ini_path = "/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini" - local_ini_path = os.path.expanduser("~/WCNSS_qcom_cfg.ini") - dut.pull_files(dut_ini_path, local_ini_path) - - _set_ini_fields(local_ini_path, ini_fields) - - dut.push_system_file(local_ini_path, dut_ini_path) - dut.reboot() - - -def set_chain_mask(dut, chain_mask): - curr_mask = getattr(dut, "chain_mask", "2x2") - if curr_mask == chain_mask: - return - dut.chain_mask = chain_mask - if chain_mask 
== "2x2": - ini_fields = { - "gEnable2x2": 2, - "gSetTxChainmask1x1": 1, - "gSetRxChainmask1x1": 1, - "gDualMacFeatureDisable": 6, - "gDot11Mode": 0, - } - else: - ini_fields = { - "gEnable2x2": 0, - "gSetTxChainmask1x1": chain_mask + 1, - "gSetRxChainmask1x1": chain_mask + 1, - "gDualMacFeatureDisable": 1, - "gDot11Mode": 0, - } - _edit_dut_ini(dut, ini_fields) - - -def set_wifi_mode(dut, mode): - TX_MODE_DICT = { - "Auto": 0, - "11n": 4, - "11ac": 9, - "11abg": 1, - "11b": 2, - "11": 3, - "11g only": 5, - "11n only": 6, - "11b only": 7, - "11ac only": 8, - } - - ini_fields = { - "gEnable2x2": 2, - "gSetTxChainmask1x1": 1, - "gSetRxChainmask1x1": 1, - "gDualMacFeatureDisable": 6, - "gDot11Mode": TX_MODE_DICT[mode], - } - _edit_dut_ini(dut, ini_fields) - - -class LinkLayerStats: - LLSTATS_CMD = "cat /d/wlan0/ll_stats" - PEER_REGEX = "LL_STATS_PEER_ALL" - MCS_REGEX = re.compile( - r"preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), " - "mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), " - "rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), " - "retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), " - "retries_long: (?P<retries_long>\S+)" - ) - MCS_ID = collections.namedtuple( - "mcs_id", ["mode", "num_streams", "bandwidth", "mcs", "rate"] - ) - MODE_MAP = {"0": "11a/g", "1": "11b", "2": "11n", "3": "11ac"} - BW_MAP = {"0": 20, "1": 40, "2": 80} - - def __init__(self, dut, llstats_enabled=True): - self.dut = dut - self.llstats_enabled = llstats_enabled - self.llstats_cumulative = self._empty_llstats() - self.llstats_incremental = self._empty_llstats() - - def update_stats(self): - if self.llstats_enabled: - try: - llstats_output = self.dut.adb.shell(self.LLSTATS_CMD, timeout=0.1) - except: - llstats_output = "" - else: - llstats_output = "" - self._update_stats(llstats_output) - - def reset_stats(self): - self.llstats_cumulative = self._empty_llstats() - self.llstats_incremental = self._empty_llstats() - - def 
_empty_llstats(self): - return collections.OrderedDict( - mcs_stats=collections.OrderedDict(), summary=collections.OrderedDict() - ) - - def _empty_mcs_stat(self): - return collections.OrderedDict( - txmpdu=0, rxmpdu=0, mpdu_lost=0, retries=0, retries_short=0, retries_long=0 - ) - - def _mcs_id_to_string(self, mcs_id): - mcs_string = "{} {}MHz Nss{} MCS{} {}Mbps".format( - mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs, mcs_id.rate - ) - return mcs_string - - def _parse_mcs_stats(self, llstats_output): - llstats_dict = {} - # Look for per-peer stats - match = re.search(self.PEER_REGEX, llstats_output) - if not match: - self.reset_stats() - return collections.OrderedDict() - # Find and process all matches for per stream stats - match_iter = re.finditer(self.MCS_REGEX, llstats_output) - for match in match_iter: - current_mcs = self.MCS_ID( - self.MODE_MAP[match.group("mode")], - int(match.group("num_streams")) + 1, - self.BW_MAP[match.group("bw")], - int(match.group("mcs")), - int(match.group("rate"), 16) / 1000, - ) - current_stats = collections.OrderedDict( - txmpdu=int(match.group("txmpdu")), - rxmpdu=int(match.group("rxmpdu")), - mpdu_lost=int(match.group("mpdu_lost")), - retries=int(match.group("retries")), - retries_short=int(match.group("retries_short")), - retries_long=int(match.group("retries_long")), - ) - llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats - return llstats_dict - - def _diff_mcs_stats(self, new_stats, old_stats): - stats_diff = collections.OrderedDict() - for stat_key in new_stats.keys(): - stats_diff[stat_key] = new_stats[stat_key] - old_stats[stat_key] - return stats_diff - - def _generate_stats_summary(self, llstats_dict): - llstats_summary = collections.OrderedDict( - common_tx_mcs=None, - common_tx_mcs_count=0, - common_tx_mcs_freq=0, - common_rx_mcs=None, - common_rx_mcs_count=0, - common_rx_mcs_freq=0, - rx_per=float("nan"), - ) - - txmpdu_count = 0 - rxmpdu_count = 0 - for mcs_id, mcs_stats in 
llstats_dict["mcs_stats"].items(): - if mcs_stats["txmpdu"] > llstats_summary["common_tx_mcs_count"]: - llstats_summary["common_tx_mcs"] = mcs_id - llstats_summary["common_tx_mcs_count"] = mcs_stats["txmpdu"] - if mcs_stats["rxmpdu"] > llstats_summary["common_rx_mcs_count"]: - llstats_summary["common_rx_mcs"] = mcs_id - llstats_summary["common_rx_mcs_count"] = mcs_stats["rxmpdu"] - txmpdu_count += mcs_stats["txmpdu"] - rxmpdu_count += mcs_stats["rxmpdu"] - if txmpdu_count: - llstats_summary["common_tx_mcs_freq"] = ( - llstats_summary["common_tx_mcs_count"] / txmpdu_count - ) - if rxmpdu_count: - llstats_summary["common_rx_mcs_freq"] = ( - llstats_summary["common_rx_mcs_count"] / rxmpdu_count - ) - return llstats_summary - - def _update_stats(self, llstats_output): - # Parse stats - new_llstats = self._empty_llstats() - new_llstats["mcs_stats"] = self._parse_mcs_stats(llstats_output) - # Save old stats and set new cumulative stats - old_llstats = self.llstats_cumulative.copy() - self.llstats_cumulative = new_llstats.copy() - # Compute difference between new and old stats - self.llstats_incremental = self._empty_llstats() - for mcs_id, new_mcs_stats in new_llstats["mcs_stats"].items(): - old_mcs_stats = old_llstats["mcs_stats"].get(mcs_id, self._empty_mcs_stat()) - self.llstats_incremental["mcs_stats"][mcs_id] = self._diff_mcs_stats( - new_mcs_stats, old_mcs_stats - ) - # Generate llstats summary - self.llstats_incremental["summary"] = self._generate_stats_summary( - self.llstats_incremental - ) - self.llstats_cumulative["summary"] = self._generate_stats_summary( - self.llstats_cumulative - )
diff --git a/src/antlion/test_utils/wifi/wifi_power_test_utils.py b/src/antlion/test_utils/wifi/wifi_power_test_utils.py deleted file mode 100644 index dba8461..0000000 --- a/src/antlion/test_utils/wifi/wifi_power_test_utils.py +++ /dev/null
@@ -1,416 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import time -from antlion import utils -from antlion.libs.proc import job -from antlion.controllers.ap_lib import bridge_interface as bi -from antlion.test_utils.wifi import wifi_test_utils as wutils -from antlion.controllers.adb_lib.error import AdbCommandError -from antlion.controllers.ap_lib import hostapd_security -from antlion.controllers.ap_lib import hostapd_ap_preset - -# http://www.secdev.org/projects/scapy/ -# On ubuntu, sudo pip3 install scapy -import scapy.all as scapy - -GET_FROM_PHONE = "get_from_dut" -GET_FROM_AP = "get_from_ap" -ENABLED_MODULATED_DTIM = "gEnableModulatedDTIM=" -MAX_MODULATED_DTIM = "gMaxLIModulatedDTIM=" - - -def change_dtim(ad, gEnableModulatedDTIM, gMaxLIModulatedDTIM=10): - """Function to change the DTIM setting in the phone. - - Args: - ad: the target android device, AndroidDevice object - gEnableModulatedDTIM: Modulated DTIM, int - gMaxLIModulatedDTIM: Maximum modulated DTIM, int - """ - ad.log.info("Sets dtim to {}".format(gEnableModulatedDTIM)) - - # In P21 the dtim setting method changed and an AdbCommandError will take - # place to get ini_file_phone. Thus add try/except block for the old method. - # If error occurs, use change_dtim_adb method later. 
Otherwise, first trying - # to find the ini file with DTIM settings - try: - ini_file_phone = ad.adb.shell("ls /vendor/firmware/wlan/*/*.ini") - - except AdbCommandError as e: - # Gets AdbCommandError, change dtim later with change_dtim_adb merthod. - # change_dtim_adb requires that wifi connection is on. - ad.log.info("Gets AdbCommandError, change dtim with change_dtim_adb.") - change_dtim_adb(ad, gEnableModulatedDTIM) - return 0 - - ini_file_local = ini_file_phone.split("/")[-1] - - # Pull the file and change the DTIM to desired value - ad.adb.pull("{} {}".format(ini_file_phone, ini_file_local)) - - with open(ini_file_local, "r") as fin: - for line in fin: - if ENABLED_MODULATED_DTIM in line: - gE_old = line.strip("\n") - gEDTIM_old = line.strip(ENABLED_MODULATED_DTIM).strip("\n") - if MAX_MODULATED_DTIM in line: - gM_old = line.strip("\n") - gMDTIM_old = line.strip(MAX_MODULATED_DTIM).strip("\n") - fin.close() - if ( - int(gEDTIM_old) == gEnableModulatedDTIM - and int(gMDTIM_old) == gMaxLIModulatedDTIM - ): - ad.log.info("Current DTIM is already the desired value," "no need to reset it") - return 0 - - gE_new = ENABLED_MODULATED_DTIM + str(gEnableModulatedDTIM) - gM_new = MAX_MODULATED_DTIM + str(gMaxLIModulatedDTIM) - - sed_gE = "sed -i 's/{}/{}/g' {}".format(gE_old, gE_new, ini_file_local) - sed_gM = "sed -i 's/{}/{}/g' {}".format(gM_old, gM_new, ini_file_local) - job.run(sed_gE) - job.run(sed_gM) - - # Push the file to the phone - push_file_to_phone(ad, ini_file_local, ini_file_phone) - ad.log.info("DTIM changes checked in and rebooting...") - ad.reboot() - # Wait for auto-wifi feature to start - time.sleep(20) - ad.adb.shell("dumpsys battery set level 100") - ad.log.info("DTIM updated and device back from reboot") - return 1 - - -def change_dtim_adb(ad, gEnableModulatedDTIM): - """Function to change the DTIM setting in the P21 phone. - - This method should be run after connecting wifi. 
- - Args: - ad: the target android device, AndroidDevice object - gEnableModulatedDTIM: Modulated DTIM, int - """ - ad.log.info("Changes DTIM to {} with adb".format(gEnableModulatedDTIM)) - ad.adb.root() - screen_status = ad.adb.shell("dumpsys nfc | grep Screen") - screen_is_on = "ON_UNLOCKED" in screen_status - - # To read the dtim with 'adb shell wl bcn_li_dtim', the screen should be off - if screen_is_on: - ad.log.info("The screen is on. Set it to off before change dtim") - ad.droid.goToSleepNow() - time_limit_seconds = 60 - _wait_screen_off(ad, time_limit_seconds) - - old_dtim = _read_dtim_adb(ad) - ad.log.info("The dtim before change is {}".format(old_dtim)) - try: - if int(old_dtim) == gEnableModulatedDTIM: - ad.log.info( - "Current DTIM is already the desired value," "no need to reset it" - ) - if screen_is_on: - ad.log.info("Changes the screen to the original on status") - ad.droid.wakeUpNow() - return - except Exception as e: - ad.log.info("old_dtim is not available from adb") - - current_dtim = _set_dtim(ad, gEnableModulatedDTIM) - ad.log.info("Old DTIM is {}, current DTIM is {}".format(old_dtim, current_dtim)) - if screen_is_on: - ad.log.info("Changes the screen to the original on status") - ad.droid.wakeUpNow() - - -def _set_dtim(ad, gEnableModulatedDTIM): - out = ad.adb.shell("halutil -dtim_config {}".format(gEnableModulatedDTIM)) - ad.log.info("set dtim to {}, stdout: {}".format(gEnableModulatedDTIM, out)) - return _read_dtim_adb(ad) - - -def _read_dtim_adb(ad): - try: - old_dtim = ad.adb.shell("wl bcn_li_dtim") - return old_dtim - except Exception as e: - ad.log.info("When reading dtim get error {}".format(e)) - return "The dtim value is not available from adb" - - -def _wait_screen_off(ad, time_limit_seconds): - while time_limit_seconds > 0: - screen_status = ad.adb.shell("dumpsys nfc | grep Screen") - if "OFF_UNLOCKED" in screen_status: - ad.log.info("The screen status is {}".format(screen_status)) - return - time.sleep(1) - time_limit_seconds -= 1 
- raise TimeoutError( - "Timed out while waiting the screen off after {} " - "seconds.".format(time_limit_seconds) - ) - - -def push_file_to_phone(ad, file_local, file_phone): - """Function to push local file to android phone. - - Args: - ad: the target android device - file_local: the locla file to push - file_phone: the file/directory on the phone to be pushed - """ - ad.adb.root() - cmd_out = ad.adb.remount() - if "Permission denied" in cmd_out: - ad.log.info("Need to disable verity first and reboot") - ad.adb.disable_verity() - time.sleep(1) - ad.reboot() - ad.log.info("Verity disabled and device back from reboot") - ad.adb.root() - ad.adb.remount() - time.sleep(1) - ad.adb.push("{} {}".format(file_local, file_phone)) - - -def ap_setup(ap, network, bandwidth=80, dtim_period=None): - """Set up the whirlwind AP with provided network info. - - Args: - ap: access_point object of the AP - network: dict with information of the network, including ssid, password - bssid, channel etc. - bandwidth: the operation bandwidth for the AP, default 80MHz - dtim_period: the dtim period of access point - Returns: - brconfigs: the bridge interface configs - """ - log = logging.getLogger() - bss_settings = [] - ssid = network[wutils.WifiEnums.SSID_KEY] - if "password" in network.keys(): - password = network["password"] - security = hostapd_security.Security(security_mode="wpa", password=password) - else: - security = hostapd_security.Security(security_mode=None, password=None) - channel = network["channel"] - config = hostapd_ap_preset.create_ap_preset( - channel=channel, - ssid=ssid, - dtim_period=dtim_period, - security=security, - bss_settings=bss_settings, - vht_bandwidth=bandwidth, - profile_name="whirlwind", - iface_wlan_2g=ap.wlan_2g, - iface_wlan_5g=ap.wlan_5g, - ) - config_bridge = ap.generate_bridge_configs(channel) - brconfigs = bi.BridgeInterfaceConfigs( - config_bridge[0], config_bridge[1], config_bridge[2] - ) - ap.bridge.startup(brconfigs) - ap.start_ap(config) - 
log.info("AP started on channel {} with SSID {}".format(channel, ssid)) - return brconfigs - - -def run_iperf_client_nonblocking(ad, server_host, extra_args=""): - """Start iperf client on the device with nohup. - - Return status as true if iperf client start successfully. - And data flow information as results. - - Args: - ad: the android device under test - server_host: Address of the iperf server. - extra_args: A string representing extra arguments for iperf client, - e.g. "-i 1 -t 30". - - """ - log = logging.getLogger() - ad.adb.shell_nb( - "nohup >/dev/null 2>&1 sh -c 'iperf3 -c {} {} &'".format( - server_host, extra_args - ) - ) - log.info("IPerf client started") - - -def get_wifi_rssi(ad): - """Get the RSSI of the device. - - Args: - ad: the android device under test - Returns: - RSSI: the rssi level of the device - """ - RSSI = ad.droid.wifiGetConnectionInfo()["rssi"] - return RSSI - - -def get_phone_ip(ad): - """Get the WiFi IP address of the phone. - - Args: - ad: the android device under test - Returns: - IP: IP address of the phone for WiFi, as a string - """ - IP = ad.droid.connectivityGetIPv4Addresses("wlan0")[0] - - return IP - - -def get_phone_mac(ad): - """Get the WiFi MAC address of the phone. - - Args: - ad: the android device under test - Returns: - mac: MAC address of the phone for WiFi, as a string - """ - mac = ad.droid.wifiGetConnectionInfo()["mac_address"] - - return mac - - -def get_phone_ipv6(ad): - """Get the WiFi IPV6 address of the phone. - - Args: - ad: the android device under test - Returns: - IPv6: IPv6 address of the phone for WiFi, as a string - """ - IPv6 = ad.droid.connectivityGetLinkLocalIpv6Address("wlan0")[:-6] - - return IPv6 - - -def wait_for_dhcp(interface_name): - """Wait the DHCP address assigned to desired interface. - - Getting DHCP address takes time and the wait time isn't constant. 
Utilizing - utils.timeout to keep trying until success - - Args: - interface_name: desired interface name - Returns: - ip: ip address of the desired interface name - Raise: - TimeoutError: After timeout, if no DHCP assigned, raise - """ - log = logging.getLogger() - reset_host_interface(interface_name) - start_time = time.time() - time_limit_seconds = 60 - ip = "0.0.0.0" - while start_time + time_limit_seconds > time.time(): - ip = scapy.get_if_addr(interface_name) - if ip == "0.0.0.0": - time.sleep(1) - else: - log.info("DHCP address assigned to %s as %s" % (interface_name, ip)) - return ip - raise TimeoutError( - "Timed out while getting if_addr after %s seconds." % time_limit_seconds - ) - - -def reset_host_interface(intferface_name): - """Reset the host interface. - - Args: - intferface_name: the desired interface to reset - """ - log = logging.getLogger() - intf_down_cmd = "ifconfig %s down" % intferface_name - intf_up_cmd = "ifconfig %s up" % intferface_name - try: - job.run(intf_down_cmd) - time.sleep(10) - job.run(intf_up_cmd) - log.info("{} has been reset".format(intferface_name)) - except job.Error: - raise Exception("No such interface") - - -def bringdown_host_interface(intferface_name): - """Reset the host interface. 
- - Args: - intferface_name: the desired interface to reset - """ - log = logging.getLogger() - intf_down_cmd = "ifconfig %s down" % intferface_name - try: - job.run(intf_down_cmd) - time.sleep(2) - log.info("{} has been brought down".format(intferface_name)) - except job.Error: - raise Exception("No such interface") - - -def create_pkt_config(test_class): - """Creates the config for generating multicast packets - - Args: - test_class: object with all networking paramters - - Returns: - Dictionary with the multicast packet config - """ - addr_type = ( - scapy.IPV6_ADDR_LINKLOCAL - if test_class.ipv6_src_type == "LINK_LOCAL" - else scapy.IPV6_ADDR_GLOBAL - ) - - mac_dst = test_class.mac_dst - if GET_FROM_PHONE in test_class.mac_dst: - mac_dst = get_phone_mac(test_class.dut) - - ipv4_dst = test_class.ipv4_dst - if GET_FROM_PHONE in test_class.ipv4_dst: - ipv4_dst = get_phone_ip(test_class.dut) - - ipv6_dst = test_class.ipv6_dst - if GET_FROM_PHONE in test_class.ipv6_dst: - ipv6_dst = get_phone_ipv6(test_class.dut) - - ipv4_gw = test_class.ipv4_gwt - if GET_FROM_AP in test_class.ipv4_gwt: - ipv4_gw = test_class.access_point.ssh_settings.hostname - - pkt_gen_config = { - "interf": test_class.pkt_sender.interface, - "subnet_mask": test_class.sub_mask, - "src_mac": test_class.mac_src, - "dst_mac": mac_dst, - "src_ipv4": test_class.ipv4_src, - "dst_ipv4": ipv4_dst, - "src_ipv6": test_class.ipv6_src, - "src_ipv6_type": addr_type, - "dst_ipv6": ipv6_dst, - "gw_ipv4": ipv4_gw, - } - return pkt_gen_config
diff --git a/src/antlion/test_utils/wifi/wifi_test_utils.py b/src/antlion/test_utils/wifi/wifi_test_utils.py deleted file mode 100755 index 9c04b59..0000000 --- a/src/antlion/test_utils/wifi/wifi_test_utils.py +++ /dev/null
@@ -1,2999 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import re -import shutil -import time - -from collections import namedtuple -from enum import IntEnum -from queue import Empty - -from tenacity import retry, stop_after_attempt, wait_fixed - -from antlion import context -from antlion import signals -from antlion import utils -from antlion.controllers import attenuator -from antlion.controllers.ap_lib import hostapd_security -from antlion.controllers.ap_lib import hostapd_ap_preset -from antlion.controllers.ap_lib.hostapd_constants import BAND_2G -from antlion.controllers.ap_lib.hostapd_constants import BAND_5G -from antlion.test_utils.wifi import wifi_constants - -from mobly import asserts - -# Default timeout used for reboot, toggle WiFi and Airplane mode, -# for the system to settle down after the operation. -DEFAULT_TIMEOUT = 10 -# Number of seconds to wait for events that are supposed to happen quickly. -# Like onSuccess for start background scan and confirmation on wifi state -# change. -SHORT_TIMEOUT = 30 -ROAMING_TIMEOUT = 30 -WIFI_CONNECTION_TIMEOUT_DEFAULT = 30 -DEFAULT_SCAN_TRIES = 3 -DEFAULT_CONNECT_TRIES = 3 -# Speed of light in m/s. 
-SPEED_OF_LIGHT = 299792458 - -DEFAULT_PING_ADDR = "https://www.google.com/robots.txt" - -CNSS_DIAG_CONFIG_PATH = "/data/vendor/wifi/cnss_diag/" -CNSS_DIAG_CONFIG_FILE = "cnss_diag.conf" - -ROAMING_ATTN = { - "AP1_on_AP2_off": [0, 0, 95, 95], - "AP1_off_AP2_on": [95, 95, 0, 0], - "default": [0, 0, 0, 0], -} - - -class WifiEnums: - SSID_KEY = "SSID" # Used for Wifi & SoftAp - SSID_PATTERN_KEY = "ssidPattern" - NETID_KEY = "network_id" - BSSID_KEY = "BSSID" # Used for Wifi & SoftAp - BSSID_PATTERN_KEY = "bssidPattern" - PWD_KEY = "password" # Used for Wifi & SoftAp - frequency_key = "frequency" - HIDDEN_KEY = "hiddenSSID" # Used for Wifi & SoftAp - IS_APP_INTERACTION_REQUIRED = "isAppInteractionRequired" - IS_USER_INTERACTION_REQUIRED = "isUserInteractionRequired" - IS_SUGGESTION_METERED = "isMetered" - PRIORITY = "priority" - SECURITY = "security" # Used for Wifi & SoftAp - - # Used for SoftAp - AP_BAND_KEY = "apBand" - AP_CHANNEL_KEY = "apChannel" - AP_BANDS_KEY = "apBands" - AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies" - AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting" - AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = ( - "BridgedModeOpportunisticShutdownEnabled" - ) - AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled" - AP_MAXCLIENTS_KEY = "MaxNumberOfClients" - AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis" - AP_SHUTDOWNTIMEOUTENABLE_KEY = "AutoShutdownEnabled" - AP_CLIENTCONTROL_KEY = "ClientControlByUserEnabled" - AP_ALLOWEDLIST_KEY = "AllowedClientList" - AP_BLOCKEDLIST_KEY = "BlockedClientList" - - WIFI_CONFIG_SOFTAP_BAND_2G = 1 - WIFI_CONFIG_SOFTAP_BAND_5G = 2 - WIFI_CONFIG_SOFTAP_BAND_2G_5G = 3 - WIFI_CONFIG_SOFTAP_BAND_6G = 4 - WIFI_CONFIG_SOFTAP_BAND_2G_6G = 5 - WIFI_CONFIG_SOFTAP_BAND_5G_6G = 6 - WIFI_CONFIG_SOFTAP_BAND_ANY = 7 - - # DO NOT USE IT for new test case! 
Replaced by WIFI_CONFIG_SOFTAP_BAND_ - WIFI_CONFIG_APBAND_2G = WIFI_CONFIG_SOFTAP_BAND_2G - WIFI_CONFIG_APBAND_5G = WIFI_CONFIG_SOFTAP_BAND_5G - WIFI_CONFIG_APBAND_AUTO = WIFI_CONFIG_SOFTAP_BAND_2G_5G - - WIFI_CONFIG_APBAND_2G_OLD = 0 - WIFI_CONFIG_APBAND_5G_OLD = 1 - WIFI_CONFIG_APBAND_AUTO_OLD = -1 - - WIFI_WPS_INFO_PBC = 0 - WIFI_WPS_INFO_DISPLAY = 1 - WIFI_WPS_INFO_KEYPAD = 2 - WIFI_WPS_INFO_LABEL = 3 - WIFI_WPS_INFO_INVALID = 4 - - class SoftApSecurityType: - OPEN = "NONE" - WPA2 = "WPA2_PSK" - WPA3_SAE_TRANSITION = "WPA3_SAE_TRANSITION" - WPA3_SAE = "WPA3_SAE" - - class CountryCode: - AUSTRALIA = "AU" - CHINA = "CN" - GERMANY = "DE" - JAPAN = "JP" - UK = "GB" - US = "US" - UNKNOWN = "UNKNOWN" - - # Start of Macros for EAP - # EAP types - class Eap(IntEnum): - NONE = -1 - PEAP = 0 - TLS = 1 - TTLS = 2 - PWD = 3 - SIM = 4 - AKA = 5 - AKA_PRIME = 6 - UNAUTH_TLS = 7 - - # EAP Phase2 types - class EapPhase2(IntEnum): - NONE = 0 - PAP = 1 - MSCHAP = 2 - MSCHAPV2 = 3 - GTC = 4 - - class Enterprise: - # Enterprise Config Macros - EMPTY_VALUE = "NULL" - EAP = "eap" - PHASE2 = "phase2" - IDENTITY = "identity" - ANON_IDENTITY = "anonymous_identity" - PASSWORD = "password" - SUBJECT_MATCH = "subject_match" - ALTSUBJECT_MATCH = "altsubject_match" - DOM_SUFFIX_MATCH = "domain_suffix_match" - CLIENT_CERT = "client_cert" - CA_CERT = "ca_cert" - ENGINE = "engine" - ENGINE_ID = "engine_id" - PRIVATE_KEY_ID = "key_id" - REALM = "realm" - PLMN = "plmn" - FQDN = "FQDN" - FRIENDLY_NAME = "providerFriendlyName" - ROAMING_IDS = "roamingConsortiumIds" - OCSP = "ocsp" - - # End of Macros for EAP - - class ScanResult: - CHANNEL_WIDTH_20MHZ = 0 - CHANNEL_WIDTH_40MHZ = 1 - CHANNEL_WIDTH_80MHZ = 2 - CHANNEL_WIDTH_160MHZ = 3 - CHANNEL_WIDTH_80MHZ_PLUS_MHZ = 4 - - # Macros for wifi rtt. - class RttType(IntEnum): - TYPE_ONE_SIDED = 1 - TYPE_TWO_SIDED = 2 - - class RttPeerType(IntEnum): - PEER_TYPE_AP = 1 - PEER_TYPE_STA = 2 # Requires NAN. 
- PEER_P2P_GO = 3 - PEER_P2P_CLIENT = 4 - PEER_NAN = 5 - - class RttPreamble(IntEnum): - PREAMBLE_LEGACY = 0x01 - PREAMBLE_HT = 0x02 - PREAMBLE_VHT = 0x04 - - class RttBW(IntEnum): - BW_5_SUPPORT = 0x01 - BW_10_SUPPORT = 0x02 - BW_20_SUPPORT = 0x04 - BW_40_SUPPORT = 0x08 - BW_80_SUPPORT = 0x10 - BW_160_SUPPORT = 0x20 - - class Rtt(IntEnum): - STATUS_SUCCESS = 0 - STATUS_FAILURE = 1 - STATUS_FAIL_NO_RSP = 2 - STATUS_FAIL_REJECTED = 3 - STATUS_FAIL_NOT_SCHEDULED_YET = 4 - STATUS_FAIL_TM_TIMEOUT = 5 - STATUS_FAIL_AP_ON_DIFF_CHANNEL = 6 - STATUS_FAIL_NO_CAPABILITY = 7 - STATUS_ABORTED = 8 - STATUS_FAIL_INVALID_TS = 9 - STATUS_FAIL_PROTOCOL = 10 - STATUS_FAIL_SCHEDULE = 11 - STATUS_FAIL_BUSY_TRY_LATER = 12 - STATUS_INVALID_REQ = 13 - STATUS_NO_WIFI = 14 - STATUS_FAIL_FTM_PARAM_OVERRIDE = 15 - - REASON_UNSPECIFIED = -1 - REASON_NOT_AVAILABLE = -2 - REASON_INVALID_LISTENER = -3 - REASON_INVALID_REQUEST = -4 - - class RttParam: - device_type = "deviceType" - request_type = "requestType" - BSSID = "bssid" - channel_width = "channelWidth" - frequency = "frequency" - center_freq0 = "centerFreq0" - center_freq1 = "centerFreq1" - number_burst = "numberBurst" - interval = "interval" - num_samples_per_burst = "numSamplesPerBurst" - num_retries_per_measurement_frame = "numRetriesPerMeasurementFrame" - num_retries_per_FTMR = "numRetriesPerFTMR" - lci_request = "LCIRequest" - lcr_request = "LCRRequest" - burst_timeout = "burstTimeout" - preamble = "preamble" - bandwidth = "bandwidth" - margin = "margin" - - RTT_MARGIN_OF_ERROR = { - RttBW.BW_80_SUPPORT: 2, - RttBW.BW_40_SUPPORT: 5, - RttBW.BW_20_SUPPORT: 5, - } - - # Macros as specified in the WifiScanner code. 
- WIFI_BAND_UNSPECIFIED = 0 # not specified - WIFI_BAND_24_GHZ = 1 # 2.4 GHz band - WIFI_BAND_5_GHZ = 2 # 5 GHz band without DFS channels - WIFI_BAND_5_GHZ_DFS_ONLY = 4 # 5 GHz band with DFS channels - WIFI_BAND_5_GHZ_WITH_DFS = 6 # 5 GHz band with DFS channels - WIFI_BAND_BOTH = 3 # both bands without DFS channels - WIFI_BAND_BOTH_WITH_DFS = 7 # both bands with DFS channels - - REPORT_EVENT_AFTER_BUFFER_FULL = 0 - REPORT_EVENT_AFTER_EACH_SCAN = 1 - REPORT_EVENT_FULL_SCAN_RESULT = 2 - - SCAN_TYPE_LOW_LATENCY = 0 - SCAN_TYPE_LOW_POWER = 1 - SCAN_TYPE_HIGH_ACCURACY = 2 - - # US Wifi frequencies - ALL_2G_FREQUENCIES = [ - 2412, - 2417, - 2422, - 2427, - 2432, - 2437, - 2442, - 2447, - 2452, - 2457, - 2462, - ] - DFS_5G_FREQUENCIES = [ - 5260, - 5280, - 5300, - 5320, - 5500, - 5520, - 5540, - 5560, - 5580, - 5600, - 5620, - 5640, - 5660, - 5680, - 5700, - 5720, - ] - NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825] - ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES - - band_to_frequencies = { - WIFI_BAND_24_GHZ: ALL_2G_FREQUENCIES, - WIFI_BAND_5_GHZ: NONE_DFS_5G_FREQUENCIES, - WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES, - WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES, - WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES, - WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES, - } - - # TODO: add all of the band mapping. - softap_band_frequencies = { - WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES, - WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES, - } - - # All Wifi frequencies to channels lookup. 
- freq_to_channel = { - 2412: 1, - 2417: 2, - 2422: 3, - 2427: 4, - 2432: 5, - 2437: 6, - 2442: 7, - 2447: 8, - 2452: 9, - 2457: 10, - 2462: 11, - 2467: 12, - 2472: 13, - 2484: 14, - 4915: 183, - 4920: 184, - 4925: 185, - 4935: 187, - 4940: 188, - 4945: 189, - 4960: 192, - 4980: 196, - 5035: 7, - 5040: 8, - 5045: 9, - 5055: 11, - 5060: 12, - 5080: 16, - 5170: 34, - 5180: 36, - 5190: 38, - 5200: 40, - 5210: 42, - 5220: 44, - 5230: 46, - 5240: 48, - 5260: 52, - 5280: 56, - 5300: 60, - 5320: 64, - 5500: 100, - 5520: 104, - 5540: 108, - 5560: 112, - 5580: 116, - 5600: 120, - 5620: 124, - 5640: 128, - 5660: 132, - 5680: 136, - 5700: 140, - 5745: 149, - 5765: 153, - 5785: 157, - 5795: 159, - 5805: 161, - 5825: 165, - } - - # All Wifi channels to frequencies lookup. - channel_2G_to_freq = { - 1: 2412, - 2: 2417, - 3: 2422, - 4: 2427, - 5: 2432, - 6: 2437, - 7: 2442, - 8: 2447, - 9: 2452, - 10: 2457, - 11: 2462, - 12: 2467, - 13: 2472, - 14: 2484, - } - - channel_5G_to_freq = { - 183: 4915, - 184: 4920, - 185: 4925, - 187: 4935, - 188: 4940, - 189: 4945, - 192: 4960, - 196: 4980, - 7: 5035, - 8: 5040, - 9: 5045, - 11: 5055, - 12: 5060, - 16: 5080, - 34: 5170, - 36: 5180, - 38: 5190, - 40: 5200, - 42: 5210, - 44: 5220, - 46: 5230, - 48: 5240, - 50: 5250, - 52: 5260, - 56: 5280, - 60: 5300, - 64: 5320, - 100: 5500, - 104: 5520, - 108: 5540, - 112: 5560, - 116: 5580, - 120: 5600, - 124: 5620, - 128: 5640, - 132: 5660, - 136: 5680, - 140: 5700, - 149: 5745, - 151: 5755, - 153: 5765, - 155: 5775, - 157: 5785, - 159: 5795, - 161: 5805, - 165: 5825, - } - - channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)} - - channel_to_freq = { - "2G": channel_2G_to_freq, - "5G": channel_5G_to_freq, - "6G": channel_6G_to_freq, - } - - -class WifiChannelBase: - ALL_2G_FREQUENCIES = [] - DFS_5G_FREQUENCIES = [] - NONE_DFS_5G_FREQUENCIES = [] - ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES - MIX_CHANNEL_SCAN = [] - - def band_to_freq(self, band): - 
_band_to_frequencies = { - WifiEnums.WIFI_BAND_24_GHZ: self.ALL_2G_FREQUENCIES, - WifiEnums.WIFI_BAND_5_GHZ: self.NONE_DFS_5G_FREQUENCIES, - WifiEnums.WIFI_BAND_5_GHZ_DFS_ONLY: self.DFS_5G_FREQUENCIES, - WifiEnums.WIFI_BAND_5_GHZ_WITH_DFS: self.ALL_5G_FREQUENCIES, - WifiEnums.WIFI_BAND_BOTH: self.ALL_2G_FREQUENCIES - + self.NONE_DFS_5G_FREQUENCIES, - WifiEnums.WIFI_BAND_BOTH_WITH_DFS: self.ALL_5G_FREQUENCIES - + self.ALL_2G_FREQUENCIES, - } - return _band_to_frequencies[band] - - -class WifiChannelUS(WifiChannelBase): - # US Wifi frequencies - ALL_2G_FREQUENCIES = [ - 2412, - 2417, - 2422, - 2427, - 2432, - 2437, - 2442, - 2447, - 2452, - 2457, - 2462, - ] - NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825] - MIX_CHANNEL_SCAN = [ - 2412, - 2437, - 2462, - 5180, - 5200, - 5280, - 5260, - 5300, - 5500, - 5320, - 5520, - 5560, - 5700, - 5745, - 5805, - ] - - def __init__(self, model=None, support_addition_channel=[]): - if model in support_addition_channel: - self.ALL_2G_FREQUENCIES = [ - 2412, - 2417, - 2422, - 2427, - 2432, - 2437, - 2442, - 2447, - 2452, - 2457, - 2462, - 2467, - 2472, - ] - self.DFS_5G_FREQUENCIES = [ - 5260, - 5280, - 5300, - 5320, - 5500, - 5520, - 5540, - 5560, - 5580, - 5600, - 5620, - 5640, - 5660, - 5680, - 5700, - 5720, - ] - self.ALL_5G_FREQUENCIES = self.DFS_5G_FREQUENCIES + self.NONE_DFS_5G_FREQUENCIES - - -class WifiReferenceNetworks: - """Class to parse and return networks of different band and - auth type from reference_networks - """ - - def __init__(self, obj): - self.reference_networks = obj - self.WIFI_2G = "2g" - self.WIFI_5G = "5g" - - self.secure_networks_2g = [] - self.secure_networks_5g = [] - self.open_networks_2g = [] - self.open_networks_5g = [] - self._parse_networks() - - def _parse_networks(self): - for network in self.reference_networks: - for key in network: - if key == self.WIFI_2G: - if "password" in network[key]: - self.secure_networks_2g.append(network[key]) - else: - 
self.open_networks_2g.append(network[key]) - else: - if "password" in network[key]: - self.secure_networks_5g.append(network[key]) - else: - self.open_networks_5g.append(network[key]) - - def return_2g_secure_networks(self): - return self.secure_networks_2g - - def return_5g_secure_networks(self): - return self.secure_networks_5g - - def return_2g_open_networks(self): - return self.open_networks_2g - - def return_5g_open_networks(self): - return self.open_networks_5g - - def return_secure_networks(self): - return self.secure_networks_2g + self.secure_networks_5g - - def return_open_networks(self): - return self.open_networks_2g + self.open_networks_5g - - -def _assert_on_fail_handler(func, assert_on_fail, *args, **kwargs): - """Wrapper function that handles the bahevior of assert_on_fail. - - When assert_on_fail is True, let all test signals through, which can - terminate test cases directly. When assert_on_fail is False, the wrapper - raises no test signals and reports operation status by returning True or - False. - - Args: - func: The function to wrap. This function reports operation status by - raising test signals. - assert_on_fail: A boolean that specifies if the output of the wrapper - is test signal based or return value based. - args: Positional args for func. - kwargs: Name args for func. - - Returns: - If assert_on_fail is True, returns True/False to signal operation - status, otherwise return nothing. - """ - try: - func(*args, **kwargs) - if not assert_on_fail: - return True - except signals.TestSignal: - if assert_on_fail: - raise - return False - - -def assert_network_in_list(target, network_list): - """Makes sure a specified target Wi-Fi network exists in a list of Wi-Fi - networks. - - Args: - target: A dict representing a Wi-Fi network. - E.g. {WifiEnums.SSID_KEY: "SomeNetwork"} - network_list: A list of dicts, each representing a Wi-Fi network. 
- """ - match_results = match_networks(target, network_list) - asserts.assert_true( - match_results, - "Target network %s, does not exist in network list %s" % (target, network_list), - ) - - -def match_networks(target_params, networks): - """Finds the WiFi networks that match a given set of parameters in a list - of WiFi networks. - - To be considered a match, the network should contain every key-value pair - of target_params - - Args: - target_params: A dict with 1 or more key-value pairs representing a Wi-Fi network. - E.g { 'SSID': 'wh_ap1_5g', 'BSSID': '30:b5:c2:33:e4:47' } - networks: A list of dict objects representing WiFi networks. - - Returns: - The networks that match the target parameters. - """ - results = [] - asserts.assert_true( - target_params, "Expected networks object 'target_params' is empty" - ) - for n in networks: - add_network = 1 - for k, v in target_params.items(): - if k not in n: - add_network = 0 - break - if n[k] != v: - add_network = 0 - break - if add_network: - results.append(n) - return results - - -def wait_for_wifi_state(ad, state, assert_on_fail=True): - """Waits for the device to transition to the specified wifi state - - Args: - ad: An AndroidDevice object. - state: Wifi state to wait for. - assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - If assert_on_fail is False, function returns True if the device transitions - to the specified state, False otherwise. If assert_on_fail is True, no return value. - """ - return _assert_on_fail_handler( - _wait_for_wifi_state, assert_on_fail, ad, state=state - ) - - -def _wait_for_wifi_state(ad, state): - """Toggles the state of wifi. - - TestFailure signals are raised when something goes wrong. - - Args: - ad: An AndroidDevice object. - state: Wifi state to wait for. - """ - if state == ad.droid.wifiCheckState(): - # Check if the state is already achieved, so we don't wait for the - # state change event by mistake. 
- return - ad.droid.wifiStartTrackingStateChange() - fail_msg = "Device did not transition to Wi-Fi state to %s on %s." % ( - state, - ad.serial, - ) - try: - ad.ed.wait_for_event( - wifi_constants.WIFI_STATE_CHANGED, - lambda x: x["data"]["enabled"] == state, - SHORT_TIMEOUT, - ) - except Empty: - asserts.assert_equal(state, ad.droid.wifiCheckState(), fail_msg) - finally: - ad.droid.wifiStopTrackingStateChange() - - -def wifi_toggle_state(ad, new_state=None, assert_on_fail=True): - """Toggles the state of wifi. - - Args: - ad: An AndroidDevice object. - new_state: Wifi state to set to. If None, opposite of the current state. - assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - If assert_on_fail is False, function returns True if the toggle was - successful, False otherwise. If assert_on_fail is True, no return value. - """ - return _assert_on_fail_handler( - _wifi_toggle_state, assert_on_fail, ad, new_state=new_state - ) - - -def _wifi_toggle_state(ad, new_state=None): - """Toggles the state of wifi. - - TestFailure signals are raised when something goes wrong. - - Args: - ad: An AndroidDevice object. - new_state: The state to set Wi-Fi to. If None, opposite of the current - state will be set. - """ - if new_state is None: - new_state = not ad.droid.wifiCheckState() - elif new_state == ad.droid.wifiCheckState(): - # Check if the new_state is already achieved, so we don't wait for the - # state change event by mistake. - return - ad.droid.wifiStartTrackingStateChange() - ad.log.info("Setting Wi-Fi state to %s.", new_state) - ad.ed.clear_all_events() - # Setting wifi state. - ad.droid.wifiToggleState(new_state) - time.sleep(2) - fail_msg = "Failed to set Wi-Fi state to %s on %s." 
% (new_state, ad.serial) - try: - ad.ed.wait_for_event( - wifi_constants.WIFI_STATE_CHANGED, - lambda x: x["data"]["enabled"] == new_state, - SHORT_TIMEOUT, - ) - except Empty: - asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg) - finally: - ad.droid.wifiStopTrackingStateChange() - - -def reset_wifi(ad): - """Clears all saved Wi-Fi networks on a device. - - This will turn Wi-Fi on. - - Args: - ad: An AndroidDevice object. - - """ - networks = ad.droid.wifiGetConfiguredNetworks() - if not networks: - return - removed = [] - for n in networks: - if n["networkId"] not in removed: - ad.droid.wifiForgetNetwork(n["networkId"]) - removed.append(n["networkId"]) - else: - continue - try: - event = ad.ed.pop_event( - wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT - ) - except Empty: - logging.warning("Could not confirm the removal of network %s.", n) - # Check again to see if there's any network left. - asserts.assert_true( - not ad.droid.wifiGetConfiguredNetworks(), - "Failed to remove these configured Wi-Fi networks: %s" % networks, - ) - - -def toggle_airplane_mode_on_and_off(ad): - """Turn ON and OFF Airplane mode. - - ad: An AndroidDevice object. - Returns: Assert if turning on/off Airplane mode fails. - - """ - ad.log.debug("Toggling Airplane mode ON.") - asserts.assert_true( - utils.force_airplane_mode(ad, True), - "Can not turn on airplane mode on: %s" % ad.serial, - ) - time.sleep(DEFAULT_TIMEOUT) - ad.log.debug("Toggling Airplane mode OFF.") - asserts.assert_true( - utils.force_airplane_mode(ad, False), - "Can not turn on airplane mode on: %s" % ad.serial, - ) - time.sleep(DEFAULT_TIMEOUT) - - -def toggle_wifi_off_and_on(ad): - """Turn OFF and ON WiFi. - - ad: An AndroidDevice object. - Returns: Assert if turning off/on WiFi fails. 
- - """ - ad.log.debug("Toggling wifi OFF.") - wifi_toggle_state(ad, False) - time.sleep(DEFAULT_TIMEOUT) - ad.log.debug("Toggling wifi ON.") - wifi_toggle_state(ad, True) - time.sleep(DEFAULT_TIMEOUT) - - -def wifi_forget_network(ad, net_ssid): - """Remove configured Wifi network on an android device. - - Args: - ad: android_device object for forget network. - net_ssid: ssid of network to be forget - - """ - networks = ad.droid.wifiGetConfiguredNetworks() - if not networks: - return - removed = [] - for n in networks: - if net_ssid in n[WifiEnums.SSID_KEY] and n["networkId"] not in removed: - ad.droid.wifiForgetNetwork(n["networkId"]) - removed.append(n["networkId"]) - try: - event = ad.ed.pop_event( - wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT - ) - except Empty: - asserts.fail("Failed to remove network %s." % n) - break - - -def wifi_test_device_init(ad, country_code=WifiEnums.CountryCode.US): - """Initializes an android device for wifi testing. - - 0. Make sure SL4A connection is established on the android device. - 1. Disable location service's WiFi scan. - 2. Turn WiFi on. - 3. Clear all saved networks. - 4. Set country code to US. - 5. Enable WiFi verbose logging. - 6. Sync device time with computer time. - 7. Turn off cellular data. - 8. Turn off ambient display. - """ - utils.require_sl4a((ad,)) - ad.droid.wifiScannerToggleAlwaysAvailable(False) - msg = "Failed to turn off location service's scan." - asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg) - wifi_toggle_state(ad, True) - reset_wifi(ad) - ad.droid.wifiEnableVerboseLogging(1) - msg = "Failed to enable WiFi verbose logging." - asserts.assert_equal(ad.droid.wifiGetVerboseLoggingLevel(), 1, msg) - # We don't verify the following settings since they are not critical. - # Set wpa_supplicant log level to EXCESSIVE. 
- output = ad.adb.shell( - "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME=" "wlan0 log_level EXCESSIVE", - ignore_status=True, - ) - ad.log.info("wpa_supplicant log change status: %s", output) - utils.sync_device_time(ad) - ad.droid.telephonyToggleDataConnection(False) - set_wifi_country_code(ad, country_code) - utils.set_ambient_display(ad, False) - - -def set_wifi_country_code(ad, country_code): - """Sets the wifi country code on the device. - - Args: - ad: An AndroidDevice object. - country_code: 2 letter ISO country code - - Raises: - An RpcException if unable to set the country code. - """ - try: - ad.adb.shell("cmd wifi force-country-code enabled %s" % country_code) - except Exception as e: - ad.droid.wifiSetCountryCode(WifiEnums.CountryCode.US) - - -def start_wifi_connection_scan(ad): - """Starts a wifi connection scan and wait for results to become available. - - Args: - ad: An AndroidDevice object. - """ - ad.ed.clear_all_events() - ad.droid.wifiStartScan() - try: - ad.ed.pop_event("WifiManagerScanResultsAvailable", 60) - except Empty: - asserts.fail("Wi-Fi results did not become available within 60s.") - - -def start_wifi_connection_scan_and_return_status(ad): - """ - Starts a wifi connection scan and wait for results to become available - or a scan failure to be reported. - - Args: - ad: An AndroidDevice object. - Returns: - True: if scan succeeded & results are available - False: if scan failed - """ - ad.ed.clear_all_events() - ad.droid.wifiStartScan() - try: - events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)", 60) - except Empty: - asserts.fail("Wi-Fi scan results/failure did not become available within 60s.") - # If there are multiple matches, we check for atleast one success. 
- for event in events: - if event["name"] == "WifiManagerScanResultsAvailable": - return True - elif event["name"] == "WifiManagerScanFailure": - ad.log.debug("Scan failure received") - return False - - -def start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries=3): - """ - Start connectivity scans & checks if the |network_ssid| is seen in - scan results. The method performs a max of |max_tries| connectivity scans - to find the network. - - Args: - ad: An AndroidDevice object. - network_ssid: SSID of the network we are looking for. - max_tries: Number of scans to try. - Returns: - True: if network_ssid is found in scan results. - False: if network_ssid is not found in scan results. - """ - start_time = time.time() - for num_tries in range(max_tries): - if start_wifi_connection_scan_and_return_status(ad): - scan_results = ad.droid.wifiGetScanResults() - match_results = match_networks( - {WifiEnums.SSID_KEY: network_ssid}, scan_results - ) - if len(match_results) > 0: - ad.log.debug( - "Found network in %s seconds." % (time.time() - start_time) - ) - return True - ad.log.debug("Did not find network in %s seconds." % (time.time() - start_time)) - return False - - -def start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid, max_tries=3): - """ - Start connectivity scans & ensure the |network_ssid| is seen in - scan results. The method performs a max of |max_tries| connectivity scans - to find the network. - This method asserts on failure! - - Args: - ad: An AndroidDevice object. - network_ssid: SSID of the network we are looking for. - max_tries: Number of scans to try. 
- """ - ad.log.info("Starting scans to ensure %s is present", network_ssid) - assert_msg = ( - "Failed to find " + network_ssid + " in scan results" - " after " + str(max_tries) + " tries" - ) - asserts.assert_true( - start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries), - assert_msg, - ) - - -def start_wifi_connection_scan_and_ensure_network_not_found( - ad, network_ssid, max_tries=3 -): - """ - Start connectivity scans & ensure the |network_ssid| is not seen in - scan results. The method performs a max of |max_tries| connectivity scans - to find the network. - This method asserts on failure! - - Args: - ad: An AndroidDevice object. - network_ssid: SSID of the network we are looking for. - max_tries: Number of scans to try. - """ - ad.log.info("Starting scans to ensure %s is not present", network_ssid) - assert_msg = ( - "Found " + network_ssid + " in scan results" - " after " + str(max_tries) + " tries" - ) - asserts.assert_false( - start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries), - assert_msg, - ) - - -def start_wifi_background_scan(ad, scan_setting): - """Starts wifi background scan. - - Args: - ad: android_device object to initiate connection on. - scan_setting: A dict representing the settings of the scan. - - Returns: - If scan was started successfully, event data of success event is returned. 
- """ - idx = ad.droid.wifiScannerStartBackgroundScan(scan_setting) - event = ad.ed.pop_event("WifiScannerScan{}onSuccess".format(idx), SHORT_TIMEOUT) - return event["data"] - - -def save_wifi_soft_ap_config( - ad, - wifi_config, - band=None, - hidden=None, - security=None, - password=None, - channel=None, - max_clients=None, - shutdown_timeout_enable=None, - shutdown_timeout_millis=None, - client_control_enable=None, - allowedList=None, - blockedList=None, - bands=None, - channel_frequencys=None, - mac_randomization_setting=None, - bridged_opportunistic_shutdown_enabled=None, - ieee80211ax_enabled=None, -): - """Save a soft ap configuration and verified - Args: - ad: android_device to set soft ap configuration. - wifi_config: a soft ap configuration object, at least include SSID. - band: specifies the band for the soft ap. - hidden: specifies the soft ap need to broadcast its SSID or not. - security: specifies the security type for the soft ap. - password: specifies the password for the soft ap. - channel: specifies the channel for the soft ap. - max_clients: specifies the maximum connected client number. - shutdown_timeout_enable: specifies the auto shut down enable or not. - shutdown_timeout_millis: specifies the shut down timeout value. - client_control_enable: specifies the client control enable or not. - allowedList: specifies allowed clients list. - blockedList: specifies blocked clients list. - bands: specifies the band list for the soft ap. - channel_frequencys: specifies the channel frequency list for soft ap. - mac_randomization_setting: specifies the mac randomization setting. - bridged_opportunistic_shutdown_enabled: specifies the opportunistic - shutdown enable or not. - ieee80211ax_enabled: specifies the ieee80211ax enable or not. 
- """ - if security and password: - wifi_config[WifiEnums.SECURITY] = security - wifi_config[WifiEnums.PWD_KEY] = password - if hidden is not None: - wifi_config[WifiEnums.HIDDEN_KEY] = hidden - if max_clients is not None: - wifi_config[WifiEnums.AP_MAXCLIENTS_KEY] = max_clients - if shutdown_timeout_enable is not None: - wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] = shutdown_timeout_enable - if shutdown_timeout_millis is not None: - wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] = shutdown_timeout_millis - if client_control_enable is not None: - wifi_config[WifiEnums.AP_CLIENTCONTROL_KEY] = client_control_enable - if allowedList is not None: - wifi_config[WifiEnums.AP_ALLOWEDLIST_KEY] = allowedList - if blockedList is not None: - wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY] = blockedList - if mac_randomization_setting is not None: - wifi_config[ - WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY - ] = mac_randomization_setting - if bridged_opportunistic_shutdown_enabled is not None: - wifi_config[ - WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY - ] = bridged_opportunistic_shutdown_enabled - if ieee80211ax_enabled is not None: - wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY] = ieee80211ax_enabled - if channel_frequencys is not None: - wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] = channel_frequencys - elif bands is not None: - wifi_config[WifiEnums.AP_BANDS_KEY] = bands - elif band is not None: - if channel is not None: - wifi_config[WifiEnums.AP_BAND_KEY] = band - wifi_config[WifiEnums.AP_CHANNEL_KEY] = channel - else: - wifi_config[WifiEnums.AP_BAND_KEY] = band - - if ( - WifiEnums.AP_CHANNEL_KEY in wifi_config - and wifi_config[WifiEnums.AP_CHANNEL_KEY] == 0 - ): - del wifi_config[WifiEnums.AP_CHANNEL_KEY] - - if ( - WifiEnums.SECURITY in wifi_config - and wifi_config[WifiEnums.SECURITY] == WifiEnums.SoftApSecurityType.OPEN - ): - del wifi_config[WifiEnums.SECURITY] - del wifi_config[WifiEnums.PWD_KEY] - - asserts.assert_true( - 
ad.droid.wifiSetWifiApConfiguration(wifi_config), - "Failed to set WifiAp Configuration", - ) - - wifi_ap = ad.droid.wifiGetApConfiguration() - asserts.assert_true( - wifi_ap[WifiEnums.SSID_KEY] == wifi_config[WifiEnums.SSID_KEY], - "Hotspot SSID doesn't match", - ) - if WifiEnums.SECURITY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.SECURITY] == wifi_config[WifiEnums.SECURITY], - "Hotspot Security doesn't match", - ) - if WifiEnums.PWD_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.PWD_KEY] == wifi_config[WifiEnums.PWD_KEY], - "Hotspot Password doesn't match", - ) - - if WifiEnums.HIDDEN_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.HIDDEN_KEY] == wifi_config[WifiEnums.HIDDEN_KEY], - "Hotspot hidden setting doesn't match", - ) - - if WifiEnums.AP_CHANNEL_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_CHANNEL_KEY] == wifi_config[WifiEnums.AP_CHANNEL_KEY], - "Hotspot Channel doesn't match", - ) - if WifiEnums.AP_MAXCLIENTS_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_MAXCLIENTS_KEY] - == wifi_config[WifiEnums.AP_MAXCLIENTS_KEY], - "Hotspot Max Clients doesn't match", - ) - if WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] - == wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY], - "Hotspot ShutDown feature flag doesn't match", - ) - if WifiEnums.AP_SHUTDOWNTIMEOUT_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] - == wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY], - "Hotspot ShutDown timeout setting doesn't match", - ) - if WifiEnums.AP_CLIENTCONTROL_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_CLIENTCONTROL_KEY] - == wifi_config[WifiEnums.AP_CLIENTCONTROL_KEY], - "Hotspot Client control flag doesn't match", - ) - if WifiEnums.AP_ALLOWEDLIST_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_ALLOWEDLIST_KEY] - == 
wifi_config[WifiEnums.AP_ALLOWEDLIST_KEY], - "Hotspot Allowed List doesn't match", - ) - if WifiEnums.AP_BLOCKEDLIST_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_BLOCKEDLIST_KEY] - == wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY], - "Hotspot Blocked List doesn't match", - ) - - if WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY] - == wifi_config[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY], - "Hotspot Mac randomization setting doesn't match", - ) - - if WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY] - == wifi_config[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY], - "Hotspot bridged shutdown enable setting doesn't match", - ) - - if WifiEnums.AP_IEEE80211AX_ENABLED_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_IEEE80211AX_ENABLED_KEY] - == wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY], - "Hotspot 80211 AX enable setting doesn't match", - ) - - if WifiEnums.AP_CHANNEL_FREQUENCYS_KEY in wifi_config: - asserts.assert_true( - wifi_ap[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] - == wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY], - "Hotspot channels setting doesn't match", - ) - - -def toggle_wifi_and_wait_for_reconnection( - ad, network, num_of_tries=1, assert_on_fail=True -): - """Toggle wifi state and then wait for Android device to reconnect to - the provided wifi network. - - This expects the device to be already connected to the provided network. - - Logic steps are - 1. Ensure that we're connected to the network. - 2. Turn wifi off. - 3. Wait for 10 seconds. - 4. Turn wifi on. - 5. Wait for the "connected" event, then confirm the connected ssid is the - one requested. - - Args: - ad: android_device object to initiate connection on. - network: A dictionary representing the network to await connection. 
The - dictionary must have the key "SSID". - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - If assert_on_fail is False, function returns True if the toggle was - successful, False otherwise. If assert_on_fail is True, no return value. - """ - return _assert_on_fail_handler( - _toggle_wifi_and_wait_for_reconnection, - assert_on_fail, - ad, - network, - num_of_tries=num_of_tries, - ) - - -def _toggle_wifi_and_wait_for_reconnection(ad, network, num_of_tries=3): - """Toggle wifi state and then wait for Android device to reconnect to - the provided wifi network. - - This expects the device to be already connected to the provided network. - - Logic steps are - 1. Ensure that we're connected to the network. - 2. Turn wifi off. - 3. Wait for 10 seconds. - 4. Turn wifi on. - 5. Wait for the "connected" event, then confirm the connected ssid is the - one requested. - - This will directly fail a test if anything goes wrong. - - Args: - ad: android_device object to initiate connection on. - network: A dictionary representing the network to await connection. The - dictionary must have the key "SSID". - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - """ - expected_ssid = network[WifiEnums.SSID_KEY] - # First ensure that we're already connected to the provided network. - verify_con = {WifiEnums.SSID_KEY: expected_ssid} - verify_wifi_connection_info(ad, verify_con) - # Now toggle wifi state and wait for the connection event. 
- wifi_toggle_state(ad, False) - time.sleep(10) - wifi_toggle_state(ad, True) - ad.droid.wifiStartTrackingStateChange() - try: - connect_result = None - for i in range(num_of_tries): - try: - connect_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30) - break - except Empty: - pass - asserts.assert_true( - connect_result, - "Failed to connect to Wi-Fi network %s on %s" % (network, ad.serial), - ) - logging.debug("Connection result on %s: %s.", ad.serial, connect_result) - actual_ssid = connect_result["data"][WifiEnums.SSID_KEY] - asserts.assert_equal( - actual_ssid, - expected_ssid, - "Connected to the wrong network on %s." - "Expected %s, but got %s." % (ad.serial, expected_ssid, actual_ssid), - ) - logging.info("Connected to Wi-Fi network %s on %s", actual_ssid, ad.serial) - finally: - ad.droid.wifiStopTrackingStateChange() - - -def wait_for_connect( - ad, expected_ssid=None, expected_id=None, tries=2, assert_on_fail=True -): - """Wait for a connect event. - - This will directly fail a test if anything goes wrong. - - Args: - ad: An Android device object. - expected_ssid: SSID of the network to connect to. - expected_id: Network Id of the network to connect to. - tries: An integer that is the number of times to try before failing. - assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - Returns a value only if assert_on_fail is false. - Returns True if the connection was successful, False otherwise. - """ - return _assert_on_fail_handler( - _wait_for_connect, assert_on_fail, ad, expected_ssid, expected_id, tries - ) - - -def _wait_for_connect(ad, expected_ssid=None, expected_id=None, tries=2): - """Wait for a connect event. - - Args: - ad: An Android device object. - expected_ssid: SSID of the network to connect to. - expected_id: Network Id of the network to connect to. - tries: An integer that is the number of times to try before failing. 
- """ - ad.droid.wifiStartTrackingStateChange() - try: - connect_result = _wait_for_connect_event( - ad, ssid=expected_ssid, id=expected_id, tries=tries - ) - asserts.assert_true( - connect_result, "Failed to connect to Wi-Fi network %s" % expected_ssid - ) - ad.log.debug("Wi-Fi connection result: %s.", connect_result) - actual_ssid = connect_result["data"][WifiEnums.SSID_KEY] - if expected_ssid: - asserts.assert_equal( - actual_ssid, expected_ssid, "Connected to the wrong network" - ) - actual_id = connect_result["data"][WifiEnums.NETID_KEY] - if expected_id: - asserts.assert_equal( - actual_id, expected_id, "Connected to the wrong network" - ) - ad.log.info("Connected to Wi-Fi network %s.", actual_ssid) - except Empty: - asserts.fail("Failed to start connection process to %s" % expected_ssid) - except Exception as error: - ad.log.error("Failed to connect to %s with error %s", expected_ssid, error) - raise signals.TestFailure("Failed to connect to %s network" % expected_ssid) - finally: - ad.droid.wifiStopTrackingStateChange() - - -def _wait_for_connect_event(ad, ssid=None, id=None, tries=1): - """Wait for a connect event on queue and pop when available. - - Args: - ad: An Android device object. - ssid: SSID of the network to connect to. - id: Network Id of the network to connect to. - tries: An integer that is the number of times to try before failing. - - Returns: - A dict with details of the connection data, which looks like this: - { - 'time': 1485460337798, - 'name': 'WifiNetworkConnected', - 'data': { - 'rssi': -27, - 'is_24ghz': True, - 'mac_address': '02:00:00:00:00:00', - 'network_id': 1, - 'BSSID': '30:b5:c2:33:d3:fc', - 'ip_address': 117483712, - 'link_speed': 54, - 'supplicant_state': 'completed', - 'hidden_ssid': False, - 'SSID': 'wh_ap1_2g', - 'is_5ghz': False} - } - - """ - conn_result = None - - # If ssid and network id is None, just wait for any connect event. 
- if id is None and ssid is None: - for i in range(tries): - try: - conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30) - break - except Empty: - pass - else: - # If ssid or network id is specified, wait for specific connect event. - for i in range(tries): - try: - conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30) - if id and conn_result["data"][WifiEnums.NETID_KEY] == id: - break - elif ssid and conn_result["data"][WifiEnums.SSID_KEY] == ssid: - break - except Empty: - pass - - return conn_result - - -def wait_for_disconnect(ad, timeout=10): - """Wait for a disconnect event within the specified timeout. - - Args: - ad: Android device object. - timeout: Timeout in seconds. - - """ - try: - ad.droid.wifiStartTrackingStateChange() - event = ad.ed.pop_event("WifiNetworkDisconnected", timeout) - except Empty: - raise signals.TestFailure("Device did not disconnect from the network") - finally: - ad.droid.wifiStopTrackingStateChange() - - -def ensure_no_disconnect(ad, duration=10): - """Ensure that there is no disconnect for the specified duration. - - Args: - ad: Android device object. - duration: Duration in seconds. - - """ - try: - ad.droid.wifiStartTrackingStateChange() - event = ad.ed.pop_event("WifiNetworkDisconnected", duration) - raise signals.TestFailure("Device disconnected from the network") - except Empty: - pass - finally: - ad.droid.wifiStopTrackingStateChange() - - -def connect_to_wifi_network( - ad, - network, - assert_on_fail=True, - check_connectivity=True, - hidden=False, - num_of_scan_tries=DEFAULT_SCAN_TRIES, - num_of_connect_tries=DEFAULT_CONNECT_TRIES, -): - """Connection logic for open and psk wifi networks. - - Args: - ad: AndroidDevice to use for connection - network: network info of the network to connect to - assert_on_fail: If true, errors from wifi_connect will raise - test failure signals. - hidden: Is the Wifi network hidden. 
- num_of_scan_tries: The number of times to try scan - interface before declaring failure. - num_of_connect_tries: The number of times to try - connect wifi before declaring failure. - """ - if hidden: - start_wifi_connection_scan_and_ensure_network_not_found( - ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries - ) - else: - start_wifi_connection_scan_and_ensure_network_found( - ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries - ) - wifi_connect( - ad, - network, - num_of_tries=num_of_connect_tries, - assert_on_fail=assert_on_fail, - check_connectivity=check_connectivity, - ) - - -def connect_to_wifi_network_with_id(ad, network_id, network_ssid): - """Connect to the given network using network id and verify SSID. - - Args: - network_id: int Network Id of the network. - network_ssid: string SSID of the network. - - Returns: True if connect using network id was successful; - False otherwise. - - """ - start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid) - wifi_connect_by_id(ad, network_id) - connect_data = ad.droid.wifiGetConnectionInfo() - connect_ssid = connect_data[WifiEnums.SSID_KEY] - ad.log.debug( - "Expected SSID = %s Connected SSID = %s" % (network_ssid, connect_ssid) - ) - if connect_ssid != network_ssid: - return False - return True - - -def wifi_connect( - ad, network, num_of_tries=1, assert_on_fail=True, check_connectivity=True -): - """Connect an Android device to a wifi network. - - Initiate connection to a wifi network, wait for the "connected" event, then - confirm the connected ssid is the one requested. - - This will directly fail a test if anything goes wrong. - - Args: - ad: android_device object to initiate connection on. - network: A dictionary representing the network to connect to. The - dictionary must have the key "SSID". - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. 
- assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - Returns a value only if assert_on_fail is false. - Returns True if the connection was successful, False otherwise. - """ - return _assert_on_fail_handler( - _wifi_connect, - assert_on_fail, - ad, - network, - num_of_tries=num_of_tries, - check_connectivity=check_connectivity, - ) - - -def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True): - """Connect an Android device to a wifi network. - - Initiate connection to a wifi network, wait for the "connected" event, then - confirm the connected ssid is the one requested. - - This will directly fail a test if anything goes wrong. - - Args: - ad: android_device object to initiate connection on. - network: A dictionary representing the network to connect to. The - dictionary must have the key "SSID". - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - """ - asserts.assert_true( - WifiEnums.SSID_KEY in network, - "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY, - ) - ad.droid.wifiStartTrackingStateChange() - expected_ssid = network[WifiEnums.SSID_KEY] - ad.droid.wifiConnectByConfig(network) - ad.log.info("Starting connection process to %s", expected_ssid) - try: - event = ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30) - connect_result = _wait_for_connect_event( - ad, ssid=expected_ssid, tries=num_of_tries - ) - asserts.assert_true( - connect_result, - "Failed to connect to Wi-Fi network %s on %s" % (network, ad.serial), - ) - ad.log.debug("Wi-Fi connection result: %s.", connect_result) - actual_ssid = connect_result["data"][WifiEnums.SSID_KEY] - asserts.assert_equal( - actual_ssid, - expected_ssid, - "Connected to the wrong network on %s." 
% ad.serial, - ) - ad.log.info("Connected to Wi-Fi network %s.", actual_ssid) - - if check_connectivity: - internet = validate_connection(ad, DEFAULT_PING_ADDR) - if not internet: - raise signals.TestFailure( - "Failed to connect to internet on %s" % expected_ssid - ) - except Empty: - asserts.fail( - "Failed to start connection process to %s on %s" % (network, ad.serial) - ) - except Exception as error: - ad.log.error("Failed to connect to %s with error %s", expected_ssid, error) - raise signals.TestFailure("Failed to connect to %s network" % network) - - finally: - ad.droid.wifiStopTrackingStateChange() - - -def wifi_connect_by_id(ad, network_id, num_of_tries=3, assert_on_fail=True): - """Connect an Android device to a wifi network using network Id. - - Start connection to the wifi network, with the given network Id, wait for - the "connected" event, then verify the connected network is the one requested. - - This will directly fail a test if anything goes wrong. - - Args: - ad: android_device object to initiate connection on. - network_id: Integer specifying the network id of the network. - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - Returns a value only if assert_on_fail is false. - Returns True if the connection was successful, False otherwise. - """ - _assert_on_fail_handler( - _wifi_connect_by_id, assert_on_fail, ad, network_id, num_of_tries - ) - - -def _wifi_connect_by_id(ad, network_id, num_of_tries=1): - """Connect an Android device to a wifi network using it's network id. - - Start connection to the wifi network, with the given network id, wait for - the "connected" event, then verify the connected network is the one requested. - - Args: - ad: android_device object to initiate connection on. - network_id: Integer specifying the network id of the network. 
- num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - """ - ad.droid.wifiStartTrackingStateChange() - # Clear all previous events. - ad.ed.clear_all_events() - ad.droid.wifiConnectByNetworkId(network_id) - ad.log.info("Starting connection to network with id %d", network_id) - try: - event = ad.ed.pop_event(wifi_constants.CONNECT_BY_NETID_SUCCESS, 60) - connect_result = _wait_for_connect_event(ad, id=network_id, tries=num_of_tries) - asserts.assert_true( - connect_result, "Failed to connect to Wi-Fi network using network id" - ) - ad.log.debug("Wi-Fi connection result: %s", connect_result) - actual_id = connect_result["data"][WifiEnums.NETID_KEY] - asserts.assert_equal( - actual_id, - network_id, - "Connected to the wrong network on %s." - "Expected network id = %d, but got %d." - % (ad.serial, network_id, actual_id), - ) - expected_ssid = connect_result["data"][WifiEnums.SSID_KEY] - ad.log.info( - "Connected to Wi-Fi network %s with %d network id.", - expected_ssid, - network_id, - ) - - internet = validate_connection(ad, DEFAULT_PING_ADDR) - if not internet: - raise signals.TestFailure( - "Failed to connect to internet on %s" % expected_ssid - ) - except Empty: - asserts.fail( - "Failed to connect to network with id %d on %s" % (network_id, ad.serial) - ) - except Exception as error: - ad.log.error( - "Failed to connect to network with id %d with error %s", network_id, error - ) - raise signals.TestFailure( - "Failed to connect to network with network" " id %d" % network_id - ) - finally: - ad.droid.wifiStopTrackingStateChange() - - -def wifi_connect_using_network_request(ad, network, network_specifier, num_of_tries=3): - """Connect an Android device to a wifi network using network request. 
- - Trigger a network request with the provided network specifier, - wait for the "onMatch" event, ensure that the scan results in "onMatch" - event contain the specified network, then simulate the user granting the - request with the specified network selected. Then wait for the "onAvailable" - network callback indicating successful connection to network. - - Args: - ad: android_device object to initiate connection on. - network_specifier: A dictionary representing the network specifier to - use. - network: A dictionary representing the network to connect to. The - dictionary must have the key "SSID". - num_of_tries: An integer that is the number of times to try before - delaring failure. - Returns: - key: Key corresponding to network request. - """ - key = ad.droid.connectivityRequestWifiNetwork(network_specifier, 0) - ad.log.info("Sent network request %s with %s " % (key, network_specifier)) - # Need a delay here because UI interaction should only start once wifi - # starts processing the request. - time.sleep(wifi_constants.NETWORK_REQUEST_CB_REGISTER_DELAY_SEC) - _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries) - return key - - -def wait_for_wifi_connect_after_network_request( - ad, network, key, num_of_tries=3, assert_on_fail=True -): - """ - Simulate and verify the connection flow after initiating the network - request. - - Wait for the "onMatch" event, ensure that the scan results in "onMatch" - event contain the specified network, then simulate the user granting the - request with the specified network selected. Then wait for the "onAvailable" - network callback indicating successful connection to network. - - Args: - ad: android_device object to initiate connection on. - network: A dictionary representing the network to connect to. The - dictionary must have the key "SSID". - key: Key corresponding to network request. - num_of_tries: An integer that is the number of times to try before - delaring failure. 
- assert_on_fail: If True, error checks in this function will raise test - failure signals. - - Returns: - Returns a value only if assert_on_fail is false. - Returns True if the connection was successful, False otherwise. - """ - _assert_on_fail_handler( - _wait_for_wifi_connect_after_network_request, - assert_on_fail, - ad, - network, - key, - num_of_tries, - ) - - -def _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries=3): - """ - Simulate and verify the connection flow after initiating the network - request. - - Wait for the "onMatch" event, ensure that the scan results in "onMatch" - event contain the specified network, then simulate the user granting the - request with the specified network selected. Then wait for the "onAvailable" - network callback indicating successful connection to network. - - Args: - ad: android_device object to initiate connection on. - network: A dictionary representing the network to connect to. The - dictionary must have the key "SSID". - key: Key corresponding to network request. - num_of_tries: An integer that is the number of times to try before - delaring failure. - """ - asserts.assert_true( - WifiEnums.SSID_KEY in network, - "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY, - ) - ad.droid.wifiStartTrackingStateChange() - expected_ssid = network[WifiEnums.SSID_KEY] - ad.droid.wifiRegisterNetworkRequestMatchCallback() - # Wait for the platform to scan and return a list of networks - # matching the request - try: - matched_network = None - for _ in [0, num_of_tries]: - on_match_event = ad.ed.pop_event( - wifi_constants.WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH, 60 - ) - asserts.assert_true( - on_match_event, "Network request on match not received." 
- ) - matched_scan_results = on_match_event["data"] - ad.log.debug("Network request on match results %s", matched_scan_results) - matched_network = match_networks( - {WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]}, matched_scan_results - ) - ad.log.debug("Network request on match %s", matched_network) - if matched_network: - break - - asserts.assert_true(matched_network, "Target network %s not found" % network) - - ad.droid.wifiSendUserSelectionForNetworkRequestMatch(network) - ad.log.info("Sent user selection for network request %s", expected_ssid) - - # Wait for the platform to connect to the network. - connected_network = None - # WifiInfo is attached to TransportInfo only in S. - if not ad.droid.isSdkAtLeastS(): - connected_network = ad.droid.wifiGetConnectionInfo() - ad.log.info("Connected to network %s", connected_network) - asserts.assert_equal( - connected_network[WifiEnums.SSID_KEY], - expected_ssid, - "Connected to the wrong network." - "Expected %s, but got %s." % (network, connected_network), - ) - except Empty: - asserts.fail("Failed to connect to %s" % expected_ssid) - except Exception as error: - ad.log.error("Failed to connect to %s with error %s" % (expected_ssid, error)) - raise signals.TestFailure("Failed to connect to %s network" % network) - finally: - ad.droid.wifiStopTrackingStateChange() - - -def wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1, assert_on_fail=True): - """Connect an Android device to a wifi network. - - Initiate connection to a wifi network, wait for the "connected" event, then - confirm the connected ssid is the one requested. - - This will directly fail a test if anything goes wrong. - - Args: - ad: android_device object to initiate connection on. - passpoint_network: SSID of the Passpoint network to connect to. - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - assert_on_fail: If True, error checks in this function will raise test - failure signals. 
- - Returns: - If assert_on_fail is False, function returns network id, if the connect was - successful, False otherwise. If assert_on_fail is True, no return value. - """ - _assert_on_fail_handler( - _wifi_passpoint_connect, - assert_on_fail, - ad, - passpoint_network, - num_of_tries=num_of_tries, - ) - - -def _wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1): - """Connect an Android device to a wifi network. - - Initiate connection to a wifi network, wait for the "connected" event, then - confirm the connected ssid is the one requested. - - This will directly fail a test if anything goes wrong. - - Args: - ad: android_device object to initiate connection on. - passpoint_network: SSID of the Passpoint network to connect to. - num_of_tries: An integer that is the number of times to try before - delaring failure. Default is 1. - """ - ad.droid.wifiStartTrackingStateChange() - expected_ssid = passpoint_network - ad.log.info("Starting connection process to passpoint %s", expected_ssid) - - try: - connect_result = _wait_for_connect_event(ad, expected_ssid, num_of_tries) - asserts.assert_true( - connect_result, - "Failed to connect to WiFi passpoint network %s on" - " %s" % (expected_ssid, ad.serial), - ) - ad.log.info("Wi-Fi connection result: %s.", connect_result) - actual_ssid = connect_result["data"][WifiEnums.SSID_KEY] - asserts.assert_equal( - actual_ssid, - expected_ssid, - "Connected to the wrong network on %s." 
% ad.serial, - ) - ad.log.info("Connected to Wi-Fi passpoint network %s.", actual_ssid) - - internet = validate_connection(ad, DEFAULT_PING_ADDR) - if not internet: - raise signals.TestFailure( - "Failed to connect to internet on %s" % expected_ssid - ) - except Exception as error: - ad.log.error( - "Failed to connect to passpoint network %s with error %s", - expected_ssid, - error, - ) - raise signals.TestFailure( - "Failed to connect to %s passpoint network" % expected_ssid - ) - - finally: - ad.droid.wifiStopTrackingStateChange() - - -def delete_passpoint(ad, fqdn): - """Delete a required Passpoint configuration.""" - try: - ad.droid.removePasspointConfig(fqdn) - return True - except Exception as error: - ad.log.error( - "Failed to remove passpoint configuration with FQDN=%s " "and error=%s", - fqdn, - error, - ) - return False - - -def start_wifi_single_scan(ad, scan_setting): - """Starts wifi single shot scan. - - Args: - ad: android_device object to initiate connection on. - scan_setting: A dict representing the settings of the scan. - - Returns: - If scan was started successfully, event data of success event is returned. - """ - idx = ad.droid.wifiScannerStartScan(scan_setting) - event = ad.ed.pop_event("WifiScannerScan%sonSuccess" % idx, SHORT_TIMEOUT) - ad.log.debug("Got event %s", event) - return event["data"] - - -def track_connection(ad, network_ssid, check_connection_count): - """Track wifi connection to network changes for given number of counts - - Args: - ad: android_device object for forget network. - network_ssid: network ssid to which connection would be tracked - check_connection_count: Integer for maximum number network connection - check. - Returns: - True if connection to given network happen, else return False. 
- """ - ad.droid.wifiStartTrackingStateChange() - while check_connection_count > 0: - connect_network = ad.ed.pop_event("WifiNetworkConnected", 120) - ad.log.info("Connected to network %s", connect_network) - if ( - WifiEnums.SSID_KEY in connect_network["data"] - and connect_network["data"][WifiEnums.SSID_KEY] == network_ssid - ): - return True - check_connection_count -= 1 - ad.droid.wifiStopTrackingStateChange() - return False - - -def get_scan_time_and_channels(wifi_chs, scan_setting, stime_channel): - """Calculate the scan time required based on the band or channels in scan - setting - - Args: - wifi_chs: Object of channels supported - scan_setting: scan setting used for start scan - stime_channel: scan time per channel - - Returns: - scan_time: time required for completing a scan - scan_channels: channel used for scanning - """ - scan_time = 0 - scan_channels = [] - if "band" in scan_setting and "channels" not in scan_setting: - scan_channels = wifi_chs.band_to_freq(scan_setting["band"]) - elif "channels" in scan_setting and "band" not in scan_setting: - scan_channels = scan_setting["channels"] - scan_time = len(scan_channels) * stime_channel - for channel in scan_channels: - if channel in WifiEnums.DFS_5G_FREQUENCIES: - scan_time += 132 # passive scan time on DFS - return scan_time, scan_channels - - -def start_wifi_track_bssid(ad, track_setting): - """Start tracking Bssid for the given settings. - - Args: - ad: android_device object. - track_setting: Setting for which the bssid tracking should be started - - Returns: - If tracking started successfully, event data of success event is returned. 
- """ - idx = ad.droid.wifiScannerStartTrackingBssids( - track_setting["bssidInfos"], track_setting["apLostThreshold"] - ) - event = ad.ed.pop_event("WifiScannerBssid{}onSuccess".format(idx), SHORT_TIMEOUT) - return event["data"] - - -def convert_pem_key_to_pkcs8(in_file, out_file): - """Converts the key file generated by us to the format required by - Android using openssl. - - The input file must have the extension "pem". The output file must - have the extension "der". - - Args: - in_file: The original key file. - out_file: The full path to the converted key file, including - filename. - """ - asserts.assert_true(in_file.endswith(".pem"), "Input file has to be .pem.") - asserts.assert_true(out_file.endswith(".der"), "Output file has to be .der.") - cmd = ( - "openssl pkcs8 -inform PEM -in {} -outform DER -out {} -nocrypt" " -topk8" - ).format(in_file, out_file) - utils.exe_cmd(cmd) - - -def validate_connection( - ad, ping_addr=DEFAULT_PING_ADDR, wait_time=15, ping_gateway=True -): - """Validate internet connection by pinging the address provided. - - Args: - ad: android_device object. - ping_addr: address on internet for pinging. - wait_time: wait for some time before validating connection - - Returns: - ping output if successful, NULL otherwise. - """ - android_version = int(ad.adb.shell("getprop ro.vendor.build.version.release")) - # wait_time to allow for DHCP to complete. - for i in range(wait_time): - if ad.droid.connectivityNetworkIsConnected(): - if ( - android_version > 10 and ad.droid.connectivityGetIPv4DefaultGateway() - ) or android_version < 11: - break - time.sleep(1) - ping = False - try: - ping = ad.droid.httpPing(ping_addr) - ad.log.info("Http ping result: %s.", ping) - except: - pass - if android_version > 10 and not ping and ping_gateway: - ad.log.info("Http ping failed. 
Pinging default gateway") - gw = ad.droid.connectivityGetIPv4DefaultGateway() - result = ad.adb.shell("ping -c 6 {}".format(gw)) - ad.log.info("Default gateway ping result: %s" % result) - ping = False if "100% packet loss" in result else True - return ping - - -# TODO(angli): This can only verify if an actual value is exactly the same. -# Would be nice to be able to verify an actual value is one of serveral. -def verify_wifi_connection_info(ad, expected_con): - """Verifies that the information of the currently connected wifi network is - as expected. - - Args: - expected_con: A dict representing expected key-value pairs for wifi - connection. e.g. {"SSID": "test_wifi"} - """ - current_con = ad.droid.wifiGetConnectionInfo() - case_insensitive = ["BSSID", "supplicant_state"] - ad.log.debug("Current connection: %s", current_con) - for k, expected_v in expected_con.items(): - # Do not verify authentication related fields. - if k == "password": - continue - msg = "Field %s does not exist in wifi connection info %s." % (k, current_con) - if k not in current_con: - raise signals.TestFailure(msg) - actual_v = current_con[k] - if k in case_insensitive: - actual_v = actual_v.lower() - expected_v = expected_v.lower() - msg = "Expected %s to be %s, actual %s is %s." % (k, expected_v, k, actual_v) - if actual_v != expected_v: - raise signals.TestFailure(msg) - - -def check_autoconnect_to_open_network(ad, conn_timeout=WIFI_CONNECTION_TIMEOUT_DEFAULT): - """Connects to any open WiFI AP - Args: - timeout value in sec to wait for UE to connect to a WiFi AP - Returns: - True if UE connects to WiFi AP (supplicant_state = completed) - False if UE fails to complete connection within WIFI_CONNECTION_TIMEOUT time. 
- """ - if ad.droid.wifiCheckState(): - return True - ad.droid.wifiToggleState() - wifi_connection_state = None - timeout = time.time() + conn_timeout - while wifi_connection_state != "completed": - wifi_connection_state = ad.droid.wifiGetConnectionInfo()["supplicant_state"] - if time.time() > timeout: - ad.log.warning("Failed to connect to WiFi AP") - return False - return True - - -def expand_enterprise_config_by_phase2(config): - """Take an enterprise config and generate a list of configs, each with - a different phase2 auth type. - - Args: - config: A dict representing enterprise config. - - Returns - A list of enterprise configs. - """ - results = [] - phase2_types = WifiEnums.EapPhase2 - if config[WifiEnums.Enterprise.EAP] == WifiEnums.Eap.PEAP: - # Skip unsupported phase2 types for PEAP. - phase2_types = [WifiEnums.EapPhase2.GTC, WifiEnums.EapPhase2.MSCHAPV2] - for phase2_type in phase2_types: - # Skip a special case for passpoint TTLS. - if ( - WifiEnums.Enterprise.FQDN in config - and phase2_type == WifiEnums.EapPhase2.GTC - ): - continue - c = dict(config) - c[WifiEnums.Enterprise.PHASE2] = phase2_type.value - results.append(c) - return results - - -def generate_eap_test_name(config, ad=None): - """Generates a test case name based on an EAP configuration. - - Args: - config: A dict representing an EAP credential. - ad object: Redundant but required as the same param is passed - to test_func in run_generated_tests - - Returns: - A string representing the name of a generated EAP test case. 
def group_attenuators(attenuators):
    """Groups a list of attenuators into attenuator groups for backward
    compatibility reasons.

    Most legacy Wi-Fi setups have two attenuators, each connected to a
    separate AP. The new Wi-Fi setup has four attenuators, each connected to
    one channel on an AP, so two of them are connected to one AP. Grouping
    lets scripts that attenuate "one AP" work in both setups.

    Args:
        attenuators: A list of attenuator objects, either two or four long.

    Returns:
        A list [attn0, attn1] of AttenuatorGroup objects, one per AP.

    Raises:
        Test failure (via asserts.fail) if the attenuator list does not have
        two or four objects.
    """
    attn0 = attenuator.AttenuatorGroup("AP0")
    attn1 = attenuator.AttenuatorGroup("AP1")
    num_of_attns = len(attenuators)
    if num_of_attns == 2:
        # Legacy testbed setup: one attenuation channel per AP.
        attn0.add(attenuators[0])
        attn1.add(attenuators[1])
    elif num_of_attns == 4:
        attn0.add(attenuators[0])
        attn0.add(attenuators[1])
        attn1.add(attenuators[2])
        attn1.add(attenuators[3])
    else:
        asserts.fail(
            (
                "Either two or four attenuators are required for this "
                "test, but found %s"
            )
            % num_of_attns
        )
    return [attn0, attn1]


def set_attns(attenuator, attn_val_name, roaming_attn=ROAMING_ATTN):
    """Sets attenuation values on the four attenuators used in this test.

    Args:
        attenuator: The list of attenuator objects.
        attn_val_name: Name of the attenuation value pair to use.
        roaming_attn: Dictionary specifying the attenuation params.
    """
    logging.info("Set attenuation values to %s", roaming_attn[attn_val_name])
    try:
        for i in range(4):
            attenuator[i].set_atten(roaming_attn[attn_val_name][i])
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not intercepted; the error is still logged and re-raised.
        logging.exception("Failed to set attenuation values %s.", attn_val_name)
        raise


def set_attns_steps(
    attenuators, atten_val_name, roaming_attn=ROAMING_ATTN, steps=10, wait_time=12
):
    """Sets attenuation values on attenuators used in this test, changing the
    values linearly from the current value to the target value step by step.

    Args:
        attenuators: The list of attenuator objects whose attenuation values
            should be changed.
        atten_val_name: Name of the attenuation value pair to use.
        roaming_attn: Dictionary specifying the attenuation params.
        steps: Number of attenuator changes to reach the target value.
        wait_time: Sleep time in seconds after each attenuator change.
    """
    logging.info(
        "Set attenuation values to %s in %d step(s)",
        roaming_attn[atten_val_name],
        steps,
    )
    # Fix: the loop variable was named `attenuator`, shadowing the
    # `attenuator` module used elsewhere in this file.
    start_atten = [attn.get_atten() for attn in attenuators]
    target_atten = roaming_attn[atten_val_name]
    for current_step in range(steps):
        progress = (current_step + 1) / steps
        for i, attn in enumerate(attenuators):
            amount_since_start = (target_atten[i] - start_atten[i]) * progress
            attn.set_atten(round(start_atten[i] + amount_since_start))
        time.sleep(wait_time)


def trigger_roaming_and_validate(
    dut, attenuator, attn_val_name, expected_con, roaming_attn=ROAMING_ATTN
):
    """Sets attenuators to trigger roaming and validates that the DUT
    connected to the expected BSSID.

    Args:
        dut: The device under test.
        attenuator: The list of attenuator objects.
        attn_val_name: Name of the attenuation value pair to use.
        expected_con: The network information of the expected network.
        roaming_attn: Dictionary specifying the attenuation params.

    Raises:
        signals.TestFailure: if internet is not reachable after roaming.
    """
    expected_con = {
        WifiEnums.SSID_KEY: expected_con[WifiEnums.SSID_KEY],
        WifiEnums.BSSID_KEY: expected_con["bssid"],
    }
    set_attns_steps(attenuator, attn_val_name, roaming_attn)

    verify_wifi_connection_info(dut, expected_con)
    expected_bssid = expected_con[WifiEnums.BSSID_KEY]
    logging.info("Roamed to %s successfully", expected_bssid)
    if not validate_connection(dut):
        raise signals.TestFailure("Fail to connect to internet on %s" % expected_bssid)


def create_softap_config():
    """Creates a softap config with a random ssid and password."""
    ap_ssid = "softap_" + utils.rand_ascii_str(8)
    ap_password = utils.rand_ascii_str(8)
    logging.info("softap setup: %s %s", ap_ssid, ap_password)
    return {
        WifiEnums.SSID_KEY: ap_ssid,
        WifiEnums.PWD_KEY: ap_password,
    }


def wait_for_expected_number_of_softap_clients(
    ad, callbackId, expected_num_of_softap_clients
):
    """Waits for the number of softap clients to be updated as expected.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.
        expected_num_of_softap_clients: expected number of softap clients.

    Raises:
        Test failure if the client count, mac address count, or any mac
        address format does not match expectations.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
    )
    clientData = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)["data"]
    clientCount = clientData[wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]
    clientMacAddresses = clientData[wifi_constants.SOFTAP_CLIENTS_MACS_CALLBACK_KEY]
    asserts.assert_equal(
        clientCount,
        expected_num_of_softap_clients,
        "The number of softap clients doesn't match the expected number",
    )
    asserts.assert_equal(
        len(clientMacAddresses),
        expected_num_of_softap_clients,
        "The number of mac addresses doesn't match the expected number",
    )
    for macAddress in clientMacAddresses:
        asserts.assert_true(
            checkMacAddress(macAddress), "An invalid mac address was returned"
        )
def checkMacAddress(input):
    """Validates whether a string is a well-formed mac address.

    Accepts six hex octets, case-insensitively, separated consistently by
    ':' or '-' (or not separated at all).

    Args:
        input: The string to validate. (Parameter name kept for caller
            compatibility even though it shadows the builtin `input`.)

    Returns:
        True for a valid mac address, False otherwise.
    """
    # The \1 back-reference forces the same separator between every octet.
    # Fix: raw string (avoids double-escaping) and re.fullmatch instead of
    # re.match + '$', which also rejects a trailing newline.
    macValidationRegex = r"[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\1[0-9a-f]{2}){4}"
    return re.fullmatch(macValidationRegex, input.lower()) is not None


def wait_for_expected_softap_state(ad, callbackId, expected_softap_state):
    """Waits for the expected softap state change.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.
        expected_softap_state: The expected softap state.

    Raises:
        Test failure if the reported state does not match the expected one.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_STATE_CHANGED
    )
    asserts.assert_equal(
        ad.ed.pop_event(eventStr, SHORT_TIMEOUT)["data"][
            wifi_constants.SOFTAP_STATE_CHANGE_CALLBACK_KEY
        ],
        expected_softap_state,
        "Softap state doesn't match with expected state",
    )
def get_current_number_of_softap_clients(ad, callbackId):
    """Pops all softap-client-updated events from the queue.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.

    Returns:
        The most recently reported number of softap clients if at least one
        callback event was queued, otherwise None.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
    )
    events = ad.ed.pop_all(eventStr)
    if not events:
        return None
    # Only the latest update matters; drain the queue and keep the last.
    return events[-1]["data"][wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]


def get_current_softap_info(ad, callbackId, need_to_wait):
    """Pops all softap-info-changed events from the queue.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.
        need_to_wait: Wait for an info callback event before popping all.

    Returns:
        Tuple (frequency, bandwidth) from the last softap info update, or
        (0, 0) if no event was observed.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_INFO_CHANGED
    )
    ad.log.debug("softap info dump from eventStr %s", eventStr)
    frequency = 0
    bandwidth = 0
    if need_to_wait:
        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
        frequency = event["data"][wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
        bandwidth = event["data"][wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
        ad.log.info(
            "softap info updated, frequency is %s, bandwidth is %s",
            frequency,
            bandwidth,
        )

    # Drain any remaining events; keep the most recent values.
    for event in ad.ed.pop_all(eventStr):
        frequency = event["data"][wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
        bandwidth = event["data"][wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
    ad.log.info("softap info, frequency is %s, bandwidth is %s", frequency, bandwidth)
    return frequency, bandwidth


def get_current_softap_infos(ad, callbackId, need_to_wait):
    """Pops all softap-info-list-changed events from the queue.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.
        need_to_wait: Wait for the info callback event before popping all.

    Returns:
        The last updated list of softap info dicts; an empty list if no
        event was observed.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_INFOLIST_CHANGED
    )
    ad.log.debug("softap info dump from eventStr %s", eventStr)

    # Fix: previously `infos` was unbound (NameError) when need_to_wait was
    # False and no events were queued; default to an empty list.
    infos = []
    if need_to_wait:
        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
        infos = event["data"]

    events = ad.ed.pop_all(eventStr)
    for event in events:
        infos = event["data"]

    for info in infos:
        frequency = info[wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
        bandwidth = info[wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
        wifistandard = info[wifi_constants.SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY]
        bssid = info[wifi_constants.SOFTAP_INFO_BSSID_CALLBACK_KEY]
        ad.log.info(
            "softap info, freq:%s, bw:%s, wifistandard:%s, bssid:%s",
            frequency,
            bandwidth,
            wifistandard,
            bssid,
        )

    return infos
def get_current_softap_capability(ad, callbackId, need_to_wait):
    """Pops all softap-capability-changed events from the queue.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.
        need_to_wait: Wait for the capability callback event before popping
            all queued events.

    Returns:
        The last updated capability of the softap, or None if no capability
        event was observed.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_CAPABILITY_CHANGED
    )
    ad.log.debug("softap capability dump from eventStr %s", eventStr)
    # Fix: previously `capability` was unbound (NameError) when need_to_wait
    # was False and no events were queued; default to None.
    capability = None
    if need_to_wait:
        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
        capability = event["data"]

    events = ad.ed.pop_all(eventStr)
    for event in events:
        capability = event["data"]

    return capability


def get_ssrdumps(ad):
    """Pulls any dumps from the device's ssrdump directory, then clears it.

    Args:
        ad: android device object.
    """
    logs = ad.get_file_names("/data/vendor/ssrdump/")
    if logs:
        ad.log.info("Pulling ssrdumps %s", logs)
        log_path = os.path.join(ad.device_log_path, "SSRDUMPS_%s" % ad.serial)
        os.makedirs(log_path, exist_ok=True)
        ad.pull_files(logs, log_path)
    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete", ignore_status=True)


def start_pcap(pcap, wifi_band, test_name):
    """Starts packet capture in monitor mode.

    Args:
        pcap: packet capture object.
        wifi_band: '2g' or '5g' or 'dual'.
        test_name: test name to be used for the pcap file name.

    Returns:
        Dictionary with wifi band as key and the tuple
        (pcap Process object, log directory) as the value.
    """
    log_dir = os.path.join(
        context.get_current_context().get_full_output_path(), "PacketCapture"
    )
    os.makedirs(log_dir, exist_ok=True)
    bands = [BAND_2G, BAND_5G] if wifi_band == "dual" else [wifi_band]
    procs = {}
    for band in bands:
        proc = pcap.start_packet_capture(band, log_dir, test_name)
        procs[band] = (proc, os.path.join(log_dir, test_name))
    return procs


def stop_pcap(pcap, procs, test_status=None):
    """Stops packet capture in monitor mode.

    Since the pcap logs in monitor mode can be very large, we delete them if
    they are not required: if 'test_status' is truthy the pcap files are
    deleted, otherwise they are kept.

    Args:
        pcap: packet capture object.
        procs: dictionary returned by start_pcap.
        test_status: status of the test case.
    """
    for proc, fname in procs.values():
        pcap.stop_packet_capture(proc)

        if test_status:
            shutil.rmtree(os.path.dirname(fname))


def verify_mac_not_found_in_pcap(ad, mac, packets):
    """Verifies that a mac address is not found in the captured packets.

    Args:
        ad: android device object.
        mac: string representation of the mac address.
        packets: packets obtained by rdpcap(pcap_fname).

    Raises:
        Test failure if the mac address appears in any packet summary.
    """
    for pkt in packets:
        logging.debug("Packet Summary = %s", pkt.summary())
        if mac in pkt.summary():
            asserts.fail(
                "Device %s caught Factory MAC: %s in packet sniffer."
                "Packet = %s" % (ad.serial, mac, pkt.show())
            )
def verify_mac_is_found_in_pcap(ad, mac, packets):
    """Verifies that a mac address is found in the captured packets.

    Args:
        ad: android device object.
        mac: string representation of the mac address.
        packets: packets obtained by rdpcap(pcap_fname).

    Raises:
        Test failure if the mac address appears in no packet summary.
    """
    for pkt in packets:
        if mac in pkt.summary():
            return
    # Fix: the original adjacent string literals rendered as
    # "...packet sniffer.for device..." with no separating space.
    asserts.fail(
        "Did not find MAC = %s in packet sniffer for device %s" % (mac, ad.serial)
    )


def start_cnss_diags(ads, cnss_diag_file, pixel_models):
    """Starts cnss_diag logging on every device in `ads`."""
    for ad in ads:
        start_cnss_diag(ad, cnss_diag_file, pixel_models)


def start_cnss_diag(ad, cnss_diag_file, pixel_models):
    """Starts cnss_diag to record extra wifi logs.

    Args:
        ad: android device object.
        cnss_diag_file: cnss diag config file to push to the device.
        pixel_models: pixel devices that support the pixel logger.
    """
    if ad.model not in pixel_models:
        ad.log.info("Device not supported to collect pixel logger")
        return
    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
    else:
        prop = wifi_constants.CNSS_DIAG_PROP
    if ad.adb.getprop(prop) != "true":
        # Push the config file only if it is not already on the device.
        if not int(
            ad.adb.shell(
                "ls -l %s%s | wc -l" % (CNSS_DIAG_CONFIG_PATH, CNSS_DIAG_CONFIG_FILE)
            )
        ):
            ad.adb.push("%s %s" % (cnss_diag_file, CNSS_DIAG_CONFIG_PATH))
        # Clear any stale logs before enabling the logger.
        ad.adb.shell(
            "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
            ignore_status=True,
        )
        ad.adb.shell("setprop %s true" % prop, ignore_status=True)


def stop_cnss_diags(ads, pixel_models):
    """Stops cnss_diag logging on every device in `ads`."""
    for ad in ads:
        stop_cnss_diag(ad, pixel_models)


def stop_cnss_diag(ad, pixel_models):
    """Stops cnss_diag.

    Args:
        ad: android device object.
        pixel_models: pixel devices that support the pixel logger.
    """
    if ad.model not in pixel_models:
        ad.log.info("Device not supported to collect pixel logger")
        return
    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
    else:
        prop = wifi_constants.CNSS_DIAG_PROP
    ad.adb.shell("setprop %s false" % prop, ignore_status=True)


def get_cnss_diag_log(ad):
    """Pulls the cnss_diag logs from the wlan_logs dir.

    Args:
        ad: android device object.
    """
    logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
    if logs:
        ad.log.info("Pulling cnss_diag logs %s", logs)
        log_path = os.path.join(ad.device_log_path, "CNSS_DIAG_%s" % ad.serial)
        os.makedirs(log_path, exist_ok=True)
        ad.pull_files(logs, log_path)


# Result of a single link probe: success flag, raw command output, elapsed
# time in ms on success, and a numeric failure reason on failure.
LinkProbeResult = namedtuple(
    "LinkProbeResult", ("is_success", "stdout", "elapsed_time", "failure_reason")
)


def send_link_probe(ad):
    """Sends a link probe to the currently connected AP and reports whether
    the probe succeeded.

    Args:
        ad: android device object.

    Returns:
        LinkProbeResult namedtuple.
    """
    stdout = ad.adb.shell("cmd wifi send-link-probe")
    asserts.assert_false(
        "Error" in stdout or "Exception" in stdout,
        "Exception while sending link probe: " + stdout,
    )

    is_success = False
    elapsed_time = None
    failure_reason = None
    if "succeeded" in stdout:
        is_success = True
        # The first integer token in the output is the elapsed time in ms.
        elapsed_time = next(
            (int(token) for token in stdout.split() if token.isdigit()), None
        )
    elif "failed with reason" in stdout:
        failure_reason = next(
            (int(token) for token in stdout.split() if token.isdigit()), None
        )
    else:
        asserts.fail("Unexpected link probe result: " + stdout)

    return LinkProbeResult(
        is_success=is_success,
        stdout=stdout,
        elapsed_time=elapsed_time,
        failure_reason=failure_reason,
    )
def send_link_probes(ad, num_probes, delay_sec):
    """Sends a sequence of link probes to the currently connected AP and
    reports whether the probes succeeded.

    Args:
        ad: android device object.
        num_probes: number of probes to perform.
        delay_sec: delay time between probes, in seconds.

    Returns:
        List[LinkProbeResult], one LinkProbeResult for each probe.
    """
    logging.info("Sending link probes")
    results = []
    for _ in range(num_probes):
        # send_link_probe() will also fail the test if it sees an exception
        # in the stdout of the adb shell command.
        result = send_link_probe(ad)
        logging.info("link probe results: %s", result)
        results.append(result)
        time.sleep(delay_sec)

    return results


def ap_setup(test, index, ap, network, bandwidth=80, channel=6):
    """Sets up the AP with the provided network info.

    Args:
        test: the calling test class object.
        index: int, index of the AP in test.access_points.
        ap: access_point object of the AP to configure.
            NOTE(review): `index` is used to close the old AP while `ap` is
            started afresh — presumably they refer to the same AP; confirm
            with callers.
        network: dict with information of the network, including ssid,
            password and bssid.
        bandwidth: the operation bandwidth for the AP, default 80MHz.
        channel: the channel number for the AP.
    """
    # (Doc fix: the original docstring claimed a `brconfigs` return value,
    # but this function returns nothing.)
    bss_settings = []
    ssid = network[WifiEnums.SSID_KEY]
    test.access_points[index].close()
    time.sleep(5)

    # Configure AP as required.
    if "password" in network.keys():
        password = network["password"]
        security = hostapd_security.Security(security_mode="wpa", password=password)
    else:
        security = hostapd_security.Security(security_mode=None, password=None)
    config = hostapd_ap_preset.create_ap_preset(
        channel=channel,
        ssid=ssid,
        security=security,
        bss_settings=bss_settings,
        vht_bandwidth=bandwidth,
        profile_name="whirlwind",
        iface_wlan_2g=ap.wlan_2g,
        iface_wlan_5g=ap.wlan_5g,
    )
    ap.start_ap(config)
    logging.info("AP started on channel {} with SSID {}".format(channel, ssid))


def turn_ap_off(test, AP):
    """Brings down hostapd on the Access Point.

    Args:
        test: The test class object.
        AP: int, 1-based index indicating which AP to turn OFF.
    """
    hostapd_2g = test.access_points[AP - 1]._aps["wlan0"].hostapd
    if hostapd_2g.is_alive():
        hostapd_2g.stop()
        logging.debug("Turned WLAN0 AP%d off" % AP)
    hostapd_5g = test.access_points[AP - 1]._aps["wlan1"].hostapd
    if hostapd_5g.is_alive():
        hostapd_5g.stop()
        logging.debug("Turned WLAN1 AP%d off" % AP)


def turn_ap_on(test, AP):
    """Brings up hostapd on the Access Point.

    Args:
        test: The test class object.
        AP: int, 1-based index indicating which AP to turn ON.
    """
    hostapd_2g = test.access_points[AP - 1]._aps["wlan0"].hostapd
    if not hostapd_2g.is_alive():
        hostapd_2g.start(hostapd_2g.config)
        logging.debug("Turned WLAN0 AP%d on" % AP)
    hostapd_5g = test.access_points[AP - 1]._aps["wlan1"].hostapd
    if not hostapd_5g.is_alive():
        hostapd_5g.start(hostapd_5g.config)
        logging.debug("Turned WLAN1 AP%d on" % AP)
def turn_location_off_and_scan_toggle_off(ad):
    """Turns off wifi location scans.

    Args:
        ad: android device object.
    """
    utils.set_location_service(ad, False)
    ad.droid.wifiScannerToggleAlwaysAvailable(False)
    msg = "Failed to turn off location service's scan."
    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)


def set_softap_channel(dut, ap_iface="wlan1", cs_count=10, channel=2462):
    """Sets the SoftAP mode channel via hostapd_cli channel switch.

    Args:
        dut: android device object.
        ap_iface: interface of SoftAP mode.
        cs_count: how many beacon frames before switching channel, default 10.
        channel: a wifi channel (center frequency in MHz, e.g. 2462).

    Returns:
        The hostapd_cli output ("OK") on success.

    Raises:
        Test failure if hostapd_cli does not report OK.
    """
    chan_switch_cmd = "hostapd_cli -i {} chan_switch {} {}".format(
        ap_iface, cs_count, channel
    )
    dut.log.info("adb shell {}".format(chan_switch_cmd))
    chan_switch_result = dut.adb.shell(chan_switch_cmd)
    if chan_switch_result == "OK":
        dut.log.info("switch hotspot channel to {}".format(channel))
        return chan_switch_result

    asserts.fail("Failed to switch hotspot channel")


def get_wlan0_link(dut):
    """Gets the wlan0 interface status.

    Args:
        dut: android device object.

    Returns:
        Dict of key=value pairs parsed from `wpa_cli ... status` output.

    Raises:
        Test failure if no ssid is present (not connected to any network).
    """
    get_wlan0 = "wpa_cli -iwlan0 -g@android:wpa_wlan0 IFNAME=wlan0 status"
    out = dut.adb.shell(get_wlan0)
    out = dict(re.findall(r'(\S+)=(".*?"|\S+)', out))
    asserts.assert_true("ssid" in out, "Client doesn't connect to any network")
    return out


def verify_11ax_wifi_connection(ad, wifi6_supported_models, wifi6_ap):
    """Verifies 11ax for a wifi connection.

    Args:
        ad: android device object.
        wifi6_supported_models: device models supporting 11ax.
        wifi6_ap: True if the AP supports 11ax.
    """
    if wifi6_ap and ad.model in wifi6_supported_models:
        logging.info("Verifying 11ax. Model: %s" % ad.model)
        asserts.assert_true(
            ad.droid.wifiGetConnectionStandard() == wifi_constants.WIFI_STANDARD_11AX,
            "DUT did not connect to 11ax.",
        )


def verify_11ax_softap(dut, dut_client, wifi6_supported_models):
    """Verifies 11ax SoftAp if both devices support it.

    Checks that when both DUT and DUT client support 11ax, the SoftAp comes
    up in 11ax mode and the DUT client connects to it with that standard.

    Args:
        dut: Softap device.
        dut_client: Client connecting to the softap.
        wifi6_supported_models: List of device models supporting 11ax.
    """
    if (
        dut.model in wifi6_supported_models
        and dut_client.model in wifi6_supported_models
    ):
        logging.info(
            "Verifying 11ax softap. DUT model: %s, DUT Client model: %s",
            dut.model,
            dut_client.model,
        )
        asserts.assert_true(
            dut_client.droid.wifiGetConnectionStandard()
            == wifi_constants.WIFI_STANDARD_11AX,
            "DUT failed to start SoftAp in 11ax.",
        )


def check_available_channels_in_bands_2_5(dut, country_code):
    """Checks if the DUT is capable of enabling BridgedAp.

    #TODO: Find a way to make this function flexible by taking an argument.

    Args:
        dut: android device object.
        country_code: country code, e.g., 'US', 'JP'.

    Returns:
        True if the DUT reports supported channels in both the 2.4GHz and
        5GHz bands (i.e. is capable of enabling BridgedAp), else False.
    """
    set_wifi_country_code(dut, country_code)
    country = dut.droid.wifiGetCountryCode()
    dut.log.info("DUT current country code : {}".format(country))
    # Wi-Fi ON and OFF to make sure the country code takes effect.
    wifi_toggle_state(dut, True)
    wifi_toggle_state(dut, False)

    # Register SoftAp Callback and get SoftAp capability.
    callbackId = dut.droid.registerSoftApCallback()
    capability = get_current_softap_capability(dut, callbackId, True)
    dut.droid.unregisterSoftApCallback(callbackId)

    if (
        capability[wifi_constants.SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST]
        and capability[wifi_constants.SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST]
    ):
        return True
    return False


@retry(stop=stop_after_attempt(5), wait=wait_fixed(2))
def validate_ping_between_two_clients(dut1, dut2):
    """Makes two DUTs ping each other.

    Args:
        dut1: An AndroidDevice object.
        dut2: An AndroidDevice object.

    Raises:
        Test failure if either DUT has no IPv4 address or any ping fails.
    """
    # Get DUTs' IPv4 addresses.
    dut1_ip = ""
    dut2_ip = ""
    try:
        dut1_ip = dut1.droid.connectivityGetIPv4Addresses("wlan0")[0]
    except IndexError:
        dut1.log.info(
            "{} has no Wi-Fi connection, cannot get IPv4 address.".format(dut1.serial)
        )
    try:
        dut2_ip = dut2.droid.connectivityGetIPv4Addresses("wlan0")[0]
    except IndexError:
        dut2.log.info(
            "{} has no Wi-Fi connection, cannot get IPv4 address.".format(dut2.serial)
        )
    # Test fails if we are not able to obtain both DUTs' IPv4 addresses.
    asserts.assert_true(
        dut1_ip and dut2_ip, "Ping failed because no DUT's IPv4 address"
    )

    dut1.log.info("{} IPv4 addresses : {}".format(dut1.serial, dut1_ip))
    dut2.log.info("{} IPv4 addresses : {}".format(dut2.serial, dut2_ip))

    # Two clients ping each other.
    dut1.log.info("{} ping {}".format(dut1_ip, dut2_ip))
    asserts.assert_true(
        utils.adb_shell_ping(dut1, count=10, dest_ip=dut2_ip, timeout=20),
        "%s ping %s failed" % (dut1.serial, dut2_ip),
    )

    dut2.log.info("{} ping {}".format(dut2_ip, dut1_ip))
    asserts.assert_true(
        utils.adb_shell_ping(dut2, count=10, dest_ip=dut1_ip, timeout=20),
        "%s ping %s failed" % (dut2.serial, dut1_ip),
    )
diff --git a/src/antlion/tests/BUILD.gn b/src/antlion/tests/BUILD.gn deleted file mode 100644 index e0d98ba..0000000 --- a/src/antlion/tests/BUILD.gn +++ /dev/null
@@ -1,31 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -assert(is_host, "antlion tests only supported for host testing") - -group("e2e_tests") { - testonly = true - public_deps = [ - "dhcp:e2e_tests", - "examples:e2e_tests", - "wlan:e2e_tests", - "wlan_policy:e2e_tests", - ] -} - -group("e2e_tests_quick") { - testonly = true - public_deps = [ - "examples:e2e_tests_quick", - "wlan:e2e_tests_quick", - "wlan_policy:e2e_tests", - ] -} - -group("e2e_tests_manual") { - testonly = true - public_deps = [ - "wlan:e2e_tests_manual", - ] -}
diff --git a/src/antlion/tests/dhcp/BUILD.gn b/src/antlion/tests/dhcp/BUILD.gn deleted file mode 100644 index c3acdd3..0000000 --- a/src/antlion/tests/dhcp/BUILD.gn +++ /dev/null
@@ -1,38 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("dhcpv4_duplicate_address_test") { - main_source = "Dhcpv4DuplicateAddressTest.py" - environments = display_ap_envs -} - -antlion_host_test("dhcpv4_interop_basic_test") { - main_source = "Dhcpv4InteropBasicTest.py" - environments = display_ap_envs -} - -antlion_host_test("dhcpv4_interop_combinatorial_options_test") { - main_source = "Dhcpv4InteropCombinatorialOptionsTest.py" - environments = display_ap_envs -} - -antlion_host_test("dhcpv4_interop_fixture_test") { - main_source = "Dhcpv4InteropFixtureTest.py" - environments = display_ap_envs -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":dhcpv4_duplicate_address_test($host_toolchain)", - ":dhcpv4_interop_basic_test($host_toolchain)", - ":dhcpv4_interop_combinatorial_options_test($host_toolchain)", - ":dhcpv4_interop_fixture_test($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py b/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py deleted file mode 100644 index 4614e59..0000000 --- a/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py +++ /dev/null
#!/usr/bin/env python3
#
# Copyright 2022 The Fuchsia Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re

from antlion.controllers.ap_lib import dhcp_config
from antlion.controllers.utils_lib.commands import ip
from antlion.test_utils.dhcp import base_test

from mobly import asserts, test_runner


class Dhcpv4DuplicateAddressTest(base_test.Dhcpv4InteropFixture):
    """Validates DHCP client behavior when the offered address is in use."""

    def setup_test(self):
        super().setup_test()
        # IP aliases added to the AP during a test; removed in teardown.
        self.extra_addresses = []
        self.ap_params = self.setup_ap()
        self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)

    def teardown_test(self):
        super().teardown_test()
        # Fix: the loop variable was named `ip`, shadowing the imported `ip`
        # command module within this method.
        for addr in self.extra_addresses:
            self.ap_ip_cmd.remove_ipv4_address(self.ap_params["id"], addr)

    def test_duplicate_address_assignment(self):
        """It's possible for a DHCP server to assign an address that already exists on the network.
        DHCP clients are expected to perform a "gratuitous ARP" of the to-be-assigned address, and
        refuse to assign that address. Clients should also recover by asking for a different
        address.
        """
        # Modify subnet to hold fewer addresses.
        # A '/29' has 8 addresses (6 usable excluding router / broadcast)
        subnet = next(self.ap_params["network"].subnets(new_prefix=29))
        subnet_conf = dhcp_config.Subnet(
            subnet=subnet,
            router=self.ap_params["ip"],
            # When the DHCP server is considering dynamically allocating an IP address to a client,
            # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
            # for a second, and if no ICMP Echo response has been heard, it assigns the address.
            # If a response is heard, the lease is abandoned, and the server does not respond to
            # the client.
            # The ping-check configuration parameter can be used to control checking - if its value
            # is false, no ping check is done.
            additional_parameters={"ping-check": "false"},
        )
        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)

        # Add each of the usable IPs as an alias for the router's interface, such that the router
        # will respond to any pings on it.
        for addr in subnet.hosts():
            self.ap_ip_cmd.add_ipv4_address(self.ap_params["id"], addr)
            # Ensure we remove the address in self.teardown_test() even if the test fails
            self.extra_addresses.append(addr)

        self.connect(ap_params=self.ap_params)
        with asserts.assert_raises(ConnectionError):
            self.get_device_ipv4_addr()

        # Per spec, the flow should be:
        # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
        dhcp_logs = self.access_point.get_dhcp_logs()
        for expected_message in [
            r"DHCPDISCOVER from \S+",
            r"DHCPOFFER on [0-9.]+ to \S+",
            r"DHCPREQUEST for [0-9.]+",
            r"DHCPACK on [0-9.]+",
            r"DHCPDECLINE of [0-9.]+ from \S+ via .*: abandoned",
            r"Abandoning IP address [0-9.]+: declined",
        ]:
            asserts.assert_true(
                re.search(expected_message, dhcp_logs),
                f"Did not find expected message ({expected_message}) in dhcp logs: {dhcp_logs}"
                + "\n",
            )

        # Remove each of the IP aliases.
        # Note: this also removes the router's address (e.g. 192.168.1.1), so pinging the
        # router after this will not work.
        while self.extra_addresses:
            self.ap_ip_cmd.remove_ipv4_address(
                self.ap_params["id"], self.extra_addresses.pop()
            )

        # Now, we should get an address successfully
        assigned_addr = self.get_device_ipv4_addr()
        dhcp_logs = self.access_point.get_dhcp_logs()

        expected_string = f"DHCPREQUEST for {assigned_addr}"
        asserts.assert_true(
            dhcp_logs.count(expected_string) >= 1,
            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
            + dhcp_logs
            + "\n",
        )

        expected_string = f"DHCPACK on {assigned_addr}"
        asserts.assert_true(
            dhcp_logs.count(expected_string) >= 1,
            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
            + dhcp_logs
            + "\n",
        )


if __name__ == "__main__":
    test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py deleted file mode 100644 index b3d1ce9..0000000 --- a/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py +++ /dev/null
@@ -1,99 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -import re - -from antlion.controllers.ap_lib import dhcp_config -from antlion.test_utils.dhcp import base_test - -from mobly import asserts, test_runner - - -class Dhcpv4InteropBasicTest(base_test.Dhcpv4InteropFixture): - """DhcpV4 tests which validate basic DHCP client/server interactions.""" - - def test_basic_dhcp_assignment(self): - self.run_test_case_expect_dhcp_success( - "basic_dhcp_assignment", - settings={"dhcp_options": {}, "dhcp_parameters": {}}, - ) - - def test_pool_allows_unknown_clients(self): - self.run_test_case_expect_dhcp_success( - "pool_allows_unknown_clients", - settings={ - "dhcp_options": {}, - "dhcp_parameters": {"allow": "unknown-clients"}, - }, - ) - - def test_pool_disallows_unknown_clients(self): - ap_params = self.setup_ap() - subnet_conf = dhcp_config.Subnet( - subnet=ap_params["network"], - router=ap_params["ip"], - additional_parameters={"deny": "unknown-clients"}, - ) - dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf]) - self.access_point.start_dhcp(dhcp_conf=dhcp_conf) - - self.connect(ap_params=ap_params) - with asserts.assert_raises(ConnectionError): - self.get_device_ipv4_addr() - - dhcp_logs = self.access_point.get_dhcp_logs() - asserts.assert_true( - re.search(r"DHCPDISCOVER from .*no free leases", dhcp_logs), - "Did not find expected message in dhcp logs: " + dhcp_logs + "\n", 
- ) - - def test_lease_renewal(self): - """Validates that a client renews their DHCP lease.""" - LEASE_TIME = 30 - ap_params = self.setup_ap() - subnet_conf = dhcp_config.Subnet( - subnet=ap_params["network"], router=ap_params["ip"] - ) - dhcp_conf = dhcp_config.DhcpConfig( - subnets=[subnet_conf], - default_lease_time=LEASE_TIME, - max_lease_time=LEASE_TIME, - ) - self.access_point.start_dhcp(dhcp_conf=dhcp_conf) - self.connect(ap_params=ap_params) - ip = self.get_device_ipv4_addr() - - SLEEP_TIME = LEASE_TIME + 3 - self.log.info(f"Sleeping {SLEEP_TIME}s to await DHCP renewal") - time.sleep(SLEEP_TIME) - - dhcp_logs = self.access_point.get_dhcp_logs() - # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs. - # The log lines look like: - # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from f8:0f:f9:3d:ce:d1 via wlan1 - # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1 - expected_string = f"DHCPREQUEST for {ip}" - asserts.assert_true( - dhcp_logs.count(expected_string) >= 2, - f'Not enough DHCP renewals ("{expected_string}") in logs: ' - + dhcp_logs - + "\n", - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py deleted file mode 100644 index 7e7b379..0000000 --- a/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py +++ /dev/null
@@ -1,133 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import random - -from collections import namedtuple -from typing import Dict, Union - -from antlion.test_utils.dhcp import base_test - -from mobly import asserts, test_runner - -OPT_NUM_DOMAIN_SEARCH = 119 -OPT_NUM_DOMAIN_NAME = 15 - -Test = namedtuple(typename="Args", field_names=["name", "settings"]) - - -class Dhcpv4InteropCombinatorialOptionsTest(base_test.Dhcpv4InteropFixture): - """DhcpV4 tests which validate combinations of DHCP options.""" - - def setup_generated_tests(self) -> None: - self.generate_tests( - self.run_test_case_expect_dhcp_success, - lambda name, *_: f"test_{name}", - [ - Test( - "domain_name_valid", - { - "dhcp_options": { - "domain-name": '"example.test"', - "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME, - }, - "dhcp_parameters": {}, - }, - ), - Test( - "domain_name_invalid", - { - "dhcp_options": { - "domain-name": '"example.invalid"', - "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME, - }, - "dhcp_parameters": {}, - }, - ), - Test( - "domain_search_valid", - { - "dhcp_options": { - "domain-name": '"example.test"', - "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH, - }, - "dhcp_parameters": {}, - }, - ), - Test( - "domain_search_invalid", - { - "dhcp_options": { - "domain-name": '"example.invalid"', - "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH, - }, - "dhcp_parameters": {}, - }, - ), - 
Test( - "max_sized_message", - { - "dhcp_options": self._generate_max_sized_message_dhcp_options(), - "dhcp_parameters": {}, - }, - ), - ], - ) - - def _generate_max_sized_message_dhcp_options(self) -> Dict[str, Union[int, str]]: - """Generates the DHCP options for max sized message test. - - The RFC limits DHCP payloads to 576 bytes unless the client signals it - can handle larger payloads, which it does by sending DHCP option 57, - "Maximum DHCP Message Size". Despite being able to accept larger - payloads, clients typically don't advertise this. The test verifies that - the client accepts a large message split across multiple ethernet - frames. The test is created by sending many bytes of options through the - domain-name-servers option, which is of unbounded length (though is - compressed per RFC1035 section 4.1.4). - - Returns: - A dict of DHCP options. - """ - typical_ethernet_mtu = 1500 - - long_dns_setting = ", ".join( - f'"ns{num}.example"' - for num in random.sample(range(100_000, 1_000_000), 250) - ) - # RFC1035 compression means any shared suffix ('.example' in this case) - # will be deduplicated. Calculate approximate length by removing that - # suffix. - long_dns_setting_len = len( - long_dns_setting.replace(", ", "") - .replace('"', "") - .replace(".example", "") - .encode("utf-8") - ) - asserts.assert_true( - long_dns_setting_len > typical_ethernet_mtu, - "Expected to generate message greater than ethernet mtu", - ) - - return { - "dhcp-max-message-size": long_dns_setting_len * 2, - "domain-search": long_dns_setting, - "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH, - } - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py deleted file mode 100644 index ebbf866..0000000 --- a/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py +++ /dev/null
@@ -1,63 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.controllers.ap_lib import dhcp_config -from antlion.test_utils.dhcp import base_test - -from mobly import asserts, test_runner - - -class Dhcpv4InteropFixtureTest(base_test.Dhcpv4InteropFixture): - """Tests which validate the behavior of the Dhcpv4InteropFixture. - - In theory, these are more similar to unit tests than ACTS tests, but - since they interact with hardware (specifically, the AP), we have to - write and run them like the rest of the ACTS tests.""" - - def test_invalid_options_not_accepted(self): - """Ensures the DHCP server doesn't accept invalid options""" - ap_params = self.setup_ap() - subnet_conf = dhcp_config.Subnet( - subnet=ap_params["network"], - router=ap_params["ip"], - additional_options={"foo": "bar"}, - ) - dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf]) - with asserts.assert_raises_regex(Exception, r"failed to start"): - self.access_point.start_dhcp(dhcp_conf=dhcp_conf) - - def test_invalid_parameters_not_accepted(self): - """Ensures the DHCP server doesn't accept invalid parameters""" - ap_params = self.setup_ap() - subnet_conf = dhcp_config.Subnet( - subnet=ap_params["network"], - router=ap_params["ip"], - additional_parameters={"foo": "bar"}, - ) - dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf]) - with asserts.assert_raises_regex(Exception, r"failed to start"): - 
self.access_point.start_dhcp(dhcp_conf=dhcp_conf) - - def test_no_dhcp_server_started(self): - """Validates that the test fixture does not start a DHCP server.""" - ap_params = self.setup_ap() - self.connect(ap_params=ap_params) - with asserts.assert_raises(ConnectionError): - self.get_device_ipv4_addr() - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/examples/BUILD.gn b/src/antlion/tests/examples/BUILD.gn deleted file mode 100644 index 066d515..0000000 --- a/src/antlion/tests/examples/BUILD.gn +++ /dev/null
@@ -1,27 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("sl4f_sanity_test") { - main_source = "Sl4fSanityTest.py" - environments = display_envs + [ qemu_env ] -} - -group("e2e_tests_quick") { - testonly = true - public_deps = [ - ":sl4f_sanity_test($host_toolchain)", - ] -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":sl4f_sanity_test($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/examples/Sl4fSanityTest.py b/src/antlion/tests/examples/Sl4fSanityTest.py deleted file mode 100644 index 82c04f3..0000000 --- a/src/antlion/tests/examples/Sl4fSanityTest.py +++ /dev/null
@@ -1,48 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Test to verify SL4F is running on a Fuchsia device and can communicate with -antlion successfully. -""" - -import logging -from typing import List - -from antlion.controllers import fuchsia_device -from antlion.controllers.fuchsia_device import FuchsiaDevice - -from mobly import asserts, test_runner, base_test - - -class Sl4fSanityTest(base_test.BaseTestClass): - def setup_class(self): - self.log = logging.getLogger() - self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller( - fuchsia_device - ) - - asserts.abort_class_if( - len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device" - ) - - def test_example(self): - for fuchsia_device in self.fuchsia_devices: - res = fuchsia_device.sl4f.netstack_lib.netstackListInterfaces() - self.log.info(res) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/flash/FlashTest.py b/src/antlion/tests/flash/FlashTest.py deleted file mode 100644 index 7c5399f..0000000 --- a/src/antlion/tests/flash/FlashTest.py +++ /dev/null
@@ -1,123 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Script for to flash Fuchsia devices and reports the DUT's version of Fuchsia in -the Sponge test result properties. Uses the built in flashing tool for -fuchsia_devices. -""" - -import logging -from typing import List - -from antlion.controllers import fuchsia_device, pdu -from antlion.controllers.fuchsia_device import FuchsiaDevice -from antlion.controllers.pdu import PduDevice -from antlion.utils import get_device - -from mobly import asserts, base_test, signals, test_runner - -MAX_FLASH_ATTEMPTS = 3 - - -class FlashTest(base_test.BaseTestClass): - def setup_class(self): - self.log = logging.getLogger() - self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller( - fuchsia_device - ) - self.pdu_devices: List[PduDevice] = self.register_controller(pdu) - self.failed_to_get_device_info = False - - def teardown_class(self): - # Verify that FlashTest successfully reported the DUT version. This is - # working around a flaw in ACTS where signals.TestAbortAll does not - # report any errors. - # - # TODO(http://b/253515812): This has been fixed in Mobly already. Remove - # teardown_class and change "TestError" to "abort_all" in - # test_flash_devices once we move to Mobly. 
- if self.failed_to_get_device_info: - asserts.abort_all("Failed to get DUT device information") - - return super().teardown_class() - - def test_flash_devices(self) -> None: - """Flashes a Fuchsia device for testing. - - This method calls the fuchsia_device reboot() with 'flash' argument. - This kicks off a flash, not pave, of the fuchsia device. It also soft - reboots the device. On error it will attempt to reflash up to - MAX_FLASH_ATTEMPTS hard rebooting inbetween each attempt. - """ - for device in self.fuchsia_devices: - flash_counter = 0 - while True: - try: - device.reboot( - reboot_type="flash", use_ssh=True, unreachable_timeout=120 - ) - self.log.info(f"{device.orig_ip} has been flashed.") - break - except Exception as err: - self.log.error( - f"Failed to flash {device.orig_ip} with error:\n{err}" - ) - - if not device.device_pdu_config: - asserts.abort_all( - f"Failed to flash {device.orig_ip} and no PDU" - "available for hard reboot" - ) - - flash_counter = flash_counter + 1 - if flash_counter == MAX_FLASH_ATTEMPTS: - asserts.abort_all( - f"Failed to flash {device.orig_ip} after" - f"{MAX_FLASH_ATTEMPTS} attempts" - ) - - self.log.info( - f"Hard rebooting {device.orig_ip} and retrying flash." - ) - device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices) - - # Report the new Fuchsia version - try: - dut = get_device(self.fuchsia_devices, "DUT") - version = dut.version() - device_name = dut.device_name() - product_name = dut.product_name() - - self.record_data( - { - "sponge_properties": { - "DUT_VERSION": version, - "DUT_NAME": device_name, - "DUT_PRODUCT": product_name, - }, - } - ) - - self.log.info(f"DUT version: {version}") - self.log.info(f"DUT name: {device_name}") - self.log.info(f"DUT product: {product_name}") - except Exception as e: - self.failed_to_get_device_info = True - raise signals.TestError(f"Failed to get DUT device information: {e}") from e - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/logging/FuchsiaLoggingTest.py b/src/antlion/tests/logging/FuchsiaLoggingTest.py deleted file mode 100644 index a5e2db8..0000000 --- a/src/antlion/tests/logging/FuchsiaLoggingTest.py +++ /dev/null
@@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from mobly import asserts, base_test, signals, test_runner -from typing import List - -from antlion.controllers import fuchsia_device -from antlion.controllers.fuchsia_device import FuchsiaDevice - -MESSAGE = "Logging Test" - - -class FuchsiaLoggingTest(base_test.BaseTestClass): - def setup_class(self): - self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller( - fuchsia_device - ) - - asserts.abort_class_if( - len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device" - ) - - self.dut = self.fuchsia_devices[0] - - def test_log_err(self): - result = self.dut.sl4f.logging_lib.logE(MESSAGE) - if result.get("error") is None: - signals.TestPass(result.get("result")) - else: - signals.TestFailure(result.get("error")) - - def test_log_info(self): - result = self.dut.sl4f.logging_lib.logI(MESSAGE) - if result.get("error") is None: - signals.TestPass(result.get("result")) - else: - signals.TestFailure(result.get("error")) - - def test_log_warn(self): - result = self.dut.sl4f.logging_lib.logW(MESSAGE) - if result.get("error") is None: - signals.TestPass(result.get("result")) - else: - signals.TestFailure(result.get("error")) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/netstack/NetstackIfaceTest.py b/src/antlion/tests/netstack/NetstackIfaceTest.py deleted file mode 100644 index fce3197..0000000 --- a/src/antlion/tests/netstack/NetstackIfaceTest.py +++ /dev/null
@@ -1,164 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -from typing import List - -from antlion.controllers import fuchsia_device -from antlion.controllers.fuchsia_device import FuchsiaDevice - -from mobly import asserts, signals, test_runner, base_test - - -class NetstackIfaceTest(base_test.BaseTestClass): - default_timeout = 10 - active_scan_callback_list = [] - active_adv_callback_list = [] - droid = None - - def setup_class(self): - self.log = logging.getLogger() - self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller( - fuchsia_device - ) - - asserts.abort_class_if( - len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device" - ) - - self.dut = self.fuchsia_devices[0] - - def _enable_all_interfaces(self): - interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces() - for item in interfaces.get("result"): - identifier = item.get("id") - self.dut.sl4f.netstack_lib.enableInterface(identifier) - - def setup_test(self): - # Always make sure all interfaces listed are in an up state. - self._enable_all_interfaces() - - def teardown_test(self): - # Always make sure all interfaces listed are in an up state. - self._enable_all_interfaces() - - def test_list_interfaces(self): - """Test listing all interfaces. - - Steps: - 1. Call ListInterfaces FIDL api. - 2. Verify there is at least one interface returned. 
- - Expected Result: - There were no errors in retrieving the list of interfaces. - There was at least one interface in the list. - - Returns: - signals.TestPass if no errors - signals.TestFailure if there are any errors during the test. - - TAGS: Netstack - Priority: 1 - """ - interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces() - if interfaces.get("error") is not None: - raise signals.TestFailure("Failed with {}".format(interfaces.get("error"))) - if len(interfaces.get("result")) < 1: - raise signals.TestFailure("No interfaces found.") - self.log.info("Interfaces found: {}".format(interfaces.get("result"))) - raise signals.TestPass("Success") - - def test_toggle_wlan_interface(self): - """Test toggling the wlan interface if it exists. - - Steps: - 1. Call ListInterfaces FIDL api. - 2. Find the wlan interface. - 3. Disable the interface. - 4. Verify interface attributes in a down state. - 5. Enable the interface. - 6. Verify interface attributes in an up state. - - Expected Result: - WLAN interface was successfully brought down and up again. - - Returns: - signals.TestPass if no errors - signals.TestFailure if there are any errors during the test. - signals.TestSkip if there are no wlan interfaces. - - TAGS: Netstack - Priority: 1 - """ - - def get_wlan_interfaces(): - result = self.dut.sl4f.netstack_lib.netstackListInterfaces() - if error := result.get("error"): - raise signals.TestFailure(f"unable to list interfaces: {error}") - return [ - interface - for interface in result.get("result") - if "wlan" in interface.get("name") - ] - - def get_ids(interfaces): - return [get_id(interface) for interface in interfaces] - - wlan_interfaces = get_wlan_interfaces() - if not wlan_interfaces: - raise signals.TestSkip("no wlan interface found") - interface_ids = get_ids(wlan_interfaces) - - # Disable the interfaces. 
- for identifier in interface_ids: - result = self.dut.sl4f.netstack_lib.disableInterface(identifier) - if error := result.get("error"): - raise signals.TestFailure( - f"failed to disable wlan interface {identifier}: {error}" - ) - - # Retrieve the interfaces again. - disabled_wlan_interfaces = get_wlan_interfaces() - disabled_interface_ids = get_ids(wlan_interfaces) - - if not disabled_interface_ids == interface_ids: - raise signals.TestFailure( - f"disabled interface IDs do not match original interface IDs: original={interface_ids} disabled={disabled_interface_ids}" - ) - - # Check the current state of the interfaces. - for interface in disabled_interfaces: - if len(interface_info.get("ipv4_addresses")) > 0: - raise signals.TestFailure( - f"no Ipv4 Address should be present: {interface}" - ) - - # TODO (35981): Verify other values when interface down. - - # Re-enable the interfaces. - for identifier in disabled_interface_ids: - result = self.dut.sl4f.netstack_lib.enableInterface(identifier) - if error := result.get("error"): - raise signals.TestFailure( - f"failed to enable wlan interface {identifier}: {error}" - ) - - # TODO (35981): Verify other values when interface up. - raise signals.TestPass("Success") - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py b/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py deleted file mode 100644 index 36b52ad..0000000 --- a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py +++ /dev/null
@@ -1,95 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import time -from typing import List - -from antlion.controllers import fuchsia_device -from antlion.controllers.fuchsia_device import FuchsiaDevice -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, base_test, signals, test_runner - - -class ToggleWlanInterfaceStressTest(base_test.BaseTestClass): - def setup_class(self): - self.log = logging.getLogger() - self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller( - fuchsia_device - ) - - asserts.abort_class_if( - len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device" - ) - - self.dut = create_wlan_device(self.fuchsia_devices[0]) - - def test_iface_toggle_and_ping(self): - """Test that we don't error out when toggling WLAN interfaces. - - Steps: - 1. Find a WLAN interface - 2. Destroy it - 3. Create a new WLAN interface - 4. Ping after association - 5. Repeat 1-4 1,000 times - - Expected Result: - Verify there are no errors in destroying the wlan interface. - - Returns: - signals.TestPass if no errors - signals.TestFailure if there are any errors during the test. - - TAGS: WLAN, Stability - Priority: 1 - """ - - # Test assumes you've already connected to some AP. 
- - for i in range(1000): - wlan_interfaces = self.dut.get_wlan_interface_id_list() - print(wlan_interfaces) - if len(wlan_interfaces) < 1: - raise signals.TestFailure("Not enough wlan interfaces for test") - if not self.dut.destroy_wlan_interface(wlan_interfaces[0]): - raise signals.TestFailure("Failed to destroy WLAN interface") - # Really make sure it is dead - self.fuchsia_devices[0].ssh.run(f"wlan iface del {wlan_interfaces[0]}") - # Grace period - time.sleep(2) - self.fuchsia_devices[0].ssh.run("wlan iface new --phy 0 --role Client") - end_time = time.time() + 300 - while time.time() < end_time: - time.sleep(1) - if self.dut.is_connected(): - try: - ping_result = self.dut.ping("8.8.8.8", 10, 1000, 1000, 25) - print(ping_result) - except Exception as err: - # TODO: Once we gain more stability, fail test when pinging fails - print("some err {}".format(err)) - time.sleep(2) # give time for some traffic - break - if not self.dut.is_connected(): - raise signals.TestFailure("Failed at iteration {}".format(i + 1)) - self.log.info("Iteration {} successful".format(i + 1)) - raise signals.TestPass("Success") - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/BUILD.gn b/src/antlion/tests/wlan/BUILD.gn deleted file mode 100644 index 717fed9..0000000 --- a/src/antlion/tests/wlan/BUILD.gn +++ /dev/null
@@ -1,31 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -assert(is_host, "antlion tests only supported for host testing") - -group("e2e_tests") { - testonly = true - public_deps = [ - "compliance:e2e_tests", - "facade:e2e_tests", - "functional:e2e_tests", - "misc:e2e_tests", - "performance:e2e_tests", - ] -} - -group("e2e_tests_quick") { - testonly = true - public_deps = [ - "functional:e2e_tests_quick", - ] -} - -group("e2e_tests_manual") { - testonly = true - public_deps = [ - "functional:e2e_tests_manual", - "performance:e2e_tests_manual", - ] -}
diff --git a/src/antlion/tests/wlan/compliance/BUILD.gn b/src/antlion/tests/wlan/compliance/BUILD.gn deleted file mode 100644 index bdfc396..0000000 --- a/src/antlion/tests/wlan/compliance/BUILD.gn +++ /dev/null
@@ -1,44 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("vape_interop_test") { - main_source = "VapeInteropTest.py" - environments = display_ap_envs -} - -antlion_host_test("wlan_phy_compliance_11ac_test") { - main_source = "WlanPhyCompliance11ACTest.py" - environments = display_ap_envs -} - -antlion_host_test("wlan_phy_compliance_11n_test") { - main_source = "WlanPhyCompliance11NTest.py" - environments = display_ap_envs -} - -antlion_host_test("wlan_phy_compliance_abg_test") { - main_source = "WlanPhyComplianceABGTest.py" - environments = display_ap_envs -} - -antlion_host_test("wlan_security_compliance_abg_test") { - main_source = "WlanSecurityComplianceABGTest.py" - environments = display_ap_envs -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":vape_interop_test($host_toolchain)", - ":wlan_phy_compliance_11ac_test($host_toolchain)", - ":wlan_phy_compliance_11n_test($host_toolchain)", - ":wlan_phy_compliance_abg_test($host_toolchain)", - ":wlan_security_compliance_abg_test($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/wlan/compliance/VapeInteropTest.py b/src/antlion/tests/wlan/compliance/VapeInteropTest.py deleted file mode 100644 index 32b39c6..0000000 --- a/src/antlion/tests/wlan/compliance/VapeInteropTest.py +++ /dev/null
@@ -1,923 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts, test_runner - - -class VapeInteropTest(base_test.WifiBaseTest): - """Tests interoperability with mock third party AP profiles. - - Test Bed Requirement: - * One Android or Fuchsia Device - * One Whirlwind Access Point - """ - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.access_point = self.access_points[0] - - # Same for both 2g and 5g - self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - self.password = utils.rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G) - self.security_profile_wpa2 = Security( - security_mode=hostapd_constants.WPA2_STRING, - password=self.password, - wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER, - ) - - self.access_point.stop_all_aps() - - def setup_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def test_associate_actiontec_pk5000_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="actiontec_pk5000", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_actiontec_pk5000_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="actiontec_pk5000", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_actiontec_mi424wr_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="actiontec_mi424wr", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - 
ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_actiontec_mi424wr_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="actiontec_mi424wr", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtac66u_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtac66u_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtac66u_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtac66u_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def 
test_associate_asus_rtac86u_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac86u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtac86u_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac86u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtac86u_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac86u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtac86u_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac86u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtac5300_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac5300", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtac5300_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac5300", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - 
self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtac5300_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac5300", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtac5300_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtac5300", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtn56u_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn56u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtn56u_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn56u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtn56u_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn56u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtn56u_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn56u", - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtn66u_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtn66u_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_asus_rtn66u_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_asus_rtn66u_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="asus_rtn66u", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_belkin_f9k1001v5_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="belkin_f9k1001v5", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed 
to connect.") - - def test_associate_belkin_f9k1001v5_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="belkin_f9k1001v5", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_linksys_ea4500_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea4500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_linksys_ea4500_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea4500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_linksys_ea4500_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea4500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_linksys_ea4500_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea4500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_linksys_ea9500_24ghz_open(self): - setup_ap( - 
access_point=self.access_point, - profile_name="linksys_ea9500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_linksys_ea9500_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea9500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_linksys_ea9500_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea9500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_linksys_ea9500_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_ea9500", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_linksys_wrt1900acv2_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_wrt1900acv2", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_linksys_wrt1900acv2_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_wrt1900acv2", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - 
self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_linksys_wrt1900acv2_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_wrt1900acv2", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_linksys_wrt1900acv2_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="linksys_wrt1900acv2", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_netgear_r7000_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_r7000", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_netgear_r7000_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_r7000", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_netgear_r7000_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_r7000", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_netgear_r7000_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_r7000", - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_netgear_wndr3400_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_wndr3400", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_netgear_wndr3400_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_wndr3400", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_netgear_wndr3400_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_wndr3400", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_netgear_wndr3400_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="netgear_wndr3400", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_securifi_almond_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="securifi_almond", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - 
asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_securifi_almond_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="securifi_almond", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_tplink_archerc5_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc5", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_archerc5_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc5", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_tplink_archerc5_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc5", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_archerc5_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc5", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def 
test_associate_tplink_archerc7_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc7", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_archerc7_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc7", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_tplink_archerc7_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc7", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_archerc7_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_archerc7", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_tplink_c1200_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_c1200", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_c1200_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_c1200", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - 
asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_tplink_c1200_5ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_c1200", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_c1200_5ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_c1200", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - def test_associate_tplink_tlwr940n_24ghz_open(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_tlwr940n", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.") - - def test_associate_tplink_tlwr940n_24ghz_wpa2(self): - setup_ap( - access_point=self.access_point, - profile_name="tplink_tlwr940n", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile_wpa2, - password=self.password, - ) - asserts.assert_true( - self.dut.associate( - self.ssid, - target_pwd=self.password, - target_security=hostapd_constants.WPA2_STRING, - ), - "Failed to connect.", - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py deleted file mode 100644 index 4b797a5..0000000 --- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py +++ /dev/null
@@ -1,312 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import itertools - -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib import hostapd_constants -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test -from antlion.utils import rand_ascii_str - -from mobly import asserts, test_runner - -# AC Capabilities -""" -Capabilities Not Supported on Whirlwind: - - Supported Channel Width ([VHT160], [VHT160-80PLUS80]): 160mhz and 80+80 - unsupported - - SU Beamformer [SU-BEAMFORMER] - - SU Beamformee [SU-BEAMFORMEE] - - MU Beamformer [MU-BEAMFORMER] - - MU Beamformee [MU-BEAMFORMEE] - - BF Antenna ([BF-ANTENNA-2], [BF-ANTENNA-3], [BF-ANTENNA-4]) - - Rx STBC 2, 3, & 4 ([RX-STBC-12],[RX-STBC-123],[RX-STBC-124]) - - VHT Link Adaptation ([VHT-LINK-ADAPT2],[VHT-LINK-ADAPT3]) - - VHT TXOP Power Save [VHT-TXOP-PS] - - HTC-VHT [HTC-VHT] -""" -VHT_MAX_MPDU_LEN = [ - hostapd_constants.AC_CAPABILITY_MAX_MPDU_7991, - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - "", -] -RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, ""] -SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, ""] -TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, ""] -RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, ""] -MAX_A_MPDU = [ - 
hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - "", -] -RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, ""] -TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, ""] - -# Default 11N Capabilities -N_CAPABS_40MHZ = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - hostapd_constants.N_CAPABILITY_HT40_PLUS, -] - -N_CAPABS_20MHZ = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, - hostapd_constants.N_CAPABILITY_HT20, -] - -# Default wpa2 profile. -WPA2_SECURITY = Security( - security_mode=hostapd_constants.WPA2_STRING, - password=rand_ascii_str(20), - wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER, - wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER, -) - -SECURITIES = [None, WPA2_SECURITY] - - -def generate_test_name(settings): - """Generates a test name string based on the ac_capabilities for - a test case. 
- - Args: - settings: a dict with the test settings (bandwidth, security, ac_capabs) - - Returns: - A string test case name - """ - chbw = settings["chbw"] - sec = "wpa2" if settings["security"] else "open" - ret = [] - for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys(): - if cap in settings["ac_capabilities"]: - ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap]) - return "test_11ac_%smhz_%s_%s" % (chbw, sec, "".join(ret)) - - -# 6912 test cases -class WlanPhyCompliance11ACTest(base_test.WifiBaseTest): - """Tests for validating 11ac PHYS. - - Test Bed Requirement: - * One Android device or Fuchsia device - * One Access Point - """ - - def __init__(self, controllers): - super().__init__(controllers) - - def setup_generated_tests(self): - test_args = ( - self._generate_20mhz_test_args() - + self._generate_40mhz_test_args() - + self._generate_80mhz_test_args() - ) - self.generate_tests( - test_logic=self.setup_and_connect, - name_func=generate_test_name, - arg_sets=test_args, - ) - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.access_point = self.access_points[0] - self.android_devices = getattr(self, "android_devices", []) - self.access_point.stop_all_aps() - - def setup_test(self): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def setup_and_connect(self, ap_settings): - """Uses ap_settings to set up ap and then attempts to associate a DUT. - - Args: - ap_settings: a dict containing test case settings, including - bandwidth, security, n_capabilities, and ac_capabilities - - """ - ssid = rand_ascii_str(20) - security = ap_settings["security"] - chbw = ap_settings["chbw"] - password = None - target_security = None - if security: - password = security.password - target_security = security.security_mode_string - n_capabilities = ap_settings["n_capabilities"] - ac_capabilities = ap_settings["ac_capabilities"] - - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - mode=hostapd_constants.MODE_11AC_MIXED, - channel=36, - n_capabilities=n_capabilities, - ac_capabilities=ac_capabilities, - force_wmm=True, - ssid=ssid, - security=security, - vht_bandwidth=chbw, - password=password, - ) - asserts.assert_true( - self.dut.associate( - ssid, target_pwd=password, target_security=target_security - ), - "Failed to associate.", - ) - - # 1728 tests - def _generate_20mhz_test_args(self): - test_args = [] - - # 864 test cases for open security - # 864 test cases for wpa2 security - for combination in itertools.product( - SECURITIES, - VHT_MAX_MPDU_LEN, - RXLDPC, - RX_STBC, - TX_STBC, - 
MAX_A_MPDU, - RX_ANTENNA, - TX_ANTENNA, - ): - security = combination[0] - ac_capabilities = combination[1:] - test_args.append( - ( - { - "chbw": 20, - "security": security, - "n_capabilities": N_CAPABS_20MHZ, - "ac_capabilities": ac_capabilities, - }, - ) - ) - - return test_args - - # 1728 tests - def _generate_40mhz_test_args(self): - test_args = [] - - # 864 test cases for open security - # 864 test cases for wpa2 security - for combination in itertools.product( - SECURITIES, - VHT_MAX_MPDU_LEN, - RXLDPC, - RX_STBC, - TX_STBC, - MAX_A_MPDU, - RX_ANTENNA, - TX_ANTENNA, - ): - security = combination[0] - ac_capabilities = combination[1:] - test_args.append( - ( - { - "chbw": 40, - "security": security, - "n_capabilities": N_CAPABS_40MHZ, - "ac_capabilities": ac_capabilities, - }, - ) - ) - - return test_args - - # 3456 tests - def _generate_80mhz_test_args(self): - test_args = [] - - # 1728 test cases for open security - # 1728 test cases for wpa2 security - for combination in itertools.product( - SECURITIES, - VHT_MAX_MPDU_LEN, - RXLDPC, - SHORT_GI_80, - RX_STBC, - TX_STBC, - MAX_A_MPDU, - RX_ANTENNA, - TX_ANTENNA, - ): - security = combination[0] - ac_capabilities = combination[1:] - test_args.append( - ( - { - "chbw": 80, - "security": security, - "n_capabilities": N_CAPABS_40MHZ, - "ac_capabilities": ac_capabilities, - }, - ) - ) - return test_args - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py deleted file mode 100644 index 244953e..0000000 --- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py +++ /dev/null
@@ -1,593 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import itertools - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_config -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.hostapd_utils import generate_random_password -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts, test_runner - -FREQUENCY_24 = ["2.4GHz"] -FREQUENCY_5 = ["5GHz"] -CHANNEL_BANDWIDTH_20 = ["HT20"] -CHANNEL_BANDWIDTH_40_LOWER = ["HT40-"] -CHANNEL_BANDWIDTH_40_UPPER = ["HT40+"] -SECURITY_OPEN = "open" -SECURITY_WPA2 = "wpa2" -N_MODE = [hostapd_constants.MODE_11N_PURE, hostapd_constants.MODE_11N_MIXED] -LDPC = [hostapd_constants.N_CAPABILITY_LDPC, ""] -TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, ""] -RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, ""] -SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, ""] -SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, ""] -DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, ""] -INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, ""] -MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, ""] -SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, ""] - - -def 
generate_test_name(settings): - """Generates a string based on the n_capabilities for a test case - - Args: - settings: A dictionary of hostapd constant n_capabilities. - - Returns: - A string that represents a test case name. - """ - ret = [] - for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys(): - if cap in settings["n_capabilities"]: - ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap]) - # '+' is used by Mobile Harness as special character, don't use it in test names - if settings["chbw"] == "HT40-": - chbw = "HT40Lower" - elif settings["chbw"] == "HT40+": - chbw = "HT40Upper" - else: - chbw = settings["chbw"] - return "test_11n_%s_%s_%s_%s_%s" % ( - settings["frequency"], - chbw, - settings["security"], - settings["n_mode"], - "".join(ret), - ) - - -class WlanPhyCompliance11NTest(base_test.WifiBaseTest): - """Tests for validating 11n PHYS. - - Test Bed Requirement: - * One Android device or Fuchsia device - * One Access Point - """ - - def __init__(self, controllers): - super().__init__(controllers) - - def setup_generated_tests(self): - test_args = ( - self._generate_24_HT20_test_args() - + self._generate_24_HT40_lower_test_args() - + self._generate_24_HT40_upper_test_args() - + self._generate_5_HT20_test_args() - + self._generate_5_HT40_lower_test_args() - + self._generate_5_HT40_upper_test_args() - + self._generate_24_HT20_wpa2_test_args() - + self._generate_24_HT40_lower_wpa2_test_args() - + self._generate_24_HT40_upper_wpa2_test_args() - + self._generate_5_HT20_wpa2_test_args() - + self._generate_5_HT40_lower_wpa2_test_args() - + self._generate_5_HT40_upper_wpa2_test_args() - ) - - self.generate_tests( - test_logic=self.setup_and_connect, - name_func=generate_test_name, - arg_sets=test_args, - ) - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == 
"android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' - ) - - self.access_point = self.access_points[0] - self.access_point.stop_all_aps() - - def setup_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def setup_and_connect(self, ap_settings): - """Generates a hostapd config, setups up the AP with that config, then - attempts to associate a DUT - - Args: - ap_settings: A dictionary of hostapd constant n_capabilities. 
- """ - ssid = utils.rand_ascii_str(20) - security_profile = None - password = None - temp_n_capabilities = list(ap_settings["n_capabilities"]) - n_capabilities = [] - for n_capability in temp_n_capabilities: - if n_capability in hostapd_constants.N_CAPABILITIES_MAPPING.keys(): - n_capabilities.append(n_capability) - - if ap_settings["chbw"] == "HT20" or ap_settings["chbw"] == "HT40+": - if ap_settings["frequency"] == "2.4GHz": - channel = 1 - elif ap_settings["frequency"] == "5GHz": - channel = 36 - else: - raise ValueError("Invalid frequence: %s" % ap_settings["frequency"]) - - elif ap_settings["chbw"] == "HT40-": - if ap_settings["frequency"] == "2.4GHz": - channel = 11 - elif ap_settings["frequency"] == "5GHz": - channel = 60 - else: - raise ValueError("Invalid frequency: %s" % ap_settings["frequency"]) - - else: - raise ValueError("Invalid channel bandwidth: %s" % ap_settings["chbw"]) - - if ap_settings["chbw"] == "HT40-" or ap_settings["chbw"] == "HT40+": - if hostapd_config.ht40_plus_allowed(channel): - extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS - elif hostapd_config.ht40_minus_allowed(channel): - extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS - else: - raise ValueError("Invalid channel: %s" % channel) - n_capabilities.append(extended_channel) - - if ap_settings["security"] == "wpa2": - security_profile = Security( - security_mode=SECURITY_WPA2, - password=generate_random_password(length=20), - wpa_cipher="CCMP", - wpa2_cipher="CCMP", - ) - password = security_profile.password - target_security = ( - hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - ap_settings["security"], None - ) - ) - - mode = ap_settings["n_mode"] - if mode not in N_MODE: - raise ValueError("Invalid n-mode: %s" % ap_settings["n-mode"]) - - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - mode=mode, - channel=channel, - n_capabilities=n_capabilities, - ac_capabilities=[], - force_wmm=True, - ssid=ssid, - 
security=security_profile, - password=password, - ) - asserts.assert_true( - self.dut.associate( - ssid, target_pwd=password, target_security=target_security - ), - "Failed to connect.", - ) - - def _generate_24_HT20_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_24, - CHANNEL_BANDWIDTH_20, - N_MODE, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - INTOLERANT_40, - MAX_AMPDU_7935, - SMPS, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_mode = combination[2] - n_capabilities = combination[3:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": n_mode, - "security": SECURITY_OPEN, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_24_HT40_lower_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_24, - CHANNEL_BANDWIDTH_40_LOWER, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_OPEN, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_24_HT40_upper_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_24, - CHANNEL_BANDWIDTH_40_UPPER, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_OPEN, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_5_HT20_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_5, - 
CHANNEL_BANDWIDTH_20, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - INTOLERANT_40, - MAX_AMPDU_7935, - SMPS, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_OPEN, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_5_HT40_lower_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_5, - CHANNEL_BANDWIDTH_40_LOWER, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_OPEN, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_5_HT40_upper_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_5, - CHANNEL_BANDWIDTH_40_UPPER, - N_MODE, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_mode = combination[2] - n_capabilities = combination[3:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": n_mode, - "security": SECURITY_OPEN, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_24_HT20_wpa2_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_24, - CHANNEL_BANDWIDTH_20, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - INTOLERANT_40, - MAX_AMPDU_7935, - SMPS, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": 
hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_WPA2, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_24_HT40_lower_wpa2_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_24, - CHANNEL_BANDWIDTH_40_LOWER, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_WPA2, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_24_HT40_upper_wpa2_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_24, - CHANNEL_BANDWIDTH_40_UPPER, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_WPA2, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_5_HT20_wpa2_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_5, - CHANNEL_BANDWIDTH_20, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - INTOLERANT_40, - MAX_AMPDU_7935, - SMPS, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_WPA2, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_5_HT40_lower_wpa2_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_5, - CHANNEL_BANDWIDTH_40_LOWER, - LDPC, - TX_STBC, 
- RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_WPA2, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - def _generate_5_HT40_upper_wpa2_test_args(self): - test_args = [] - for combination in itertools.product( - FREQUENCY_5, - CHANNEL_BANDWIDTH_40_UPPER, - LDPC, - TX_STBC, - RX_STBC, - SGI_20, - SGI_40, - MAX_AMPDU_7935, - SMPS, - DSSS_CCK, - ): - test_frequency = combination[0] - test_chbw = combination[1] - n_capabilities = combination[2:] - test_args.append( - ( - { - "frequency": test_frequency, - "chbw": test_chbw, - "n_mode": hostapd_constants.MODE_11N_MIXED, - "security": SECURITY_WPA2, - "n_capabilities": n_capabilities, - }, - ) - ) - return test_args - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py b/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py deleted file mode 100644 index 3cadf83..0000000 --- a/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py +++ /dev/null
@@ -1,1956 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts, test_runner - - -class WlanPhyComplianceABGTest(base_test.WifiBaseTest): - """Tests for validating 11a, 11b, and 11g PHYS. - - Test Bed Requirement: - * One Android device or Fuchsia device - * One Access Point - """ - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.access_point = self.access_points[0] - open_network = self.get_open_network(False, []) - open_network_min_len = self.get_open_network( - False, - [], - ssid_length_2g=hostapd_constants.AP_SSID_MIN_LENGTH_2G, - ssid_length_5g=hostapd_constants.AP_SSID_MIN_LENGTH_5G, - ) - open_network_max_len = self.get_open_network( - False, - [], - ssid_length_2g=hostapd_constants.AP_SSID_MAX_LENGTH_2G, - ssid_length_5g=hostapd_constants.AP_SSID_MAX_LENGTH_5G, - ) - self.open_network_2g = open_network["2g"] - self.open_network_5g = open_network["5g"] - self.open_network_max_len_2g = open_network_max_len["2g"] - self.open_network_max_len_2g["SSID"] = self.open_network_max_len_2g["SSID"][3:] - self.open_network_max_len_5g = open_network_max_len["5g"] - self.open_network_max_len_5g["SSID"] = self.open_network_max_len_5g["SSID"][3:] - self.open_network_min_len_2g = open_network_min_len["2g"] - self.open_network_min_len_2g["SSID"] = self.open_network_min_len_2g["SSID"][3:] - self.open_network_min_len_5g = open_network_min_len["5g"] - self.open_network_min_len_5g["SSID"] = self.open_network_min_len_5g["SSID"][3:] - - self.utf8_ssid_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢" - self.utf8_ssid_5g = "5𝔤_𝔊𝔬𝔬𝔤𝔩𝔢" - - self.utf8_ssid_2g_french = "Château du Feÿ" - self.utf8_password_2g_french = "du Feÿ Château" - - self.utf8_ssid_2g_german = "Rat für Straßenatlas" - self.utf8_password_2g_german = "für Straßenatlas Rat" - - self.utf8_ssid_2g_dutch = "Die niet óúd, is níéuw!" - self.utf8_password_2g_dutch = "niet óúd, is níéuw! 
Die" - - self.utf8_ssid_2g_swedish = "Det är femtioåtta" - self.utf8_password_2g_swedish = "femtioåtta Det är" - - self.utf8_ssid_2g_norwegian = "Curaçao ØÆ æ å å å" - self.utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å" - - # Danish and Norwegian has the same alphabet - self.utf8_ssid_2g_danish = self.utf8_ssid_2g_norwegian - self.utf8_password_2g_danish = self.utf8_password_2g_norwegian - - self.utf8_ssid_2g_japanese = "あなた はお母さん" - self.utf8_password_2g_japanese = "そっくりね。あな" - - self.utf8_ssid_2g_spanish = "¡No á,é,í,ó,ú,ü,ñ,¿,¡" - self.utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No" - - self.utf8_ssid_2g_italian = "caffè Pinocchio è italiano?" - self.utf8_password_2g_italian = "Pinocchio è italiano? caffè" - - self.utf8_ssid_2g_korean = "ㅘㅙㅚㅛㅜㅝㅞㅟㅠ" - self.utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ" - - self.access_point.stop_all_aps() - - def setup_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def test_associate_11b_only_long_preamble(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - preamble=False, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_short_preamble(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - preamble=True, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_minimal_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - beacon_interval=15, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_maximum_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - beacon_interval=1024, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_frag_threshold_430(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - frag_threshold=430, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_rts_threshold_256(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - rts_threshold=256, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_rts_256_frag_430(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - rts_threshold=256, - frag_threshold=430, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_high_dtim_low_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - dtim_period=3, - beacon_interval=100, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_low_dtim_high_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - dtim_period=1, - beacon_interval=300, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_with_default_values(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_with_WMM_with_non_default_values(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_ACM_on_BK(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_BK - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_ACM_on_BE(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_BE - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_with_WMM_ACM_on_VI(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_VI - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_ACM_on_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, hostapd_constants.WMM_ACM_VO - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VI(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_ACM_on_BK_VI_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_WMM_ACM_on_BE_VI_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_11B_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_with_country_code(self): - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["UNITED_STATES"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_non_country_code(self): - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["NON_COUNTRY"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_hidden_ssid(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - hidden=True, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_vendor_ie_in_beacon_correct_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11b_only_with_vendor_ie_in_beacon_zero_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_vendor_ie_in_assoc_correct_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_association_response" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11b_only_with_vendor_ie_in_assoc_zero_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_association_" "response_without_data" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_long_preamble(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - preamble=False, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_short_preamble(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - preamble=True, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_minimal_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - beacon_interval=15, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_maximum_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - beacon_interval=1024, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_frag_threshold_430(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - frag_threshold=430, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_rts_threshold_256(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - rts_threshold=256, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_rts_256_frag_430(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - rts_threshold=256, - frag_threshold=430, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_high_dtim_low_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - dtim_period=3, - beacon_interval=100, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_low_dtim_high_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - dtim_period=1, - beacon_interval=300, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_with_default_values(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_with_WMM_with_non_default_values(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_ACM_on_BK(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_ACM_on_BE(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_with_WMM_ACM_on_VI(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VI, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_ACM_on_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VI(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_ACM_on_BK_VI_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_WMM_ACM_on_BE_VI_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_with_country_code(self): - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["UNITED_STATES"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_non_country_code(self): - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["NON_COUNTRY"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_hidden_ssid(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - hidden=True, - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_vendor_ie_in_beacon_correct_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11a_only_with_vendor_ie_in_beacon_zero_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_vendor_ie_in_assoc_correct_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_association_response" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11a_only_with_vendor_ie_in_assoc_zero_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_association_" "response_without_data" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_long_preamble(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - preamble=False, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_short_preamble(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - preamble=True, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_minimal_beacon_interval(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - beacon_interval=15, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_maximum_beacon_interval(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - beacon_interval=1024, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_frag_threshold_430(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - frag_threshold=430, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_rts_threshold_256(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - rts_threshold=256, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_rts_256_frag_430(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - rts_threshold=256, - frag_threshold=430, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_high_dtim_low_beacon_interval(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - dtim_period=3, - beacon_interval=100, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_low_dtim_high_beacon_interval(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - dtim_period=1, - beacon_interval=300, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_WMM_with_default_values(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_WMM_with_non_default_values(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.WMM_NON_DEFAULT_PARAMS, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_WMM_ACM_on_BK(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_WMM_ACM_on_BE(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_WMM_ACM_on_VI(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VI, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_WMM_ACM_on_VO(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VO, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VI(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VO(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VO, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_WMM_ACM_on_BK_VI_VO(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_WMM_ACM_on_BE_VI_VO(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_country_code(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["UNITED_STATES"], - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_non_country_code(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["NON_COUNTRY"], - data_rates, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_hidden_ssid(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, hostapd_constants.OFDM_ONLY_BASIC_RATES - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - hidden=True, - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_vendor_ie_in_beacon_correct_length(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.VENDOR_IE["correct_length_beacon"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_vendor_ie_in_beacon_zero_length(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.VENDOR_IE["zero_length_beacon_without_data"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.VENDOR_IE["correct_length_association_response"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.VENDOR_IE["correct_length_association_response"], - hostapd_constants.VENDOR_IE[ - "zero_length_association_" "response_without_data" - ], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_only_long_preamble(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - preamble=False, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_short_preamble(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - preamble=True, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_minimal_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - beacon_interval=15, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11bg_maximum_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - beacon_interval=1024, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_frag_threshold_430(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - frag_threshold=430, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_rts_threshold_256(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - rts_threshold=256, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_rts_256_frag_430(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - rts_threshold=256, - frag_threshold=430, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_high_dtim_low_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - dtim_period=3, - beacon_interval=100, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11bg_low_dtim_high_beacon_interval(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - dtim_period=1, - beacon_interval=300, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_with_default_values(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_with_non_default_values(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_ACM_on_BK(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11bg_with_WMM_ACM_on_BE(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_ACM_on_VI(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VI, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_ACM_on_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11bg_with_WMM_ACM_on_BK_BE_VI(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_ACM_on_BK_BE_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_WMM_ACM_on_BK_VI_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BK, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11bg_with_WMM_ACM_on_BE_VI_VO(self): - wmm_acm_bits_enabled = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_BE, - hostapd_constants.WMM_ACM_VI, - hostapd_constants.WMM_ACM_VO, - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - force_wmm=True, - additional_ap_parameters=wmm_acm_bits_enabled, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_country_code(self): - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["UNITED_STATES"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_non_country_code(self): - country_info = utils.merge_dicts( - hostapd_constants.ENABLE_IEEE80211D, - hostapd_constants.COUNTRY_STRING["ALL"], - hostapd_constants.COUNTRY_CODE["NON_COUNTRY"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=country_info, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11bg_only_with_hidden_ssid(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - hidden=True, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_vendor_ie_in_beacon_correct_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.VENDOR_IE["correct_length_association_response"], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." 
- ) - - def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self): - data_rates = utils.merge_dicts( - hostapd_constants.OFDM_DATA_RATES, - hostapd_constants.OFDM_ONLY_BASIC_RATES, - hostapd_constants.VENDOR_IE["correct_length_association_response"], - hostapd_constants.VENDOR_IE[ - "zero_length_association_" "response_without_data" - ], - ) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ag_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - additional_ap_parameters=data_rates, - ) - asserts.assert_true( - self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate." - ) - - def test_minimum_ssid_length_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_min_len_2g["SSID"], - ) - asserts.assert_true( - self.dut.associate(self.open_network_min_len_2g["SSID"]), - "Failed to associate.", - ) - - def test_minimum_ssid_length_5g_11ac_80mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_min_len_5g["SSID"], - ) - asserts.assert_true( - self.dut.associate(self.open_network_min_len_5g["SSID"]), - "Failed to associate.", - ) - - def test_maximum_ssid_length_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_max_len_2g["SSID"], - ) - asserts.assert_true( - self.dut.associate(self.open_network_max_len_2g["SSID"]), - "Failed to associate.", - ) - - def test_maximum_ssid_length_5g_11ac_80mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_max_len_5g["SSID"], - ) - asserts.assert_true( - 
self.dut.associate(self.open_network_max_len_5g["SSID"]), - "Failed to associate.", - ) - - def test_ssid_with_UTF8_characters_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_5g_11ac_80mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.utf8_ssid_5g, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_5g), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_french_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_french, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_french), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_german_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_german, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_german), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_dutch_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_dutch, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_dutch), "Failed to associate." 
- ) - - def test_ssid_with_UTF8_characters_swedish_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_swedish, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_swedish), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_norwegian_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_norwegian, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_norwegian), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_danish_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_danish, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_danish), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_japanese_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_japanese, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_japanese), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_spanish_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_spanish, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_spanish), "Failed to associate." 
- ) - - def test_ssid_with_UTF8_characters_italian_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_italian, - ) - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_italian), "Failed to associate." - ) - - def test_ssid_with_UTF8_characters_korean_2g_11n_20mhz(self): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind_11ab_legacy", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.utf8_ssid_2g_korean, - ) - - asserts.assert_true( - self.dut.associate(self.utf8_ssid_2g_korean), "Failed to associate." - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py b/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py deleted file mode 100644 index 98f1903..0000000 --- a/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py +++ /dev/null
@@ -1,8313 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import re -from functools import wraps - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.hostapd_utils import generate_random_password -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts, test_runner - -AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy" -SSID_LENGTH_DEFAULT = 15 - - -def create_security_profile(test_func): - """Decorator for generating hostapd security profile object based on the - test name. - Args: - test_func: The test function - Returns: - security_profile_generator: The function that generates the security - profile object - """ - - @wraps(test_func) - def security_profile_generator(self, *args, **kwargs): - """Function that looks at the name of the function and determines what - the security profile should be based on what items are in the name - - Example: A function with the name sec_wpa_wpa2_ptk_ccmp_tkip would - return a security profile that has wpa and wpa2 configure with a - ptk cipher of ccmp or tkip. Removing one of those options would - drop it from the config. 
- - Args: - self: The object of the WlanSecurityComplianceABGTest class. - *args: args that were sent to the original test function - **kwargs: kwargs that were sent to the original test function - Returns: - The original function that was called - """ - utf8_password_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢" - utf8_password_2g_french = "du Feÿ Château" - utf8_password_2g_german = "für Straßenatlas Rat" - utf8_password_2g_dutch = "niet óúd, is níéuw! Die" - utf8_password_2g_swedish = "femtioåtta Det är" - utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å" - # Danish and Norwegian has the same alphabet - utf8_password_2g_danish = utf8_password_2g_norwegian - utf8_password_2g_japanese = "そっくりね。あな" - utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No" - utf8_password_2g_italian = "Pinocchio è italiano? caffè" - utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ" - - security = re.search(r"sec(.*?)ptk_(.*)", test_func.__name__) - security_mode = security.group(1) - ptk_type = security.group(2) - wpa_cipher = None - wpa2_cipher = None - if "_wpa_wpa2_wpa3_" in security_mode: - security_mode = hostapd_constants.WPA_WPA2_WPA3_MIXED_STRING - elif "_wpa_wpa2_" in security_mode: - security_mode = hostapd_constants.WPA_MIXED_STRING - elif "_wpa2_wpa3_" in security_mode: - security_mode = hostapd_constants.WPA2_WPA3_MIXED_STRING - elif "_wep_" in security_mode: - security_mode = hostapd_constants.WEP_STRING - elif "_wpa_" in security_mode: - security_mode = hostapd_constants.WPA_STRING - elif "_wpa2_" in security_mode: - security_mode = hostapd_constants.WPA2_STRING - elif "_wpa3_" in security_mode: - security_mode = hostapd_constants.WPA3_STRING - if "tkip" in ptk_type and "ccmp" in ptk_type: - wpa_cipher = "TKIP CCMP" - wpa2_cipher = "TKIP CCMP" - elif "tkip" in ptk_type: - wpa_cipher = "TKIP" - wpa2_cipher = "TKIP" - elif "ccmp" in ptk_type: - wpa_cipher = "CCMP" - wpa2_cipher = "CCMP" - if "max_length_password" in test_func.__name__: - password = generate_random_password( - 
length=hostapd_constants.MAX_WPA_PASSWORD_LENGTH - ) - elif "max_length_psk" in test_func.__name__: - password = str( - generate_random_password( - length=hostapd_constants.MAX_WPA_PSK_LENGTH, hex=True - ) - ).lower() - elif "wep_5_chars" in test_func.__name__: - password = generate_random_password(length=5) - elif "wep_13_chars" in test_func.__name__: - password = generate_random_password(length=13) - elif "wep_10_hex" in test_func.__name__: - password = str(generate_random_password(length=10, hex=True)).lower() - elif "wep_26_hex" in test_func.__name__: - password = str(generate_random_password(length=26, hex=True)).lower() - elif "utf8" in test_func.__name__: - if "french" in test_func.__name__: - password = utf8_password_2g_french - elif "german" in test_func.__name__: - password = utf8_password_2g_german - elif "dutch" in test_func.__name__: - password = utf8_password_2g_dutch - elif "swedish" in test_func.__name__: - password = utf8_password_2g_swedish - elif "norwegian" in test_func.__name__: - password = utf8_password_2g_norwegian - elif "danish" in test_func.__name__: - password = utf8_password_2g_danish - elif "japanese" in test_func.__name__: - password = utf8_password_2g_japanese - elif "spanish" in test_func.__name__: - password = utf8_password_2g_spanish - elif "italian" in test_func.__name__: - password = utf8_password_2g_italian - elif "korean" in test_func.__name__: - password = utf8_password_2g_korean - else: - password = utf8_password_2g - else: - password = generate_random_password() - target_security = ( - hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security_mode, None - ) - ) - - self.security_profile = Security( - security_mode=security_mode, - password=password, - wpa_cipher=wpa_cipher, - wpa2_cipher=wpa2_cipher, - ) - self.client_password = password - self.target_security = target_security - self.ssid = utils.rand_ascii_str(SSID_LENGTH_DEFAULT) - return test_func(self, *args, *kwargs) - - return 
security_profile_generator - - -class WlanSecurityComplianceABGTest(base_test.WifiBaseTest): - """Tests for validating 11a, 11b, and 11g PHYS. - - Test Bed Requirement: - * One Android device or Fuchsia device - * One Access Point - """ - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' - ) - - self.access_point = self.access_points[0] - - self.ssid = None - self.security_profile = None - self.client_password = None - - self.access_point.stop_all_aps() - - def setup_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - @create_security_profile - def test_associate_11a_sec_open_wep_5_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - 
target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_open_wep_13_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_open_wep_10_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_open_wep_26_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_shared_wep_5_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - 
ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_shared_wep_13_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_shared_wep_10_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_shared_wep_26_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - 
target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - 
asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - 
security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - 
access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - force_wmm=True, - 
additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - 
password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - 
password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): 
- setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed 
to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - 
access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - 
ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11a_pmf_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - 
password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - 
asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, 
- target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - 
self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self): - 
setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - 
def _setup_ap_and_associate(self, **ap_params) -> None:
    """Bring up the 11a (5 GHz) AP and assert that the DUT associates.

    Shared driver for the association test cases below, which previously
    duplicated this body verbatim. Uses the security profile and client
    password on `self` — NOTE(review): these appear to be populated by the
    @create_security_profile decorator based on the test name; confirm
    against the decorator's definition.

    Args:
        **ap_params: Extra keyword arguments forwarded verbatim to
            setup_ap, e.g. force_wmm, frag_threshold, rts_threshold,
            dtim_period, beacon_interval, pmf_support,
            additional_ap_parameters.

    Raises:
        signals.TestFailure: if the DUT fails to associate.
    """
    setup_ap(
        access_point=self.access_point,
        profile_name=AP_11ABG_PROFILE_NAME,
        channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
        ssid=self.ssid,
        security=self.security_profile,
        password=self.client_password,
        **ap_params,
    )
    asserts.assert_true(
        self.dut.associate(
            self.ssid,
            target_security=self.target_security,
            target_pwd=self.client_password,
        ),
        "Failed to associate.",
    )

# --- WPA/WPA2 PSK ---

@create_security_profile
def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
    self._setup_ap_and_associate(frag_threshold=430, force_wmm=False)

@create_security_profile
def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(frag_threshold=430, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        rts_threshold=256, frag_threshold=430, force_wmm=False
    )

@create_security_profile
def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.HIGH_DTIM,
        beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.LOW_DTIM,
        beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
    self,
):
    # NOTE: original uses the 11B WMM parameter set here, unlike the later
    # WMM cases which use WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS.
    self._setup_ap_and_associate(
        force_wmm=True,
        additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "correct_length_beacon"
        ],
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "zero_length_beacon_without_data"
        ],
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
    self,
):
    # "simliar_to_wpa" [sic] is the actual key in hostapd_constants.VENDOR_IE.
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
    )

# --- WPA3 SAE ---

@create_security_profile
def test_associate_11a_sec_wpa3_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_frag_430_sec_wpa3_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(frag_threshold=430, force_wmm=False)

@create_security_profile
def test_associate_11a_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(frag_threshold=430, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa3_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        rts_threshold=256, frag_threshold=430, force_wmm=False
    )

@create_security_profile
def test_associate_11a_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.HIGH_DTIM,
        beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.LOW_DTIM,
        beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        force_wmm=True,
        additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "correct_length_beacon"
        ],
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "zero_length_beacon_without_data"
        ],
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
    )

# --- WPA2/WPA3 PSK+SAE ---

@create_security_profile
def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(frag_threshold=430, force_wmm=False)

@create_security_profile
def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(frag_threshold=430, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(rts_threshold=256, force_wmm=False)

@create_security_profile
def test_associate_11a_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        rts_threshold=256, frag_threshold=430, force_wmm=False
    )

@create_security_profile
def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.HIGH_DTIM,
        beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.LOW_DTIM,
        beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        force_wmm=True,
        additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "correct_length_beacon"
        ],
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "zero_length_beacon_without_data"
        ],
    )

@create_security_profile
def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
    )

# --- WPA2/WPA3 PSK+SAE with PMF required ---

@create_security_profile
def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, force_wmm=False
    )

@create_security_profile
def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, force_wmm=False
    )

@create_security_profile
def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, force_wmm=False
    )

@create_security_profile
def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, force_wmm=False
    )

@create_security_profile
def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        frag_threshold=430,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        frag_threshold=430,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        rts_threshold=256,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        rts_threshold=256,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        rts_threshold=256,
        frag_threshold=430,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.HIGH_DTIM,
        beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        dtim_period=hostapd_constants.LOW_DTIM,
        beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
        force_wmm=False,
    )

@create_security_profile
def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        force_wmm=True,
        additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
    )

@create_security_profile
def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "correct_length_beacon"
        ],
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
    )

@create_security_profile
def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE[
            "zero_length_beacon_without_data"
        ],
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
    )

@create_security_profile
def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(
        additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
        pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
    )

# --- WPA/WPA2/WPA3 PSK+SAE ---

@create_security_profile
def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
    self._setup_ap_and_associate(force_wmm=False)

@create_security_profile
def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
    self,
):
    self._setup_ap_and_associate(force_wmm=False)
@create_security_profile - def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - 
- asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - 
target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - 
@create_security_profile - def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - 
frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - 
target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - 
password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - 
security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_open_wep_5_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_open_wep_13_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_open_wep_10_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - 
- @create_security_profile - def test_associate_11bg_sec_open_wep_26_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["open"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_shared_wep_5_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_shared_wep_13_chars_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_shared_wep_10_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - 
password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_shared_wep_26_hex_ptk_none(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"], - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - 
profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def 
test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - 
), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( 
- self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self): - setup_ap( 
- access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - 
ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def 
test_associate_11bg_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), 
- "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - 
password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - 
@create_security_profile - def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): 
- setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. 
If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. 
If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. 
If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. 
If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_false( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Expected failure to associate. This device must support TKIP and " - "PMF, which is not supported on Fuchsia. 
If this device is a " - "mainstream device, we need to reconsider adding support for TKIP " - "and PMF on Fuchsia.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def 
test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - 
"Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - 
target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - 
force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def 
test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - 
profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - 
ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa3_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa3_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(self): - setup_ap( - 
access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - 
@create_security_profile - def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - 
ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - 
security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - 
access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - 
"Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - 
dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - 
"correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - 
access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - 
pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - 
beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - 
asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed 
to associate.", - ) - - @create_security_profile - def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def 
test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - 
access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - 
pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - rts_threshold=256, - frag_threshold=430, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.HIGH_DTIM, - beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - dtim_period=hostapd_constants.LOW_DTIM, - beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - force_wmm=True, - additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS, - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "correct_length_beacon" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - 
password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE[ - "zero_length_beacon_without_data" - ], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp( - self, - ): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"], - security=self.security_profile, - pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED, - password=self.client_password, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - 
target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_french_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_german_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_dutch_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_swedish_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - 
force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_norwegian_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_danish_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_japanese_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_spanish_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - 
ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_italian_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - @create_security_profile - def test_associate_utf8_korean_password_11bg_sec_wpa2_psk_ptk_ccmp(self): - setup_ap( - access_point=self.access_point, - profile_name=AP_11ABG_PROFILE_NAME, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - security=self.security_profile, - password=self.client_password, - force_wmm=False, - ) - - asserts.assert_true( - self.dut.associate( - self.ssid, - target_security=self.target_security, - target_pwd=self.client_password, - ), - "Failed to associate.", - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/BUILD.gn b/src/antlion/tests/wlan/facade/BUILD.gn deleted file mode 100644 index c62ce70..0000000 --- a/src/antlion/tests/wlan/facade/BUILD.gn +++ /dev/null
@@ -1,32 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("wlan_deprecated_configuration_test") { - main_source = "WlanDeprecatedConfigurationTest.py" - environments = display_envs -} - -antlion_host_test("wlan_facade_test") { - main_source = "WlanFacadeTest.py" - environments = display_envs -} - -antlion_host_test("wlan_status_test") { - main_source = "WlanStatusTest.py" - environments = display_envs -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":wlan_deprecated_configuration_test($host_toolchain)", - ":wlan_facade_test($host_toolchain)", - ":wlan_status_test($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py b/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py deleted file mode 100644 index 7fee369..0000000 --- a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py +++ /dev/null
@@ -1,200 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, test_runner - -AP_ROLE = "Ap" -DEFAULT_SSID = "testssid" -DEFAULT_SECURITY = "none" -DEFAULT_PASSWORD = "" -DEFAULT_CONNECTIVITY_MODE = "local_only" -DEFAULT_OPERATING_BAND = "any" -TEST_MAC_ADDR = "12:34:56:78:9a:bc" -TEST_MAC_ADDR_SECONDARY = "bc:9a:78:56:34:12" - - -class WlanDeprecatedConfigurationTest(base_test.WifiBaseTest): - """Tests for WlanDeprecatedConfigurationFacade""" - - def setup_class(self): - super().setup_class() - self.dut = create_wlan_device(self.fuchsia_devices[0]) - - def setup_test(self): - self._stop_soft_aps() - - def teardown_test(self): - self._stop_soft_aps() - - def _get_ap_interface_mac_address(self): - """Retrieves mac address from wlan interface with role ap - - Returns: - string, the mac address of the AP interface - - Raises: - ConnectionError, if SL4F calls fail - AttributeError, if no interface has role 'Ap' - """ - wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList() - if wlan_ifaces.get("error"): - raise ConnectionError( - "Failed to get wlan interface IDs: %s" % wlan_ifaces["error"] - ) - - for wlan_iface in wlan_ifaces["result"]: - iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(wlan_iface) 
- if iface_info.get("error"): - raise ConnectionError( - "Failed to query wlan iface: %s" % iface_info["error"] - ) - - if iface_info["result"]["role"] == AP_ROLE: - if "mac_addr" in iface_info["result"]: - return utils.mac_address_list_to_str( - iface_info["result"]["mac_addr"] - ) - elif "sta_addr" in iface_info["result"]: - return utils.mac_address_list_to_str( - iface_info["result"]["sta_addr"] - ) - raise AttributeError("AP iface info does not contain MAC address.") - raise AttributeError( - "Failed to get ap interface mac address. No AP interface found." - ) - - def _start_soft_ap(self): - """Starts SoftAP on DUT. - - Raises: - ConnectionError, if SL4F call fails. - """ - self.log.info("Starting SoftAP on Fuchsia device (%s)." % self.dut.device.ip) - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint( - DEFAULT_SSID, - DEFAULT_SECURITY, - DEFAULT_PASSWORD, - DEFAULT_CONNECTIVITY_MODE, - DEFAULT_OPERATING_BAND, - ) - if response.get("error"): - raise ConnectionError("Failed to setup SoftAP: %s" % response["error"]) - - def _stop_soft_aps(self): - """Stops SoftAP on DUT. - - Raises: - ConnectionError, if SL4F call fails. - """ - self.log.info("Stopping SoftAP.") - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint() - if response.get("error"): - raise ConnectionError("Failed to stop SoftAP: %s" % response["error"]) - - def _suggest_ap_mac_addr(self, mac_addr): - """Suggests mac address for AP interface. - Args: - mac_addr: string, mac address to suggest. - - Raises: - TestFailure, if SL4F call fails. - """ - self.log.info( - "Suggesting AP mac addr (%s) via wlan_deprecated_configuration_lib." 
- % mac_addr - ) - response = self.dut.device.sl4f.wlan_deprecated_configuration_lib.wlanSuggestAccessPointMacAddress( - mac_addr - ) - if response.get("error"): - asserts.fail( - "Failed to suggest AP mac address (%s): %s" - % (mac_addr, response["error"]) - ) - - def _verify_mac_addr(self, expected_addr): - """Verifies mac address of ap interface is set to expected mac address. - - Args: - Args: - expected_addr: string, expected mac address - - Raises: - TestFailure, if actual mac address is not expected mac address. - """ - set_mac_addr = self._get_ap_interface_mac_address() - if set_mac_addr != expected_addr: - asserts.fail( - "Failed to set AP mac address " - "via wlan_deprecated_configuration_lib. Expected mac addr: %s," - " Actual mac addr: %s" % (expected_addr, set_mac_addr) - ) - else: - self.log.info("AP mac address successfully set to %s" % expected_addr) - - def test_suggest_ap_mac_address(self): - """Tests suggest ap mac address SL4F call - - 1. Get initial mac address - 2. Suggest new mac address - 3. Verify new mac address is set successfully - 4. Reset to initial mac address - 5. 
Verify initial mac address is reset successfully - - - Raises: - TestFailure, if wlanSuggestAccessPointMacAddress call fails or - of mac address is not the suggest value - ConnectionError, if other SL4F calls fail - """ - # Retrieve initial ap mac address - self._start_soft_ap() - - self.log.info("Getting initial mac address.") - initial_mac_addr = self._get_ap_interface_mac_address() - self.log.info("Initial mac address: %s" % initial_mac_addr) - - if initial_mac_addr != TEST_MAC_ADDR: - suggested_mac_addr = TEST_MAC_ADDR - else: - suggested_mac_addr = TEST_MAC_ADDR_SECONDARY - - self._stop_soft_aps() - - # Suggest and verify new mac address - self._suggest_ap_mac_addr(suggested_mac_addr) - - self._start_soft_ap() - - self._verify_mac_addr(suggested_mac_addr) - - self._stop_soft_aps() - - # Reset to initial mac address and verify - self.log.info("Resetting to initial mac address (%s)." % initial_mac_addr) - self._suggest_ap_mac_addr(initial_mac_addr) - - self._start_soft_ap() - - self._verify_mac_addr(initial_mac_addr) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/WlanFacadeTest.py b/src/antlion/tests/wlan/facade/WlanFacadeTest.py deleted file mode 100644 index fdddf69..0000000 --- a/src/antlion/tests/wlan/facade/WlanFacadeTest.py +++ /dev/null
@@ -1,81 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Script for verifying that we can invoke methods of the WlanFacade. - -""" -import array - -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, signals, test_runner - - -class WlanFacadeTest(base_test.WifiBaseTest): - def setup_class(self): - super().setup_class() - if len(self.fuchsia_devices) < 1: - raise signals.TestAbortClass( - "Sorry, please try verifying FuchsiaDevice is in your " - "config file and try again." 
- ) - self.dut = create_wlan_device(self.fuchsia_devices[0]) - - def test_get_phy_id_list(self): - result = self.dut.device.sl4f.wlan_lib.wlanPhyIdList() - error = result["error"] - asserts.assert_true(error is None, error) - - self.log.info("Got Phy IDs %s" % result["result"]) - return True - - def test_get_country(self): - wlan_lib = self.dut.device.sl4f.wlan_lib - - result = wlan_lib.wlanPhyIdList() - error = result["error"] - asserts.assert_true(error is None, error) - phy_id = result["result"][0] - - result = wlan_lib.wlanGetCountry(phy_id) - error = result["error"] - asserts.assert_true(error is None, error) - - country_bytes = result["result"] - country_string = str(array.array("b", country_bytes), encoding="us-ascii") - self.log.info("Got country %s (%s)", country_string, country_bytes) - return True - - def test_get_dev_path(self): - wlan_lib = self.dut.device.sl4f.wlan_lib - - result = wlan_lib.wlanPhyIdList() - error = result["error"] - asserts.assert_true(error is None, error) - phy_id = result["result"][0] - - result = wlan_lib.wlanGetDevPath(phy_id) - error = result["error"] - asserts.assert_true(error is None, error) - - dev_path = result["result"] - self.log.info("Got device path: %s", dev_path) - return True - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/WlanStatusTest.py b/src/antlion/tests/wlan/facade/WlanStatusTest.py deleted file mode 100644 index cf70b0d..0000000 --- a/src/antlion/tests/wlan/facade/WlanStatusTest.py +++ /dev/null
@@ -1,85 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Test to verify that a DUT's client interface's status can be queried. -""" - -from antlion.test_utils.wifi import base_test - -from mobly import signals, test_runner - - -class WlanStatusTest(base_test.WifiBaseTest): - """WLAN status test class. - - Test Bed Requirements: - * One or more Fuchsia devices with WLAN client capabilities. - """ - - def setup_class(self): - super().setup_class() - for fd in self.fuchsia_devices: - fd.configure_wlan( - association_mechanism="policy", preserve_saved_networks=True - ) - - def on_fail(self, test_name, begin_time): - for fd in self.fuchsia_devices: - super().on_device_fail(fd, test_name, begin_time) - fd.configure_wlan( - association_mechanism="policy", preserve_saved_networks=True - ) - - def test_wlan_stopped_client_status(self): - """Queries WLAN status on DUTs with no WLAN ifaces. - - Tests that DUTs without WLAN interfaces have empty results and return - an error when queried for status. - """ - for fd in self.fuchsia_devices: - fd.deconfigure_wlan() - - status = fd.sl4f.wlan_lib.wlanStatus() - self.log.debug(status) - if not status["error"] or status["result"]: - raise signals.TestFailure("DUT's WLAN client status should be empty") - - raise signals.TestPass("Success") - - def test_wlan_started_client_status(self): - """Queries WLAN status on DUTs with WLAN ifaces. 
- - Tests that, once WLAN client interfaces have been created, each one - returns a result and that none of them return errors when queried for - status. - """ - for fd in self.fuchsia_devices: - fd.configure_wlan( - association_mechanism="policy", preserve_saved_networks=True - ) - - status = fd.sl4f.wlan_lib.wlanStatus() - self.log.debug(status) - if status["error"] or not status["result"]: - raise signals.TestFailure( - "DUT's WLAN client status should be populated" - ) - - raise signals.TestPass("Success") - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/BUILD.gn b/src/antlion/tests/wlan/functional/BUILD.gn deleted file mode 100644 index 7171f0a..0000000 --- a/src/antlion/tests/wlan/functional/BUILD.gn +++ /dev/null
@@ -1,97 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("beacon_loss_test") { - main_source = "BeaconLossTest.py" - environments = display_ap_envs -} - -antlion_host_test("channel_switch_test") { - main_source = "ChannelSwitchTest.py" - environments = display_ap_envs -} - -antlion_host_test("connection_stress_test") { - main_source = "ConnectionStressTest.py" - environments = display_ap_envs -} - -antlion_host_test("download_stress_test") { - main_source = "DownloadStressTest.py" - - # Requires external internet access. This is considered bad practice for an - # automated test due to reliance on external services. Will remain an at-desk - # dest until rewritten to remove dependance on external services. - # environments = display_ap_envs - environments = [] -} - -antlion_host_test("ping_stress_test") { - main_source = "PingStressTest.py" - environments = display_ap_envs -} - -antlion_host_test("soft_ap_test") { - main_source = "SoftApTest.py" - - # Requires one Fuchsia device and one Anddroid device. There are no - # infra-hosted environments to run this test on. Will likely remain an at-desk - # test for as long as it requires an Android device. 
- environments = [] -} - -antlion_host_test("wlan_reboot_test") { - main_source = "WlanRebootTest.py" - test_params = "wlan_reboot_test_params.yaml" - environments = display_ap_iperf_envs -} - -antlion_host_test("wlan_scan_test") { - main_source = "WlanScanTest.py" - environments = display_ap_envs -} - -antlion_host_test("wlan_target_security_test") { - main_source = "WlanTargetSecurityTest.py" - environments = display_ap_envs -} - -antlion_host_test("wlan_wireless_network_management_test") { - main_source = "WlanWirelessNetworkManagementTest.py" - environments = display_ap_envs -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":beacon_loss_test($host_toolchain)", - ":channel_switch_test($host_toolchain)", - ":ping_stress_test($host_toolchain)", - ":wlan_reboot_test($host_toolchain)", - ":wlan_scan_test($host_toolchain)", - ":wlan_target_security_test($host_toolchain)", - ":wlan_wireless_network_management_test($host_toolchain)", - ] -} - -group("e2e_tests_quick") { - testonly = true - public_deps = [ - ":ping_stress_test($host_toolchain)", - ] -} - -# Tests that are disabled in automation -group("e2e_tests_manual") { - testonly = true - public_deps = [ - ":download_stress_test($host_toolchain)", - ":soft_ap_test($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/wlan/functional/BeaconLossTest.py b/src/antlion/tests/wlan/functional/BeaconLossTest.py deleted file mode 100644 index 60c6a78..0000000 --- a/src/antlion/tests/wlan/functional/BeaconLossTest.py +++ /dev/null
@@ -1,142 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Script for testing WiFi recovery after rebooting the AP. - -Override default number of iterations using the following -parameter in the test config file. - -"beacon_loss_test_iterations": "5" -""" - -import time - -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test -from antlion.utils import rand_ascii_str - -from mobly import asserts, test_runner - - -class BeaconLossTest(base_test.WifiBaseTest): - # Default number of test iterations here. - # Override using parameter in config file. 
- # Eg: "beacon_loss_test_iterations": "10" - num_of_iterations = 5 - - # Time to wait for AP to startup - wait_ap_startup_s = 15 - - # Default wait time in seconds for the AP radio to turn back on - wait_to_connect_after_ap_txon_s = 5 - - # Time to wait for device to disconnect after AP radio of - wait_after_ap_txoff_s = 15 - - # Time to wait for device to complete connection setup after - # given an associate command - wait_client_connection_setup_s = 15 - - def setup_class(self): - super().setup_class() - self.ssid = rand_ascii_str(10) - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' - ) - - self.access_point = self.access_points[0] - self.num_of_iterations = int( - self.user_params.get("beacon_loss_test_iterations", self.num_of_iterations) - ) - self.in_use_interface = None - - def teardown_test(self): - self.dut.disconnect() - self.dut.reset_wifi() - # ensure radio is on, in case the test failed while the radio was off - self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower on") - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def beacon_loss(self, channel): - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=channel, - ssid=self.ssid, - ) - time.sleep(self.wait_ap_startup_s) - if channel > 14: - self.in_use_interface = self.access_point.wlan_5g - else: - self.in_use_interface = self.access_point.wlan_2g - - # TODO(b/144505723): [ACTS] update BeaconLossTest.py to handle client - # roaming, saved networks, etc. 
- self.log.info("sending associate command for ssid %s", self.ssid) - self.dut.associate(target_ssid=self.ssid) - - asserts.assert_true(self.dut.is_connected(), "Failed to connect.") - - time.sleep(self.wait_client_connection_setup_s) - - for _ in range(0, self.num_of_iterations): - # Turn off AP radio - self.log.info("turning off radio") - self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower off") - time.sleep(self.wait_after_ap_txoff_s) - - # Did we disconnect from AP? - asserts.assert_false(self.dut.is_connected(), "Failed to disconnect.") - - # Turn on AP radio - self.log.info("turning on radio") - self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower on") - time.sleep(self.wait_to_connect_after_ap_txon_s) - - # Tell the client to connect - self.log.info("sending associate command for ssid %s" % self.ssid) - self.dut.associate(target_ssid=self.ssid) - time.sleep(self.wait_client_connection_setup_s) - - # Did we connect back to WiFi? - asserts.assert_true(self.dut.is_connected(), "Failed to connect back.") - - return True - - def test_beacon_loss_2g(self): - self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G) - - def test_beacon_loss_5g(self): - self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py b/src/antlion/tests/wlan/functional/ChannelSwitchTest.py deleted file mode 100644 index b9a674c..0000000 --- a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py +++ /dev/null
@@ -1,412 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Tests STA handling of channel switch announcements. -""" - -import random -import time -from typing import Sequence - -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.utils import rand_ascii_str -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, test_runner - - -class ChannelSwitchTest(base_test.WifiBaseTest): - # Time to wait between issuing channel switches - WAIT_BETWEEN_CHANNEL_SWITCHES_S = 15 - - # For operating class 115 tests. - GLOBAL_OPERATING_CLASS_115_CHANNELS = [36, 40, 44, 48] - # A channel outside the operating class. - NON_GLOBAL_OPERATING_CLASS_115_CHANNEL = 52 - - # For operating class 124 tests. - GLOBAL_OPERATING_CLASS_124_CHANNELS = [149, 153, 157, 161] - # A channel outside the operating class. 
- NON_GLOBAL_OPERATING_CLASS_124_CHANNEL = 52 - - def setup_class(self) -> None: - super().setup_class() - self.ssid = rand_ascii_str(10) - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' - ) - - self.access_point = self.access_points[0] - self._stop_all_soft_aps() - self.in_use_interface = None - - def teardown_test(self) -> None: - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - # TODO(fxbug.dev/85738): Change band type to an enum. - def channel_switch( - self, - band: str, - starting_channel: int, - channel_switches: Sequence[int], - test_with_soft_ap: bool = False, - ) -> None: - """Setup and run a channel switch test with the given parameters. - - Creates an AP, associates to it, and then issues channel switches - through the provided channels. After each channel switch, the test - checks that the DUT is connected for a period of time before considering - the channel switch successful. If directed to start a SoftAP, the test - will also check that the SoftAP is on the expected channel after each - channel switch. - - Args: - band: band that AP will use, must be a valid band (e.g. 
- hostapd_constants.BAND_2G) - starting_channel: channel number that AP will use at startup - channel_switches: ordered list of channels that the test will - attempt to switch to - test_with_soft_ap: whether to start a SoftAP before beginning the - channel switches (default is False); note that if a SoftAP is - started, the test will also check that the SoftAP handles - channel switches correctly - """ - asserts.assert_true( - band in [hostapd_constants.BAND_2G, hostapd_constants.BAND_5G], - "Failed to setup AP, invalid band {}".format(band), - ) - - self.current_channel_num = starting_channel - if band == hostapd_constants.BAND_5G: - self.in_use_interface = self.access_point.wlan_5g - elif band == hostapd_constants.BAND_2G: - self.in_use_interface = self.access_point.wlan_2g - asserts.assert_true( - self._channels_valid_for_band([self.current_channel_num], band), - "starting channel {} not a valid channel for band {}".format( - self.current_channel_num, band - ), - ) - - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=self.current_channel_num, - ssid=self.ssid, - ) - if test_with_soft_ap: - self._start_soft_ap() - self.log.info("sending associate command for ssid %s", self.ssid) - self.dut.associate(target_ssid=self.ssid) - asserts.assert_true(self.dut.is_connected(), "Failed to connect.") - - asserts.assert_true( - channel_switches, "Cannot run test, no channels to switch to" - ) - asserts.assert_true( - self._channels_valid_for_band(channel_switches, band), - "channel_switches {} includes invalid channels for band {}".format( - channel_switches, band - ), - ) - - for channel_num in channel_switches: - if channel_num == self.current_channel_num: - continue - self.log.info( - "channel switch: {} -> {}".format(self.current_channel_num, channel_num) - ) - self.access_point.channel_switch(self.in_use_interface, channel_num) - channel_num_after_switch = self.access_point.get_current_channel( - self.in_use_interface - ) - 
asserts.assert_equal( - channel_num_after_switch, channel_num, "AP failed to channel switch" - ) - self.current_channel_num = channel_num - - # Check periodically to see if DUT stays connected. Sometimes - # CSA-induced disconnects occur seconds after last channel switch. - for _ in range(self.WAIT_BETWEEN_CHANNEL_SWITCHES_S): - asserts.assert_true( - self.dut.is_connected(), - "Failed to stay connected after channel switch.", - ) - client_channel = self._client_channel() - asserts.assert_equal( - client_channel, - channel_num, - "Client interface on wrong channel ({})".format(client_channel), - ) - if test_with_soft_ap: - soft_ap_channel = self._soft_ap_channel() - asserts.assert_equal( - soft_ap_channel, - channel_num, - "SoftAP interface on wrong channel ({})".format( - soft_ap_channel - ), - ) - time.sleep(1) - - def test_channel_switch_2g(self) -> None: - """Channel switch through all (US only) channels in the 2 GHz band.""" - self.channel_switch( - band=hostapd_constants.BAND_2G, - starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - channel_switches=hostapd_constants.US_CHANNELS_2G, - ) - - def test_channel_switch_2g_with_soft_ap(self) -> None: - """Channel switch through (US only) 2 Ghz channels with SoftAP up.""" - self.channel_switch( - band=hostapd_constants.BAND_2G, - starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - channel_switches=hostapd_constants.US_CHANNELS_2G, - test_with_soft_ap=True, - ) - - def test_channel_switch_2g_shuffled_with_soft_ap(self) -> None: - """Switch through shuffled (US only) 2 Ghz channels with SoftAP up.""" - channels = hostapd_constants.US_CHANNELS_2G - random.shuffle(channels) - self.log.info("Shuffled channel switch sequence: {}".format(channels)) - self.channel_switch( - band=hostapd_constants.BAND_2G, - starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - channel_switches=channels, - test_with_soft_ap=True, - ) - - # TODO(fxbug.dev/84777): This test fails. 
- def test_channel_switch_5g(self) -> None: - """Channel switch through all (US only) channels in the 5 GHz band.""" - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - channel_switches=hostapd_constants.US_CHANNELS_5G, - ) - - # TODO(fxbug.dev/84777): This test fails. - def test_channel_switch_5g_with_soft_ap(self) -> None: - """Channel switch through (US only) 5 GHz channels with SoftAP up.""" - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - channel_switches=hostapd_constants.US_CHANNELS_5G, - test_with_soft_ap=True, - ) - - def test_channel_switch_5g_shuffled_with_soft_ap(self) -> None: - """Switch through shuffled (US only) 5 Ghz channels with SoftAP up.""" - channels = hostapd_constants.US_CHANNELS_5G - random.shuffle(channels) - self.log.info("Shuffled channel switch sequence: {}".format(channels)) - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - channel_switches=channels, - test_with_soft_ap=True, - ) - - # TODO(fxbug.dev/84777): This test fails. - def test_channel_switch_regression_global_operating_class_115(self) -> None: - """Channel switch into, through, and out of global op. class 115 channels. - - Global operating class 115 is described in IEEE 802.11-2016 Table E-4. - Regression test for fxbug.dev/84777. - """ - channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [ - self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL - ] - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL, - channel_switches=channels, - ) - - # TODO(fxbug.dev/84777): This test fails. - def test_channel_switch_regression_global_operating_class_115_with_soft_ap( - self, - ) -> None: - """Test global operating class 124 channel switches, with SoftAP. - - Regression test for fxbug.dev/84777. 
- """ - channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [ - self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL - ] - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL, - channel_switches=channels, - test_with_soft_ap=True, - ) - - # TODO(fxbug.dev/84777): This test fails. - def test_channel_switch_regression_global_operating_class_124(self) -> None: - """Switch into, through, and out of global op. class 124 channels. - - Global operating class 124 is described in IEEE 802.11-2016 Table E-4. - Regression test for fxbug.dev/64279. - """ - channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [ - self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL - ] - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL, - channel_switches=channels, - ) - - # TODO(fxbug.dev/84777): This test fails. - def test_channel_switch_regression_global_operating_class_124_with_soft_ap( - self, - ) -> None: - """Test global operating class 124 channel switches, with SoftAP. - - Regression test for fxbug.dev/64279. - """ - channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [ - self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL - ] - self.channel_switch( - band=hostapd_constants.BAND_5G, - starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL, - channel_switches=channels, - test_with_soft_ap=True, - ) - - def _channels_valid_for_band(self, channels: Sequence[int], band: str) -> bool: - """Determine if the channels are valid for the band (US only). - - Args: - channels: channel numbers - band: a valid band (e.g. 
hostapd_constants.BAND_2G) - """ - if band == hostapd_constants.BAND_2G: - band_channels = frozenset(hostapd_constants.US_CHANNELS_2G) - elif band == hostapd_constants.BAND_5G: - band_channels = frozenset(hostapd_constants.US_CHANNELS_5G) - else: - asserts.fail("Invalid band {}".format(band)) - channels_set = frozenset(channels) - if channels_set <= band_channels: - return True - return False - - def _start_soft_ap(self) -> None: - """Start a SoftAP on the DUT. - - Raises: - EnvironmentError: if the SoftAP does not start - """ - ssid = rand_ascii_str(10) - security_type = "none" - password = "" - connectivity_mode = "local_only" - operating_band = "any" - - self.log.info("Starting SoftAP on DUT") - - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint( - ssid, security_type, password, connectivity_mode, operating_band - ) - if response.get("error"): - raise EnvironmentError( - "SL4F: Failed to setup SoftAP. Err: %s" % response["error"] - ) - self.log.info("SoftAp network (%s) is up." % ssid) - - def _stop_all_soft_aps(self) -> None: - """Stops all SoftAPs on Fuchsia Device. - - Raises: - EnvironmentError: if SoftAP stop call fails - """ - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint() - if response.get("error"): - raise EnvironmentError( - "SL4F: Failed to stop all SoftAPs. Err: %s" % response["error"] - ) - - def _client_channel(self) -> int: - """Determine the channel of the DUT client interface. - - If the interface is not connected, the method will assert a test - failure. 
- - Returns: channel number - - Raises: - EnvironmentError: if client interface channel cannot be - determined - """ - status = self.dut.status() - if status["error"]: - raise EnvironmentError("Could not determine client channel") - - result = status["result"] - if isinstance(result, dict): - if result.get("Connected"): - return result["Connected"]["channel"]["primary"] - asserts.fail("Client interface not connected") - raise EnvironmentError("Could not determine client channel") - - def _soft_ap_channel(self) -> int: - """Determine the channel of the DUT SoftAP interface. - - If the interface is not connected, the method will assert a test - failure. - - Returns: channel number - - Raises: - EnvironmentError: if SoftAP interface channel cannot be determined. - """ - iface_ids = self.dut.get_wlan_interface_id_list() - for iface_id in iface_ids: - query = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(iface_id) - if query["error"]: - continue - query_result = query["result"] - if type(query_result) is dict and query_result.get("role") == "Ap": - status = self.dut.device.sl4f.wlan_lib.wlanStatus(iface_id) - if status["error"]: - continue - status_result = status["result"] - if isinstance(status_result, dict): - if status_result.get("Connected"): - return status_result["Connected"]["channel"]["primary"] - asserts.fail("SoftAP interface not connected") - raise EnvironmentError("Could not determine SoftAP channel") - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/ConnectionStressTest.py b/src/antlion/tests/wlan/functional/ConnectionStressTest.py deleted file mode 100644 index fa52c7f..0000000 --- a/src/antlion/tests/wlan/functional/ConnectionStressTest.py +++ /dev/null
@@ -1,228 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Script for testing WiFi connection and disconnection in a loop - -""" - -import time - -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_security -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.utils import rand_ascii_str - -from mobly import signals, test_runner - - -class ConnectionStressTest(base_test.WifiBaseTest): - # Default number of test iterations here. - # Override using parameter in config file. 
- # Eg: "connection_stress_test_iterations": "50" - num_of_iterations = 10 - channel_2G = hostapd_constants.AP_DEFAULT_CHANNEL_2G - channel_5G = hostapd_constants.AP_DEFAULT_CHANNEL_5G - - def setup_class(self): - super().setup_class() - self.ssid = rand_ascii_str(10) - self.fd = self.fuchsia_devices[0] - self.dut = create_wlan_device(self.fd) - self.access_point = self.access_points[0] - self.num_of_iterations = int( - self.user_params.get( - "connection_stress_test_iterations", self.num_of_iterations - ) - ) - self.log.info("iterations: %d" % self.num_of_iterations) - - def teardown_test(self): - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def start_ap(self, profile, channel, security=None): - """Starts an Access Point - - Args: - profile: Profile name such as 'whirlwind' - channel: Channel to operate on - """ - self.log.info("Profile: %s, Channel: %d" % (profile, channel)) - setup_ap( - access_point=self.access_point, - profile_name=profile, - channel=channel, - ssid=self.ssid, - security=security, - ) - - def connect_disconnect( - self, ap_config, ssid=None, password=None, negative_test=False - ): - """Helper to start an AP, connect DUT to it and disconnect - - Args: - ap_config: Dictionary contaning profile name and channel - ssid: ssid to connect to - password: password for the ssid to connect to - """ - security_mode = ap_config.get("security_mode", None) - target_security = ( - hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security_mode, None - ) - ) - - if security_mode: - security_profile = hostapd_security.Security( - security_mode=ap_config["security_mode"], password=ap_config["password"] - ) - else: - security_profile = None - - # Start AP - self.start_ap( - ap_config["profile"], ap_config["channel"], security=security_profile - ) - - failed = False - # Connect and 
Disconnect several times - for x in range(0, self.num_of_iterations): - if not ssid: - ssid = self.ssid - if negative_test: - if not self.dut.associate( - ssid, target_pwd=password, target_security=target_security - ): - self.log.info("Attempt %d. Did not associate as expected." % x) - else: - self.log.error( - "Attempt %d. Negative test successfully " - "associated. Fail." % x - ) - failed = True - else: - # Connect - if self.dut.associate(ssid, target_pwd=password): - self.log.info("Attempt %d. Successfully associated" % x) - else: - self.log.error("Attempt %d. Failed to associate." % x) - failed = True - # Disconnect - self.dut.disconnect() - - # Wait a second before trying again - time.sleep(1) - - # Stop AP - self.access_point.stop_all_aps() - if failed: - raise signals.TestFailure("One or more association attempt failed.") - - def test_whirlwind_2g(self): - self.connect_disconnect( - {"profile": "whirlwind", "channel": self.channel_2G, "security_mode": None} - ) - - def test_whirlwind_5g(self): - self.connect_disconnect( - {"profile": "whirlwind", "channel": self.channel_5G, "security_mode": None} - ) - - def test_whirlwind_11ab_2g(self): - self.connect_disconnect( - { - "profile": "whirlwind_11ab_legacy", - "channel": self.channel_2G, - "security_mode": None, - } - ) - - def test_whirlwind_11ab_5g(self): - self.connect_disconnect( - { - "profile": "whirlwind_11ab_legacy", - "channel": self.channel_5G, - "security_mode": None, - } - ) - - def test_whirlwind_11ag_2g(self): - self.connect_disconnect( - { - "profile": "whirlwind_11ag_legacy", - "channel": self.channel_2G, - "security_mode": None, - } - ) - - def test_whirlwind_11ag_5g(self): - self.connect_disconnect( - { - "profile": "whirlwind_11ag_legacy", - "channel": self.channel_5G, - "security_mode": None, - } - ) - - def test_wrong_ssid_whirlwind_2g(self): - self.connect_disconnect( - {"profile": "whirlwind", "channel": self.channel_2G, "security_mode": None}, - ssid=rand_ascii_str(20), - 
negative_test=True, - ) - - def test_wrong_ssid_whirlwind_5g(self): - self.connect_disconnect( - {"profile": "whirlwind", "channel": self.channel_5G, "security_mode": None}, - ssid=rand_ascii_str(20), - negative_test=True, - ) - - def test_wrong_password_whirlwind_2g(self): - self.connect_disconnect( - { - "profile": "whirlwind", - "channel": self.channel_2G, - "security_mode": hostapd_constants.WPA2_STRING, - "password": rand_ascii_str(10), - }, - password=rand_ascii_str(20), - negative_test=True, - ) - - def test_wrong_password_whirlwind_5g(self): - self.connect_disconnect( - { - "profile": "whirlwind", - "channel": self.channel_5G, - "security_mode": hostapd_constants.WPA2_STRING, - "password": rand_ascii_str(10), - }, - password=rand_ascii_str(20), - negative_test=True, - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/DownloadStressTest.py b/src/antlion/tests/wlan/functional/DownloadStressTest.py deleted file mode 100644 index 28012c3..0000000 --- a/src/antlion/tests/wlan/functional/DownloadStressTest.py +++ /dev/null
@@ -1,192 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Script for testing various download stress scenarios. - -""" -import threading - -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.fuchsia import utils -from antlion.utils import rand_ascii_str - -from mobly import signals, test_runner - - -class DownloadStressTest(base_test.WifiBaseTest): - # Default number of test iterations here. - # Override using parameter in config file. 
- # Eg: "download_stress_test_iterations": "10" - num_of_iterations = 3 - - # Timeout for download thread in seconds - download_timeout_s = 60 * 5 - - # Download urls - url_20MB = "http://ipv4.download.thinkbroadband.com/20MB.zip" - url_40MB = "http://ipv4.download.thinkbroadband.com/40MB.zip" - url_60MB = "http://ipv4.download.thinkbroadband.com/60MB.zip" - url_512MB = "http://ipv4.download.thinkbroadband.com/512MB.zip" - - # Constants used in test_one_large_multiple_small_downloads - download_small_url = url_20MB - download_large_url = url_512MB - num_of_small_downloads = 5 - download_threads_result = [] - - def setup_class(self): - super().setup_class() - self.ssid = rand_ascii_str(10) - self.dut = create_wlan_device(self.fuchsia_devices[0]) - self.access_point = self.access_points[0] - self.num_of_iterations = int( - self.user_params.get( - "download_stress_test_iterations", self.num_of_iterations - ) - ) - - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - ) - self.dut.associate(self.ssid) - - def teardown_test(self): - self.download_threads_result.clear() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def test_download_small(self): - self.log.info("Downloading small file") - return self.download_file(self.url_20MB) - - def test_download_large(self): - return self.download_file(self.url_512MB) - - def test_continuous_download(self): - for x in range(0, self.num_of_iterations): - if not self.download_file(self.url_512MB): - return False - return True - - def download_file(self, url): - self.log.info("Start downloading: %s" % url) - return utils.http_file_download_by_curl( - self.dut.device, - url, - additional_args="--max-time %d --silent" % self.download_timeout_s, - ) - - def download_thread(self, url): - download_status = self.download_file(url) - if download_status: - self.log.info("Success 
downloading: %s" % url) - else: - self.log.info("Failure downloading: %s" % url) - - self.download_threads_result.append(download_status) - return download_status - - def test_multi_downloads(self): - download_urls = [self.url_20MB, self.url_40MB, self.url_60MB] - download_threads = [] - - try: - # Start multiple downloads at the same time - for index, url in enumerate(download_urls): - self.log.info("Create and start thread %d." % index) - t = threading.Thread(target=self.download_thread, args=(url,)) - download_threads.append(t) - t.start() - - # Wait for all threads to complete or timeout - for t in download_threads: - t.join(self.download_timeout_s) - - finally: - is_alive = False - - for index, t in enumerate(download_threads): - if t.isAlive(): - t = None - is_alive = True - - if is_alive: - raise signals.TestFailure("Thread %d timedout" % index) - - for index in range(0, len(self.download_threads_result)): - if not self.download_threads_result[index]: - self.log.info("Download failed for %d" % index) - raise signals.TestFailure("Thread %d failed to download" % index) - return False - - return True - - def test_one_large_multiple_small_downloads(self): - for index in range(self.num_of_iterations): - download_threads = [] - try: - large_thread = threading.Thread( - target=self.download_thread, args=(self.download_large_url,) - ) - download_threads.append(large_thread) - large_thread.start() - - for i in range(self.num_of_small_downloads): - # Start small file download - t = threading.Thread( - target=self.download_thread, args=(self.download_small_url,) - ) - download_threads.append(t) - t.start() - # Wait for thread to exit before starting the next iteration - t.join(self.download_timeout_s) - - # Wait for the large file download thread to complete - large_thread.join(self.download_timeout_s) - - finally: - is_alive = False - - for index, t in enumerate(download_threads): - if t.isAlive(): - t = None - is_alive = True - - if is_alive: - raise 
signals.TestFailure("Thread %d timedout" % index) - - for index in range(0, len(self.download_threads_result)): - if not self.download_threads_result[index]: - self.log.info("Download failed for %d" % index) - raise signals.TestFailure("Thread %d failed to download" % index) - return False - - # Clear results before looping again - self.download_threads_result.clear() - - return True - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/PingStressTest.py b/src/antlion/tests/wlan/functional/PingStressTest.py deleted file mode 100644 index 431c2e1..0000000 --- a/src/antlion/tests/wlan/functional/PingStressTest.py +++ /dev/null
@@ -1,250 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -PingStressTest exercises sending ICMP and ICMPv6 pings to a wireless access -router and another device behind the AP. Note, this does not reach out to the -internet. The DUT is only responsible for sending a routable packet; any -communication past the first-hop is not the responsibility of the DUT. -""" - -import threading - -from collections import namedtuple - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.utils import rand_ascii_str - -from mobly import signals, test_runner - -LOOPBACK_IPV4 = "127.0.0.1" -LOOPBACK_IPV6 = "::1" -PING_RESULT_TIMEOUT_SEC = 60 * 5 - -Test = namedtuple( - typename="Args", - field_names=["name", "dest_ip", "count", "interval", "timeout", "size"], - defaults=[3, 1000, 1000, 25], -) - -Addrs = namedtuple( - typename="Addrs", - field_names=["gateway_ipv4", "gateway_ipv6", "remote_ipv4", "remote_ipv6"], -) - - -class PingStressTest(base_test.WifiBaseTest): - def setup_generated_tests(self): - self.generate_tests( - self.send_ping, - lambda test_name, *_: f"test_{test_name}", - [ - Test("loopback_ipv4", LOOPBACK_IPV4), - Test("loopback_ipv6", LOOPBACK_IPV6), - 
Test("gateway_ipv4", lambda addrs: addrs.gateway_ipv4), - Test("gateway_ipv6", lambda addrs: addrs.gateway_ipv6), - Test("remote_ipv4_small_packet", lambda addrs: addrs.remote_ipv4), - Test("remote_ipv6_small_packet", lambda addrs: addrs.remote_ipv6), - Test( - "remote_ipv4_small_packet_long", - lambda addrs: addrs.remote_ipv4, - count=50, - ), - Test( - "remote_ipv6_small_packet_long", - lambda addrs: addrs.remote_ipv6, - count=50, - ), - Test( - "remote_ipv4_medium_packet", - lambda addrs: addrs.remote_ipv4, - size=64, - ), - Test( - "remote_ipv6_medium_packet", - lambda addrs: addrs.remote_ipv6, - size=64, - ), - Test( - "remote_ipv4_medium_packet_long", - lambda addrs: addrs.remote_ipv4, - count=50, - timeout=1500, - size=64, - ), - Test( - "remote_ipv6_medium_packet_long", - lambda addrs: addrs.remote_ipv6, - count=50, - timeout=1500, - size=64, - ), - Test( - "remote_ipv4_large_packet", - lambda addrs: addrs.remote_ipv4, - size=500, - ), - Test( - "remote_ipv6_large_packet", - lambda addrs: addrs.remote_ipv6, - size=500, - ), - Test( - "remote_ipv4_large_packet_long", - lambda addrs: addrs.remote_ipv4, - count=50, - timeout=5000, - size=500, - ), - Test( - "remote_ipv6_large_packet_long", - lambda addrs: addrs.remote_ipv6, - count=50, - timeout=5000, - size=500, - ), - ], - ) - - def setup_class(self): - super().setup_class() - self.ssid = rand_ascii_str(10) - self.dut = create_wlan_device(self.fuchsia_devices[0]) - self.access_point = self.access_points[0] - self.iperf_server = self.iperf_servers[0] - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.ssid, - setup_bridge=True, - is_ipv6_enabled=True, - is_nat_enabled=False, - ) - - ap_bridges = self.access_point.interfaces.get_bridge_interface() - if len(ap_bridges) != 1: - raise signals.TestAbortClass( - f"Expected one bridge interface on the AP, got {ap_bridges}" - ) - self.ap_ipv4 = utils.get_addr(self.access_point.ssh, 
ap_bridges[0]) - self.ap_ipv6 = utils.get_addr( - self.access_point.ssh, ap_bridges[0], addr_type="ipv6_link_local" - ) - self.log.info(f"Gateway finished setup ({self.ap_ipv4} | {self.ap_ipv6})") - - self.iperf_server.renew_test_interface_ip_address() - self.iperf_server_ipv4 = self.iperf_server.get_addr() - self.iperf_server_ipv6 = self.iperf_server.get_addr( - addr_type="ipv6_private_local" - ) - self.log.info( - f"Remote finished setup ({self.iperf_server_ipv4} | {self.iperf_server_ipv6})" - ) - - self.dut.associate(self.ssid) - - # Wait till the DUT has valid IP addresses after connecting. - self.dut.device.wait_for_ipv4_addr( - self.dut.device.wlan_client_test_interface_name - ) - self.dut.device.wait_for_ipv6_addr( - self.dut.device.wlan_client_test_interface_name - ) - self.log.info("DUT has valid IP addresses on test network") - - def teardown_class(self): - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def send_ping(self, _, get_addr_fn, count=3, interval=1000, timeout=1000, size=25): - dest_ip = ( - get_addr_fn( - Addrs( - gateway_ipv4=self.ap_ipv4, - # IPv6 link-local addresses require specification of the - # outgoing interface as the scope ID when sending packets. - gateway_ipv6=f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}", - remote_ipv4=self.iperf_server_ipv4, - # IPv6 global addresses do not require scope IDs. 
- remote_ipv6=self.iperf_server_ipv6, - ) - ) - if callable(get_addr_fn) - else get_addr_fn - ) - - self.log.info(f"Attempting to ping {dest_ip}...") - ping_result = self.dut.can_ping(dest_ip, count, interval, timeout, size) - if ping_result: - self.log.info("Ping was successful.") - else: - raise signals.TestFailure("Ping was unsuccessful.") - - def test_simultaneous_pings(self): - ping_urls = [ - self.iperf_server_ipv4, - self.ap_ipv4, - self.iperf_server_ipv6, - f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}", - ] - ping_threads = [] - ping_results = [] - - def ping_thread(self, dest_ip, ping_results): - self.log.info("Attempting to ping %s..." % dest_ip) - ping_result = self.dut.can_ping(dest_ip, count=10, size=50) - if ping_result: - self.log.info("Success pinging: %s" % dest_ip) - else: - self.log.info("Failure pinging: %s" % dest_ip) - ping_results.append(ping_result) - - try: - # Start multiple ping at the same time - for index, url in enumerate(ping_urls): - t = threading.Thread(target=ping_thread, args=(self, url, ping_results)) - ping_threads.append(t) - t.start() - - # Wait for all threads to complete or timeout - for t in ping_threads: - t.join(PING_RESULT_TIMEOUT_SEC) - - finally: - is_alive = False - - for index, t in enumerate(ping_threads): - if t.is_alive(): - t = None - is_alive = True - - if is_alive: - raise signals.TestFailure(f"Timed out while pinging {ping_urls[index]}") - - for index in range(0, len(ping_results)): - if not ping_results[index]: - raise signals.TestFailure(f"Failed to ping {ping_urls[index]}") - return True - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/SoftApTest.py b/src/antlion/tests/wlan/functional/SoftApTest.py deleted file mode 100644 index 471c5a3..0000000 --- a/src/antlion/tests/wlan/functional/SoftApTest.py +++ /dev/null
@@ -1,2112 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import multiprocessing as mp -import random -import time - -from antlion import utils -from antlion.controllers import iperf_server -from antlion.controllers import iperf_client -from antlion.controllers.access_point import setup_ap, AccessPoint -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_security -from antlion.controllers.ap_lib.hostapd_utils import generate_random_password -from antlion.controllers.utils_lib.ssh import settings -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, signals, test_runner - -CONNECTIVITY_MODE_LOCAL = "local_only" -CONNECTIVITY_MODE_UNRESTRICTED = "unrestricted" -DEFAULT_AP_PROFILE = "whirlwind" -DEFAULT_IPERF_PORT = 5201 -DEFAULT_STRESS_TEST_ITERATIONS = 10 -DEFAULT_TIMEOUT = 30 -DEFAULT_IPERF_TIMEOUT = 60 -DEFAULT_NO_ADDR_EXPECTED_TIMEOUT = 5 -INTERFACE_ROLE_AP = "Ap" -INTERFACE_ROLE_CLIENT = "Client" -OPERATING_BAND_2G = "only_2_4_ghz" -OPERATING_BAND_5G = "only_5_ghz" -OPERATING_BAND_ANY = "any" -SECURITY_OPEN = "none" -SECURITY_WEP = "wep" -SECURITY_WPA = "wpa" -SECURITY_WPA2 = "wpa2" -SECURITY_WPA3 = "wpa3" -STATE_UP = True -STATE_DOWN = False -TEST_TYPE_ASSOCIATE_ONLY = "associate_only" -TEST_TYPE_ASSOCIATE_AND_PING = 
"associate_and_ping" -TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC = "associate_and_pass_traffic" -TEST_TYPES = { - TEST_TYPE_ASSOCIATE_ONLY, - TEST_TYPE_ASSOCIATE_AND_PING, - TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC, -} - - -def get_test_name_from_settings(settings): - return settings["test_name"] - - -def get_ap_params_from_config_or_default(config): - """Retrieves AP parameters from antlion config, or returns default settings. - - Args: - config: dict, from antlion config, that may contain custom ap parameters - - Returns: - dict, containing all AP parameters - """ - profile = config.get("profile", DEFAULT_AP_PROFILE) - ssid = config.get("ssid", utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)) - channel = config.get("channel", hostapd_constants.AP_DEFAULT_CHANNEL_2G) - security_mode = config.get("security_mode", None) - password = config.get("password", None) - if security_mode: - if not password: - password = generate_random_password(security_mode=security_mode) - security = hostapd_security.Security(security_mode, password) - else: - security = None - - return { - "profile": profile, - "ssid": ssid, - "channel": channel, - "security": security, - "password": password, - } - - -def get_soft_ap_params_from_config_or_default(config): - """Retrieves SoftAp parameters from antlion config or returns default settings. 
- - Args: - config: dict, from antlion config, that may contain custom soft ap - parameters - - Returns: - dict, containing all soft AP parameters - """ - ssid = config.get("ssid", utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)) - connectivity_mode = config.get("connectivity_mode", CONNECTIVITY_MODE_LOCAL) - operating_band = config.get("operating_band", OPERATING_BAND_2G) - security_type = config.get("security_type", SECURITY_OPEN) - password = config.get("password", "") - - # The SoftAP API uses 'open' security instead of None, '' password - # instead of None, and security_type instead of security_mode, hence - # the difference between ap_params and soft_ap_params - if security_type != SECURITY_OPEN and password == "": - password = generate_random_password(security_mode=security_type) - - return { - "ssid": ssid, - "connectivity_mode": connectivity_mode, - "operating_band": operating_band, - "security_type": security_type, - "password": password, - } - - -class StressTestIterationFailure(Exception): - """Used to differentiate a subtest failure from an actual exception""" - - -class SoftApTest(base_test.WifiBaseTest): - """Tests for Fuchsia SoftAP - - Testbed requirement: - * One Fuchsia device - * At least one client (Android) device - * For multi-client tests, at least two client (Android) devices are - required. Test will be skipped if less than two client devices are - present. - * For any tests that exercise client-mode (e.g. toggle tests, simultaneous - tests), a physical AP (whirlwind) is also required. Those tests will be - skipped if physical AP is not present. 
- """ - - def setup_class(self): - self.soft_ap_test_params = self.user_params.get("soft_ap_test_params", {}) - self.dut = create_wlan_device(self.fuchsia_devices[0]) - - # TODO(fxb/51313): Add in device agnosticity for clients - # Create a wlan device and iperf client for each Android client - self.clients = [] - self.iperf_clients_map = {} - for device in self.android_devices: - client_wlan_device = create_wlan_device(device) - self.clients.append(client_wlan_device) - self.iperf_clients_map[ - client_wlan_device - ] = client_wlan_device.create_iperf_client() - self.primary_client = self.clients[0] - - # Create an iperf server on the DUT, which will be used for any streaming. - self.iperf_server_settings = settings.from_config( - { - "user": self.dut.device.ssh_username, - "host": self.dut.device.ip, - "ssh_config": self.dut.device.ssh_config, - } - ) - self.iperf_server = iperf_server.IPerfServerOverSsh( - self.iperf_server_settings, DEFAULT_IPERF_PORT, use_killall=True - ) - self.iperf_server.start() - - # Attempt to create an ap iperf server. AP is only required for tests - # that use client mode. 
- try: - self.access_point: AccessPoint = self.access_points[0] - self.ap_iperf_client = iperf_client.IPerfClientOverSsh( - self.access_point.ssh_settings - ) - except AttributeError: - self.access_point = None - self.ap_iperf_client = None - - self.iperf_clients_map[self.access_point] = self.ap_iperf_client - - def teardown_class(self): - # Because this is using killall, it will stop all iperf processes - self.iperf_server.stop() - - def setup_test(self): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - for client in self.clients: - client.disconnect() - client.reset_wifi() - client.wifi_toggle_state(True) - self.stop_all_soft_aps() - if self.access_point: - self.access_point.stop_all_aps() - self.dut.disconnect() - - def teardown_test(self): - for client in self.clients: - client.disconnect() - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.stop_all_soft_aps() - if self.access_point: - self.download_ap_logs() - self.access_point.stop_all_aps() - self.dut.disconnect() - - def start_soft_ap(self, settings): - """Starts a softAP on Fuchsia device. 
- - Args: - settings: a dict containing softAP configuration params - ssid: string, SSID of softAP network - security_type: string, security type of softAP network - - 'none', 'wep', 'wpa', 'wpa2', 'wpa3' - password: string, password if applicable - connectivity_mode: string, connecitivity_mode for softAP - - 'local_only', 'unrestricted' - operating_band: string, band for softAP network - - 'any', 'only_5_ghz', 'only_2_4_ghz' - """ - ssid = settings["ssid"] - security_type = settings["security_type"] - password = settings.get("password", "") - connectivity_mode = settings["connectivity_mode"] - operating_band = settings["operating_band"] - - self.log.info("Starting SoftAP on DUT with settings: %s" % settings) - - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint( - ssid, security_type, password, connectivity_mode, operating_band - ) - if response.get("error"): - raise EnvironmentError( - "SL4F: Failed to setup SoftAP. Err: %s" % response["error"] - ) - - self.log.info("SoftAp network (%s) is up." % ssid) - - def stop_soft_ap(self, settings): - """Stops a specific SoftAP On Fuchsia device. - - Args: - settings: a dict containing softAP config params (see start_soft_ap) - for details - - Raises: - EnvironmentError, if StopSoftAP call fails. - """ - ssid = settings["ssid"] - security_type = settings["security_type"] - password = settings.get("password", "") - - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAccessPoint( - ssid, security_type, password - ) - if response.get("error"): - raise EnvironmentError( - "SL4F: Failed to stop SoftAP. Err: %s" % response["error"] - ) - - def stop_all_soft_aps(self): - """Stops all SoftAPs on Fuchsia Device. - - Raises: - EnvironmentError, if StopAllAps call fails. - """ - response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint() - if response.get("error"): - raise EnvironmentError( - "SL4F: Failed to stop all SoftAPs. 
Err: %s" % response["error"] - ) - - def associate_with_soft_ap(self, device, soft_ap_settings): - """Associates client device with softAP on Fuchsia device. - - Args: - device: wlan_device to associate with the softAP - settings: a dict containing softAP config params (see start_soft_ap) - for details - - Raises: - TestFailure, if association fails - """ - self.log.info( - "Attempting to associate client %s with SoftAP on FuchsiaDevice " - "(%s)." % (device.identifier, self.dut.identifier) - ) - - check_connectivity = ( - soft_ap_settings["connectivity_mode"] == CONNECTIVITY_MODE_UNRESTRICTED - ) - associated = device.associate( - soft_ap_settings["ssid"], - target_pwd=soft_ap_settings.get("password"), - target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - soft_ap_settings["security_type"], None - ), - check_connectivity=check_connectivity, - ) - - if not associated: - self.log.error("Failed to connect to SoftAp.") - return False - - self.log.info("Client successfully associated with SoftAP.") - return True - - def disconnect_from_soft_ap(self, device): - """Disconnects client device from SoftAP. - - Args: - device: wlan_device to disconnect from SoftAP - """ - self.log.info("Disconnecting device %s from SoftAP." % device.identifier) - device.disconnect() - - def get_device_test_interface(self, device, role=None, channel=None): - """Retrieves test interface from a provided device, which can be the - FuchsiaDevice DUT, the AccessPoint, or an AndroidClient. - - Args: - device: the device do get the test interface from. Either - FuchsiaDevice (DUT), Android client, or AccessPoint. - role: str, either "client" or "ap". Required for FuchsiaDevice (DUT) - channel: int, channel of the ap network. Required for AccessPoint. - - Returns: - String, name of test interface on given device. 
- """ - - if device is self.dut: - device.device.wlan_controller.update_wlan_interfaces() - if role == INTERFACE_ROLE_CLIENT: - return device.device.wlan_client_test_interface_name - elif role == INTERFACE_ROLE_AP: - return device.device.wlan_ap_test_interface_name - else: - raise ValueError("Unsupported interface role: %s" % role) - elif isinstance(device, AccessPoint): - if not channel: - raise ValueError("Must provide a channel to get AccessPoint interface") - if channel < 36: - return device.wlan_2g - else: - return device.wlan_5g - else: - return device.get_default_wlan_test_interface() - - def wait_for_ipv4_address(self, device, interface_name, timeout=DEFAULT_TIMEOUT): - """Waits for interface on a wlan_device to get an ipv4 address. - - Args: - device: wlan_device or AccessPoint to check interface - interface_name: name of the interface to check - timeout: seconds to wait before raising an error - - Raises: - ValueError, if interface does not have an ipv4 address after timeout - """ - if isinstance(device, AccessPoint): - comm_channel = device.ssh - else: - comm_channel = device.device - end_time = time.time() + timeout - while time.time() < end_time: - ips = utils.get_interface_ip_addresses(comm_channel, interface_name) - if len(ips["ipv4_private"]) > 0: - self.log.info( - "Device %s interface %s has ipv4 address %s" - % (device.identifier, interface_name, ips["ipv4_private"][0]) - ) - return ips["ipv4_private"][0] - else: - time.sleep(1) - raise ConnectionError( - "After %s seconds, device %s still does not have an ipv4 address " - "on interface %s." % (timeout, device.identifier, interface_name) - ) - - def device_can_ping_addr(self, device, dest_ip, timeout=DEFAULT_TIMEOUT): - """Verify wlan_device can ping a destination ip. 
- - Args: - device: wlan_device to initiate ping - dest_ip: ip to ping from wlan_device - - Raises: - TestFailure, if ping fails - """ - end_time = time.time() + timeout - while time.time() < end_time: - with utils.SuppressLogOutput(): - ping_result = device.can_ping(dest_ip) - - if ping_result: - self.log.info( - "Ping successful from device %s to dest ip %s." - % (device.identifier, dest_ip) - ) - return True - else: - self.log.debug( - "Device %s could not ping dest ip %s. Retrying in 1 second." - % (device.identifier, dest_ip) - ) - time.sleep(1) - else: - self.log.info( - "Failed to ping from device %s to dest ip %s." - % (device.identifier, dest_ip) - ) - return False - - def run_iperf_traffic(self, ip_client, server_address, server_port=5201): - """Runs traffic between client and ap an verifies throughput. - - Args: - ip_client: iperf client to use - server_address: ipv4 address of the iperf server to use - server_port: port of the iperf server - - Raises: - TestFailure, if no traffic passes in either direction - """ - ip_client_identifier = self.get_iperf_client_identifier(ip_client) - - self.log.info( - "Running traffic from iperf client %s to iperf server %s." - % (ip_client_identifier, server_address) - ) - client_to_ap_path = ip_client.start( - server_address, "-i 1 -t 10 -J -p %s" % server_port, "client_to_soft_ap" - ) - - client_to_ap_result = iperf_server.IPerfResult(client_to_ap_path) - if not client_to_ap_result.avg_receive_rate: - raise ConnectionError( - "Failed to pass traffic from iperf client %s to iperf server %s." - % (ip_client_identifier, server_address) - ) - - self.log.info( - "Passed traffic from iperf client %s to iperf server %s with avg " - "rate of %s MB/s." - % ( - ip_client_identifier, - server_address, - client_to_ap_result.avg_receive_rate, - ) - ) - - self.log.info( - "Running traffic from iperf server %s to iperf client %s." 
- % (server_address, ip_client_identifier) - ) - ap_to_client_path = ip_client.start( - server_address, "-i 1 -t 10 -R -J -p %s" % server_port, "soft_ap_to_client" - ) - - ap_to_client_result = iperf_server.IPerfResult(ap_to_client_path) - if not ap_to_client_result.avg_receive_rate: - raise ConnectionError( - "Failed to pass traffic from iperf server %s to iperf client %s." - % (server_address, ip_client_identifier) - ) - - self.log.info( - "Passed traffic from iperf server %s to iperf client %s with avg " - "rate of %s MB/s." - % ( - server_address, - ip_client_identifier, - ap_to_client_result.avg_receive_rate, - ) - ) - - def run_iperf_traffic_parallel_process( - self, ip_client, server_address, error_queue, server_port=5201 - ): - """Executes run_iperf_traffic using a queue to capture errors. Used - when running iperf in a parallel process. - - Args: - ip_client: iperf client to use - server_address: ipv4 address of the iperf server to use - error_queue: multiprocessing queue to capture errors - server_port: port of the iperf server - """ - try: - self.run_iperf_traffic(ip_client, server_address, server_port=server_port) - except ConnectionError as err: - error_queue.put( - "In iperf process from %s to %s: %s" - % (self.get_iperf_client_identifier(ip_client), server_address, err) - ) - - def get_iperf_client_identifier(self, ip_client): - """Retrieves an indentifer string from iperf client, for logging. - - Args: - ip_client: iperf client to grab identifier from - """ - if type(ip_client) == iperf_client.IPerfClientOverAdb: - return ip_client._android_device_or_serial.serial - return ip_client._ssh_settings.hostname - - def device_is_connected_to_ap( - self, client, ap, channel=None, check_traffic=False, timeout=DEFAULT_TIMEOUT - ): - """Returns whether client device can ping (and optionally pass traffic) - to the ap device. - - Args: - client: device that should be associated. Either FuchsiaDevice (DUT) - or Android client - ap: device acting as AP. 
Either FuchsiaDevice (DUT) or AccessPoint. - channel: int, channel the AP is using. Required if ap is an - AccessPoint object. - check_traffic: bool, whether to attempt to pass traffic between - client and ap devices. - timeout: int, time in seconds to wait for devices to have ipv4 - addresses - """ - try: - # Get interfaces - client_interface = self.get_device_test_interface( - client, INTERFACE_ROLE_CLIENT - ) - ap_interface = self.get_device_test_interface( - ap, role=INTERFACE_ROLE_AP, channel=channel - ) - - # Get addresses - client_ipv4 = self.wait_for_ipv4_address( - client, client_interface, timeout=timeout - ) - ap_ipv4 = self.wait_for_ipv4_address(ap, ap_interface, timeout=timeout) - except ConnectionError as err: - self.log.error("Failed to retrieve interfaces and addresses. Err: %s" % err) - return False - - if not self.device_can_ping_addr(client, ap_ipv4): - self.log.error("Failed to ping from client to ap.") - return False - - if not self.device_can_ping_addr(ap, client_ipv4): - self.log.error("Failed to ping from ap to client.") - return False - - if check_traffic: - try: - if client is self.dut: - self.run_iperf_traffic(self.iperf_clients_map[ap], client_ipv4) - else: - self.run_iperf_traffic(self.iperf_clients_map[client], ap_ipv4) - except ConnectionError as err: - self.log.error("Failed to run traffic between DUT and AP.") - return False - return True - - def verify_soft_ap_connectivity_from_state(self, state, client): - """Verifies SoftAP state based on a client connection. 
def verify_soft_ap_connectivity_from_state(self, state, client):
    """Verifies SoftAP state based on a client connection.

    Args:
        state: bool, whether SoftAP should be up
        client: SoftApClient, to verify connectivity (or lack thereof)
    """
    if state == STATE_UP:
        return self.device_is_connected_to_ap(client, self.dut)
    else:
        with utils.SuppressLogOutput():
            try:
                return not self.device_is_connected_to_ap(
                    client, self.dut, timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT
                )
            # Allow a failed to find ap interface error
            except LookupError as err:
                self.log.debug("Hit expected LookupError: %s" % err)
                return True

def verify_client_mode_connectivity_from_state(self, state, channel):
    """Verifies client mode state based on DUT-AP connection.

    Args:
        state: bool, whether client mode should be up
        channel: int, channel of the APs network
    """
    if state == STATE_UP:
        return self.device_is_connected_to_ap(
            self.dut, self.access_point, channel=channel
        )
    else:
        with utils.SuppressLogOutput():
            try:
                return not self.device_is_connected_to_ap(
                    self.dut,
                    self.access_point,
                    channel=channel,
                    timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT,
                )
            # Allow a failed to find client interface error
            except LookupError as err:
                self.log.debug("Hit expected LookupError: %s" % err)
                return True

# Test Types

def verify_soft_ap_associate_only(self, client, soft_ap_settings):
    """Asserts that the client can associate with the DUT SoftAP."""
    if not self.associate_with_soft_ap(client, soft_ap_settings):
        asserts.fail("Failed to associate client with SoftAP.")

def verify_soft_ap_associate_and_ping(self, client, soft_ap_settings):
    """Asserts association and bidirectional ping between client and SoftAP."""
    self.verify_soft_ap_associate_only(client, soft_ap_settings)
    if not self.device_is_connected_to_ap(client, self.dut):
        asserts.fail("Client and SoftAP could not ping each other.")

def verify_soft_ap_associate_and_pass_traffic(self, client, settings):
    """Asserts association, ping, and iperf traffic between client and SoftAP."""
    self.verify_soft_ap_associate_only(client, settings)
    if not self.device_is_connected_to_ap(client, self.dut, check_traffic=True):
        asserts.fail(
            "Client and SoftAP not responding to pings and passing traffic "
            "as expected."
        )

# Runners for Generated Test Cases

def run_soft_ap_association_stress_test(self, settings):
    """Sets up a SoftAP, and repeatedly associates and disassociates a
    client.

    Args:
        settings: test configuration settings, see
            test_soft_ap_association_stress for details
    """
    client = settings["client"]
    soft_ap_params = settings["soft_ap_params"]
    test_type = settings["test_type"]
    if test_type not in TEST_TYPES:
        raise ValueError("Unrecognized test type %s" % test_type)
    iterations = settings["iterations"]
    self.log.info(
        "Running association stress test type %s for %s iterations"
        % (test_type, iterations)
    )

    self.start_soft_ap(soft_ap_params)

    passed_count = 0
    for run in range(iterations):
        try:
            self.log.info("Starting SoftAp association run %s" % str(run + 1))

            if test_type == TEST_TYPE_ASSOCIATE_ONLY:
                self.verify_soft_ap_associate_only(client, soft_ap_params)
            elif test_type == TEST_TYPE_ASSOCIATE_AND_PING:
                self.verify_soft_ap_associate_and_ping(client, soft_ap_params)
            elif test_type == TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC:
                self.verify_soft_ap_associate_and_pass_traffic(
                    client, soft_ap_params
                )
            else:
                raise AttributeError("Invalid test type: %s" % test_type)

        except signals.TestFailure as err:
            self.log.error(
                "SoftAp association stress run %s failed. Err: %s"
                % (str(run + 1), err.details)
            )
        else:
            self.log.info(
                "SoftAp association stress run %s successful." % str(run + 1)
            )
            passed_count += 1

    if passed_count < iterations:
        asserts.fail(
            "SoftAp association stress test passed on %s/%s runs."
            % (passed_count, iterations)
        )

    asserts.explicit_pass(
        "SoftAp association stress test passed on %s/%s runs."
        % (passed_count, iterations)
    )

# Alternate SoftAP and Client mode test

def run_soft_ap_and_client_mode_alternating_test(self, settings):
    """Runs a single soft_ap and client alternating stress test.

    See test_soft_ap_and_client_mode_alternating_stress for details.
    """
    iterations = settings["iterations"]
    pass_count = 0
    current_soft_ap_state = STATE_DOWN
    current_client_mode_state = STATE_DOWN

    self.client_mode_toggle_pre_test(settings)
    for iteration in range(iterations):
        passes = True

        # Attempt to toggle SoftAP on, then off. If the first toggle fails
        # to occur, exit early.
        for _ in range(2):
            (current_soft_ap_state, err) = self.run_toggle_iteration_func(
                self.soft_ap_toggle_test_iteration, settings, current_soft_ap_state
            )
            if err:
                self.log.error(
                    "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
                )
                passes = False
            if current_soft_ap_state == STATE_DOWN:
                break

        # Attempt to toggle Client mode on, then off. If the first toggle
        # fails to occur, exit early.
        for _ in range(2):
            (current_client_mode_state, err) = self.run_toggle_iteration_func(
                self.client_mode_toggle_test_iteration,
                settings,
                current_client_mode_state,
            )
            if err:
                self.log.error(
                    "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
                )
                passes = False
            if current_client_mode_state == STATE_DOWN:
                break

        if passes:
            pass_count += 1

    if pass_count == iterations:
        asserts.explicit_pass(
            "Toggle SoftAP and client mode stress test passed %s/%s times."
            % (pass_count, iterations)
        )
    else:
        asserts.fail(
            "Toggle SoftAP and client mode stress test only passed %s/%s "
            "times." % (pass_count, iterations)
        )

# Toggle Stress Test Helper Functions

def run_toggle_stress_test(self, settings):
    """Runner function for toggle stress tests.

    Repeats some test function through stress test iterations, logging
    failures, tracking pass rate, managing states, etc.

    Args:
        settings: dict, stress test settings

    Asserts:
        PASS: if all iterations of the test function pass
        FAIL: if any iteration of the test function fails
    """
    test_runner_func = settings["test_runner_func"]
    pre_test_func = settings.get("pre_test_func", None)
    iterations = settings["iterations"]
    if pre_test_func:
        pre_test_func(settings)

    pass_count = 0
    current_state = STATE_DOWN
    for iteration in range(iterations):
        (current_state, err) = self.run_toggle_iteration_func(
            test_runner_func, settings, current_state
        )
        if err:
            self.log.error(
                "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
            )
        else:
            pass_count += 1

    if pass_count == iterations:
        asserts.explicit_pass(
            "Stress test passed %s/%s times." % (pass_count, iterations)
        )
    else:
        asserts.fail(
            "Stress test only passed %s/%s times." % (pass_count, iterations)
        )

def run_toggle_iteration_func(self, func, settings, current_state):
    """Runs a toggle iteration function, updating the current state
    based on what the toggle iteration function raises.

    Used for toggle stress tests.

    Note on EnvironmentError vs StressTestIterationFailure:
        StressTestIterationFailure is raised by func when the toggle occurs
        but connectivity or some other post-toggle check fails (i.e. the
        next iteration should toggle to the next state.)

        EnvironmentError is raised by func when the toggle itself fails (i.e
        the next iteration should retry the same toggle again.)

    Args:
        func: toggle iteration func to run (e.g soft_ap_toggle_iteration)
        settings: dict, stress test settings
        current_state: bool, the current state of the mode being toggled

    Returns:
        (new_state, err):
            new_state: bool, state of the mode after toggle attempt
            err: exception, if any are raised, else None
    """
    try:
        func(settings, current_state)
    except EnvironmentError as err:
        # Toggle itself failed; stay in the same state so it is retried.
        return (current_state, err)
    except StressTestIterationFailure as err:
        # Toggle occurred but post-toggle verification failed.
        return (not current_state, err)
    else:
        return (not current_state, None)

# Stress Test Toggle Functions

def start_soft_ap_and_verify_connected(self, client, soft_ap_params):
    """Sets up SoftAP, associates a client, then verifies connection.

    Args:
        client: SoftApClient, client to use to verify SoftAP
        soft_ap_params: dict, containing parameters to setup softap

    Raises:
        StressTestIterationFailure, if toggle occurs, but connection
            is not functioning as expected
    """
    # Change SSID every time, to avoid client connection issues.
    soft_ap_params["ssid"] = utils.rand_ascii_str(
        hostapd_constants.AP_SSID_LENGTH_2G
    )
    self.start_soft_ap(soft_ap_params)
    associated = self.associate_with_soft_ap(client, soft_ap_params)
    if not associated:
        raise StressTestIterationFailure(
            "Failed to associated client to DUT SoftAP. "
            "Continuing with iterations."
        )

    if not self.verify_soft_ap_connectivity_from_state(STATE_UP, client):
        raise StressTestIterationFailure(
            "Failed to ping between client and DUT. Continuing "
            "with iterations."
        )

def stop_soft_ap_and_verify_disconnected(self, client, soft_ap_params):
    """Tears down SoftAP, and verifies connection is down.

    Args:
        client: SoftApClient, client to use to verify SoftAP
        soft_ap_params: dict, containing parameters of SoftAP to teardown

    Raise:
        EnvironmentError, if client and AP can still communicate
    """
    self.log.info("Stopping SoftAP on DUT.")
    self.stop_soft_ap(soft_ap_params)

    if not self.verify_soft_ap_connectivity_from_state(STATE_DOWN, client):
        raise EnvironmentError(
            "Client can still ping DUT. Continuing with " "iterations."
        )

def start_client_mode_and_verify_connected(self, ap_params):
    """Connects DUT to AP in client mode and verifies connection

    Args:
        ap_params: dict, containing parameters of the AP network

    Raises:
        EnvironmentError, if DUT fails to associate altogether
        StressTestIterationFailure, if DUT associates but connection is not
            functioning as expected.
    """
    ap_ssid = ap_params["ssid"]
    ap_password = ap_params["password"]
    ap_channel = ap_params["channel"]
    ap_security = ap_params.get("security")

    if ap_security:
        ap_security_mode = ap_security.security_mode_string
    else:
        ap_security_mode = None

    self.log.info("Associating DUT with AP network: %s" % ap_ssid)
    associated = self.dut.associate(
        target_ssid=ap_ssid,
        target_pwd=ap_password,
        target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
            ap_security_mode, None
        ),
    )
    if not associated:
        raise EnvironmentError("Failed to associate DUT in client mode.")
    else:
        self.log.info("Association successful.")

    if not self.verify_client_mode_connectivity_from_state(STATE_UP, ap_channel):
        raise StressTestIterationFailure("Failed to ping AP from DUT.")

def stop_client_mode_and_verify_disconnected(self, ap_params):
    """Disconnects DUT from AP and verifies connection is down.

    Args:
        ap_params: dict, containing parameters of the AP network

    Raises:
        EnvironmentError, if DUT and AP can still communicate
    """
    self.log.info("Disconnecting DUT from AP.")
    self.dut.disconnect()
    if not self.verify_client_mode_connectivity_from_state(
        STATE_DOWN, ap_params["channel"]
    ):
        raise EnvironmentError("DUT can still ping AP.")

# Toggle Stress Test Iteration and Pre-Test Functions

# SoftAP Toggle Stress Test Helper Functions

def soft_ap_toggle_test_iteration(self, settings, current_state):
    """Runs a single iteration of SoftAP toggle stress test

    Args:
        settings: dict, containing test settings
        current_state: bool, current state of SoftAP (True if up,
            else False)

    Raises:
        StressTestIterationFailure, if toggle occurs but mode isn't
            functioning correctly.
        EnvironmentError, if toggle fails to occur at all
    """
    soft_ap_params = settings["soft_ap_params"]
    self.log.info("Toggling SoftAP %s." % ("down" if current_state else "up"))

    if current_state == STATE_DOWN:
        self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
    else:
        self.stop_soft_ap_and_verify_disconnected(
            self.primary_client, soft_ap_params
        )

# Client Mode Toggle Stress Test Helper Functions

def client_mode_toggle_pre_test(self, settings):
    """Prepares the AP before client mode toggle tests

    Args:
        settings: dict, stress test settings

    Raises:
        ConnectionError, if AP setup fails
    """
    ap_params = settings["ap_params"]
    ap_channel = ap_params["channel"]
    ap_profile = ap_params.pop("profile")
    self.log.info("Setting up AP with params: %s" % ap_params)
    setup_ap(access_point=self.access_point, profile_name=ap_profile, **ap_params)
    # Confirms AP assigned itself an address
    ap_interface = self.get_device_test_interface(
        self.access_point, channel=ap_channel
    )
    self.wait_for_ipv4_address(self.access_point, ap_interface)

def client_mode_toggle_test_iteration(self, settings, current_state):
    """Runs a single iteration of client mode toggle stress test

    Args:
        settings: dict, containing test settings
        current_state: bool, current state of client mode (True if up,
            else False)

    Raises:
        StressTestIterationFailure, if toggle occurs but mode isn't
            functioning correctly.
        EnvironmentError, if toggle fails to occur at all
    """
    ap_params = settings["ap_params"]
    self.log.info("Toggling client mode %s" % ("off" if current_state else "on"))

    if current_state == STATE_DOWN:
        self.start_client_mode_and_verify_connected(ap_params)
    else:
        self.stop_client_mode_and_verify_disconnected(ap_params)

# Toggle SoftAP with Client Mode Up Test Helper Functions

def soft_ap_toggle_with_client_mode_pre_test(self, settings):
    """Sets up and verifies client mode before SoftAP toggle test.

    Args:
        settings: dict, stress test settings

    Raises:
        ConnectionError, if client mode setup fails
    """
    self.client_mode_toggle_pre_test(settings)
    try:
        self.start_client_mode_and_verify_connected(settings["ap_params"])
    except StressTestIterationFailure as err:
        # This prevents it being treated as a routine error
        raise ConnectionError(
            "Failed to set up DUT client mode before SoftAP toggle test."
            "Err: %s" % err
        )

def soft_ap_toggle_with_client_mode_iteration(
    self,
    settings,
    current_state,
):
    """Runs single iteration of SoftAP toggle stress with client mode test.

    Args:
        settings: dict, containing test settings
        current_state: bool, current state of SoftAP (True if up,
            else False)

    Raises:
        StressTestIterationFailure, if toggle occurs but mode isn't
            functioning correctly.
        EnvironmentError, if toggle fails to occur at all
    """
    ap_params = settings["ap_params"]
    ap_channel = ap_params["channel"]
    self.soft_ap_toggle_test_iteration(settings, current_state)
    if not self.device_is_connected_to_ap(
        self.dut, self.access_point, channel=ap_channel
    ):
        raise StressTestIterationFailure(
            "DUT client mode is no longer functional after SoftAP toggle."
        )

# Toggle Client Mode with SoftAP Up Test Helper Functions

def client_mode_toggle_with_soft_ap_pre_test(self, settings):
    """Sets up and verifies softap before client mode toggle test.

    Args:
        settings: dict, stress test settings

    Raises:
        ConnectionError, if softap setup fails
    """
    self.client_mode_toggle_pre_test(settings)
    try:
        self.start_soft_ap_and_verify_connected(
            self.primary_client, settings["soft_ap_params"]
        )
    except StressTestIterationFailure as err:
        # This prevents it being treated as a routine error
        raise ConnectionError(
            "Failed to set up SoftAP before client mode toggle test. Err: %s" % err
        )

def client_mode_toggle_with_soft_ap_iteration(self, settings, current_state):
    """Runs single iteration of client mode toggle stress with SoftAP test.

    Args:
        settings: dict, containing test settings
        current_state: bool, current state of client mode (True if up,
            else False)

    Raises:
        StressTestIterationFailure, if toggle occurs but mode isn't
            functioning correctly.
        EnvironmentError, if toggle fails to occur at all
    """
    self.client_mode_toggle_test_iteration(settings, current_state)
    if not self.device_is_connected_to_ap(self.primary_client, self.dut):
        raise StressTestIterationFailure(
            "SoftAP is no longer functional after client mode toggle."
        )

# Toggle SoftAP and Client Mode Randomly

def run_soft_ap_and_client_mode_random_toggle_stress_test(self, settings):
    """Runner function for SoftAP and client mode random toggle tests.

    Each iteration, randomly chooses if a mode will be toggled or not.

    Args:
        settings: dict, containing test settings
    """
    iterations = settings["iterations"]
    pass_count = 0
    current_soft_ap_state = STATE_DOWN
    current_client_mode_state = STATE_DOWN
    ap_channel = settings["ap_params"]["channel"]

    self.client_mode_toggle_pre_test(settings)
    for iteration in range(iterations):
        self.log.info(
            "Starting iteration %s out of %s." % (str(iteration + 1), iterations)
        )
        passes = True

        # Randomly determine if softap, client mode, or both should
        # be toggled.
        rand_toggle_choice = random.randrange(0, 3)
        if rand_toggle_choice <= 1:
            (current_soft_ap_state, err) = self.run_toggle_iteration_func(
                self.soft_ap_toggle_test_iteration, settings, current_soft_ap_state
            )
            if err:
                self.log.error(
                    "Iteration %s failed toggling SoftAP. Err: %s"
                    % (str(iteration + 1), err)
                )
                passes = False
        if rand_toggle_choice >= 1:
            (current_client_mode_state, err) = self.run_toggle_iteration_func(
                self.client_mode_toggle_test_iteration,
                settings,
                current_client_mode_state,
            )
            if err:
                self.log.error(
                    "Iteration %s failed toggling client mode. Err: %s"
                    % (str(iteration + 1), err)
                )
                passes = False

        soft_ap_verified = self.verify_soft_ap_connectivity_from_state(
            current_soft_ap_state, self.primary_client
        )
        client_mode_verified = self.verify_client_mode_connectivity_from_state(
            current_client_mode_state, ap_channel
        )

        if not soft_ap_verified or not client_mode_verified:
            passes = False
        if passes:
            pass_count += 1

    if pass_count == iterations:
        asserts.explicit_pass(
            "Stress test passed %s/%s times." % (pass_count, iterations)
        )
    else:
        asserts.fail(
            "Stress test only passed %s/%s times." % (pass_count, iterations)
        )

# Test Cases

def _run_soft_ap_test_case(
    self, ssid_length, security_type, connectivity_mode, operating_band
):
    """Shared body for the generated SoftAP test cases below.

    Builds SoftAP parameters (random SSID of the given length, and a random
    password for any non-open security type), starts the SoftAP on the DUT,
    then verifies the primary client can associate and pass traffic.

    Args:
        ssid_length: int, length of the random SSID to generate
        security_type: str, one of the SECURITY_* constants
        connectivity_mode: str, one of the CONNECTIVITY_MODE_* constants
        operating_band: str, one of the OPERATING_BAND_* constants
    """
    soft_ap_params = {
        "ssid": utils.rand_ascii_str(ssid_length),
        "security_type": security_type,
        "connectivity_mode": connectivity_mode,
        "operating_band": operating_band,
    }
    if security_type != SECURITY_OPEN:
        # WEP requires a WEP-format password; all other modes use default.
        if security_type == SECURITY_WEP:
            soft_ap_params["password"] = generate_random_password(
                security_mode=SECURITY_WEP
            )
        else:
            soft_ap_params["password"] = generate_random_password()
    self.start_soft_ap(soft_ap_params)
    self.verify_soft_ap_associate_and_pass_traffic(
        self.primary_client, soft_ap_params
    )

def test_soft_ap_2g_open_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_OPEN,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_open_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_OPEN,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_open_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_OPEN,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wep_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WEP,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wep_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WEP,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wep_local(self):
    # Fixed: previously called verify_soft_ap_associate_and_pass_traffic
    # without the soft_ap_params argument, which raised a TypeError.
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WEP,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wpa_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WPA,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wpa_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wpa_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wpa2_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WPA2,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wpa2_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA2,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wpa2_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA2,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wpa3_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WPA3,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wpa3_local(self):
    # Fixed: previously used OPERATING_BAND_ANY despite being a 5 GHz test
    # (copy-paste error; every sibling 5g test uses OPERATING_BAND_5G).
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA3,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wpa3_local(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA3,
        CONNECTIVITY_MODE_LOCAL,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_open_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_OPEN,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_open_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_OPEN,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_open_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_OPEN,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wep_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WEP,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wep_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WEP,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wep_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WEP,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wpa_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WPA,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wpa_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wpa_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wpa2_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WPA2,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wpa2_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA2,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wpa2_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA2,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_ANY,
    )

def test_soft_ap_2g_wpa3_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_2G,
        SECURITY_WPA3,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_2G,
    )

def test_soft_ap_5g_wpa3_unrestricted(self):
    # Fixed: previously used OPERATING_BAND_ANY despite being a 5 GHz test
    # (copy-paste error; every sibling 5g test uses OPERATING_BAND_5G).
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA3,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_5G,
    )

def test_soft_ap_any_wpa3_unrestricted(self):
    self._run_soft_ap_test_case(
        hostapd_constants.AP_SSID_LENGTH_5G,
        SECURITY_WPA3,
        CONNECTIVITY_MODE_UNRESTRICTED,
        OPERATING_BAND_ANY,
    )

def test_multi_client(self):
    """Tests multi-client association with a single soft AP network.

    This tests associates a variable length list of clients, verifying each
    can ping the SoftAP and pass traffic, and then verifies all previously
    associated clients can still ping and pass traffic.

    The same occurs in reverse for disassociations.

    SoftAP parameters can be changed from default via ACTS config:
    Example Config
    "soft_ap_test_params" : {
        "multi_client_test_params": {
            "ssid": "testssid",
            "security_type": "wpa2",
            "password": "password",
            "connectivity_mode": "local_only",
            "operating_band": "only_2_4_ghz"
        }
    }
    """
    asserts.skip_if(len(self.clients) < 2, "Test requires at least 2 SoftAPClients")

    test_params = self.soft_ap_test_params.get("multi_client_test_params", {})
    soft_ap_params = get_soft_ap_params_from_config_or_default(
        test_params.get("soft_ap_params", {})
    )

    self.start_soft_ap(soft_ap_params)

    associated = []

    for client in self.clients:
        # Associate new client
        self.verify_soft_ap_associate_and_ping(client, soft_ap_params)

        # Verify previously associated clients still behave as expected
        for associated_client in associated:
            self.log.info(
                "Verifying previously associated client %s still functions correctly."
                % associated_client["device"].identifier
            )
            if not self.device_is_connected_to_ap(
                associated_client["device"], self.dut, check_traffic=True
            ):
                asserts.fail(
                    "Previously associated client %s failed checks after "
                    "client %s associated."
                    % (associated_client["device"].identifier, client.identifier)
                )

        client_interface = self.get_device_test_interface(client)
        client_ipv4 = self.wait_for_ipv4_address(client, client_interface)
        associated.append({"device": client, "address": client_ipv4})

    self.log.info("All devices successfully associated.")

    self.log.info("Verifying all associated clients can ping each other.")
    for transmitter in associated:
        for receiver in associated:
            if transmitter != receiver:
                if not transmitter["device"].can_ping(receiver["address"]):
                    asserts.fail(
                        "Could not ping from one associated client (%s) to another (%s)."
                        % (transmitter["address"], receiver["address"])
                    )
                else:
                    self.log.info(
                        "Successfully pinged from associated client (%s) to another (%s)"
                        % (transmitter["address"], receiver["address"])
                    )

    self.log.info(
        "All associated clients can ping each other. Beginning disassociations."
    )

    while len(associated) > 0:
        # Disassociate client
        client = associated.pop()["device"]
        self.disconnect_from_soft_ap(client)

        # Verify still connected clients still behave as expected
        for associated_client in associated:
            self.log.info(
                "Verifying still associated client %s still functions "
                "correctly." % associated_client["device"].identifier
            )
            if not self.device_is_connected_to_ap(
                associated_client["device"], self.dut, check_traffic=True
            ):
                asserts.fail(
                    "Previously associated client %s failed checks after"
                    " client %s disassociated."
                    % (associated_client["device"].identifier, client.identifier)
                )

    self.log.info("All disassociations occurred smoothly.")

def test_simultaneous_soft_ap_and_client(self):
    """Tests FuchsiaDevice DUT can act as a client and a SoftAP
    simultaneously.

    Raises:
        ConnectionError: if DUT fails to connect as client
        RuntimeError: if parallel processes fail to join
        TestFailure: if DUT fails to pass traffic as either a client or an
            AP
    """
    asserts.skip_if(not self.access_point, "No access point provided.")

    self.log.info("Setting up AP using hostapd.")
    test_params = self.soft_ap_test_params.get("soft_ap_and_client_test_params", {})

    # Configure AP
    ap_params = get_ap_params_from_config_or_default(
        test_params.get("ap_params", {})
    )

    # Setup AP and associate DUT
    ap_profile = ap_params.pop("profile")
    setup_ap(access_point=self.access_point, profile_name=ap_profile, **ap_params)
    try:
        self.start_client_mode_and_verify_connected(ap_params)
    except Exception as err:
        asserts.fail("Failed to set up client mode. Err: %s" % err)

    # Setup SoftAP
    soft_ap_params = get_soft_ap_params_from_config_or_default(
        test_params.get("soft_ap_params", {})
    )
    self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)

    # Get FuchsiaDevice test interfaces
    dut_ap_interface = self.get_device_test_interface(
        self.dut, role=INTERFACE_ROLE_AP
    )
    dut_client_interface = self.get_device_test_interface(
        self.dut, role=INTERFACE_ROLE_CLIENT
    )

    # Get FuchsiaDevice addresses
    dut_ap_ipv4 = self.wait_for_ipv4_address(self.dut, dut_ap_interface)
    dut_client_ipv4 = self.wait_for_ipv4_address(self.dut, dut_client_interface)

    # Set up secondary iperf server of FuchsiaDevice
    self.log.info("Setting up second iperf server on FuchsiaDevice DUT.")
    secondary_iperf_server = iperf_server.IPerfServerOverSsh(
        self.iperf_server_settings, DEFAULT_IPERF_PORT + 1, use_killall=True
    )
    secondary_iperf_server.start()

    # Set up iperf client on AP
    self.log.info("Setting up iperf client on AP.")
    ap_iperf_client = iperf_client.IPerfClientOverSsh(
        self.access_point.ssh_settings
    )

    # Setup iperf processes:
    #     Primary client <-> SoftAP interface on FuchsiaDevice
    #     AP <-> Client interface on FuchsiaDevice
    process_errors = mp.Queue()
    iperf_soft_ap = mp.Process(
        target=self.run_iperf_traffic_parallel_process,
        args=[
            self.iperf_clients_map[self.primary_client],
            dut_ap_ipv4,
            process_errors,
        ],
    )

    iperf_fuchsia_client = mp.Process(
        target=self.run_iperf_traffic_parallel_process,
        args=[ap_iperf_client, dut_client_ipv4, process_errors],
        kwargs={"server_port": 5202},
    )

    # Run iperf processes simultaneously
    self.log.info(
        "Running simultaneous iperf traffic: between AP and DUT "
        "client interface, and DUT AP interface and client."
    )

    iperf_soft_ap.start()
    iperf_fuchsia_client.start()

    # Block until processes can join or timeout
    for proc in [iperf_soft_ap, iperf_fuchsia_client]:
        proc.join(timeout=DEFAULT_IPERF_TIMEOUT)
        if proc.is_alive():
            proc.terminate()
            proc.join()
            raise RuntimeError("Failed to join process %s" % proc)

    # Stop iperf server (also stopped in teardown class as failsafe)
    secondary_iperf_server.stop()

    # Check errors from parallel processes
    if process_errors.empty():
        asserts.explicit_pass(
            "FuchsiaDevice was successfully able to pass traffic as a "
            "client and an AP simultaneously."
        )
    else:
        while not process_errors.empty():
            self.log.error("Error in iperf process: %s" % process_errors.get())
        asserts.fail(
            "FuchsiaDevice failed to pass traffic as a client and an AP "
            "simultaneously."
        )
- - Example Config - "soft_ap_test_params" : { - "soft_ap_association_stress_tests": [ - { - "ssid": "test_network", - "security_type": "wpa2", - "password": "password", - "connectivity_mode": "local_only", - "operating_band": "only_2_4_ghz", - "iterations": 10 - } - ] - } - """ - tests = self.soft_ap_test_params.get( - "test_soft_ap_association_stress", - [dict(test_name="test_soft_ap_association_stress_default")], - ) - - test_settings_list = [] - for config_settings in tests: - soft_ap_params = get_soft_ap_params_from_config_or_default( - config_settings.get("soft_ap_params", {}) - ) - test_type = config_settings.get("test_type", "associate_and_pass_traffic") - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - test_settings = { - "test_name": config_settings.get( - "test_name", - "test_soft_ap_association_stress_%s_iterations" % iterations, - ), - "client": self.primary_client, - "soft_ap_params": soft_ap_params, - "test_type": test_type, - "iterations": iterations, - } - test_settings_list.append(test_settings) - - self.run_generated_testcases( - self.run_soft_ap_association_stress_test, - test_settings_list, - name_func=get_test_name_from_settings, - ) - - def test_soft_ap_and_client_mode_alternating_stress(self): - """Runs tests that alternate between SoftAP and Client modes. - - Each tests sets up an AP. 
Then, for each iteration: - - DUT starts up SoftAP, client associates with SoftAP, - connection is verified, then disassociates - - DUT associates to the AP, connection is verified, then - disassociates - - Example Config: - "soft_ap_test_params": { - "toggle_soft_ap_and_client_tests": [ - { - "test_name": "test_wpa2_client_ap_toggle", - "ap_params": { - "channel": 6, - "ssid": "test-ap-network", - "security_mode": "wpa2", - "password": "password" - }, - "soft_ap_params": { - "ssid": "test-soft-ap-network", - "security_type": "wpa2", - "password": "other-password", - "connectivity_mode": "local_only", - "operating_band": "only_2_4_ghz" - }, - "iterations": 5 - } - ] - } - """ - asserts.skip_if(not self.access_point, "No access point provided.") - tests = self.soft_ap_test_params.get( - "test_soft_ap_and_client_mode_alternating_stress", - [dict(test_name="test_soft_ap_and_client_mode_alternating_stress_default")], - ) - - test_settings_list = [] - for config_settings in tests: - ap_params = get_ap_params_from_config_or_default( - config_settings.get("ap_params", {}) - ) - soft_ap_params = get_soft_ap_params_from_config_or_default( - config_settings.get("soft_ap_params", {}) - ) - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - - test_settings = { - "test_name": config_settings.get( - "test_name", - "test_soft_ap_and_client_mode_alternating_stress_%s_iterations" - % iterations, - ), - "iterations": iterations, - "soft_ap_params": soft_ap_params, - "ap_params": ap_params, - } - - test_settings_list.append(test_settings) - self.run_generated_testcases( - test_func=self.run_soft_ap_and_client_mode_alternating_test, - settings=test_settings_list, - name_func=get_test_name_from_settings, - ) - - def test_soft_ap_toggle_stress(self): - """Runs SoftAP toggling stress test. - - Each iteration toggles SoftAP to the opposite state (up or down). 
- - If toggled up, a client is associated and connection is verified - If toggled down, test verifies client is not connected - - Will run with default params, but custom tests can be provided in the - ACTS config. - - Example Config - "soft_ap_test_params" : { - "test_soft_ap_toggle_stress": [ - "soft_ap_params": { - "security_type": "wpa2", - "password": "password", - "connectivity_mode": "local_only", - "operating_band": "only_2_4_ghz", - }, - "iterations": 10 - ] - } - """ - tests = self.soft_ap_test_params.get( - "test_soft_ap_toggle_stress", - [dict(test_name="test_soft_ap_toggle_stress_default")], - ) - - test_settings_list = [] - for config_settings in tests: - soft_ap_params = get_soft_ap_params_from_config_or_default( - config_settings.get("soft_ap_params", {}) - ) - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - test_settings = { - "test_name": config_settings.get( - "test_name", "test_soft_ap_toggle_stress_%s_iterations" % iterations - ), - "test_runner_func": self.soft_ap_toggle_test_iteration, - "soft_ap_params": soft_ap_params, - "iterations": iterations, - } - test_settings_list.append(test_settings) - - self.run_generated_testcases( - self.run_toggle_stress_test, - test_settings_list, - name_func=get_test_name_from_settings, - ) - - def test_client_mode_toggle_stress(self): - """Runs client mode toggling stress test. - - Each iteration toggles client mode to the opposite state (up or down). - - If toggled up, DUT associates to AP, and connection is verified - If toggled down, test verifies DUT is not connected to AP - - Will run with default params, but custom tests can be provided in the - ACTS config. 
- - Example Config - "soft_ap_test_params" : { - "test_client_mode_toggle_stress": [ - "soft_ap_params": { - 'ssid': ssid, - 'channel': channel, - 'security_mode': security, - 'password': password - }, - "iterations": 10 - ] - } - """ - asserts.skip_if(not self.access_point, "No access point provided.") - tests = self.soft_ap_test_params.get( - "test_client_mode_toggle_stress", - [dict(test_name="test_client_mode_toggle_stress_default")], - ) - - test_settings_list = [] - for config_settings in tests: - ap_params = get_ap_params_from_config_or_default( - config_settings.get("ap_params", {}) - ) - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - test_settings = { - "test_name": config_settings.get( - "test_name", - "test_client_mode_toggle_stress_%s_iterations" % iterations, - ), - "test_runner_func": self.client_mode_toggle_test_iteration, - "pre_test_func": self.client_mode_toggle_pre_test, - "ap_params": ap_params, - "iterations": iterations, - } - test_settings_list.append(test_settings) - self.run_generated_testcases( - self.run_toggle_stress_test, - test_settings_list, - name_func=get_test_name_from_settings, - ) - - def test_soft_ap_toggle_stress_with_client_mode(self): - """Same as test_soft_ap_toggle_stress, but client mode is set up - at test start and verified after every toggle.""" - asserts.skip_if(not self.access_point, "No access point provided.") - tests = self.soft_ap_test_params.get( - "test_soft_ap_toggle_stress_with_client_mode", - [dict(test_name="test_soft_ap_toggle_stress_with_client_mode_default")], - ) - - test_settings_list = [] - for config_settings in tests: - soft_ap_params = get_soft_ap_params_from_config_or_default( - config_settings.get("soft_ap_params", {}) - ) - ap_params = get_ap_params_from_config_or_default( - config_settings.get("ap_params", {}) - ) - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - test_settings = { - "test_name": config_settings.get( - 
"test_name", - "test_soft_ap_toggle_stress_with_client_mode_%s_iterations" - % iterations, - ), - "test_runner_func": self.soft_ap_toggle_with_client_mode_iteration, - "pre_test_func": self.soft_ap_toggle_with_client_mode_pre_test, - "soft_ap_params": soft_ap_params, - "ap_params": ap_params, - "iterations": iterations, - } - test_settings_list.append(test_settings) - self.run_generated_testcases( - self.run_toggle_stress_test, - test_settings_list, - name_func=get_test_name_from_settings, - ) - - def test_client_mode_toggle_stress_with_soft_ap(self): - """Same as test_client_mode_toggle_stress, but softap is set up at - test start and verified after every toggle.""" - asserts.skip_if(not self.access_point, "No access point provided.") - tests = self.soft_ap_test_params.get( - "test_client_mode_toggle_stress_with_soft_ap", - [dict(test_name="test_client_mode_toggle_stress_with_soft_ap_default")], - ) - - test_settings_list = [] - for config_settings in tests: - soft_ap_params = get_soft_ap_params_from_config_or_default( - config_settings.get("soft_ap_params", {}) - ) - ap_params = get_ap_params_from_config_or_default( - config_settings.get("ap_params", {}) - ) - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - test_settings = { - "test_name": config_settings.get( - "test_name", - "test_client_mode_toggle_stress_with_soft_ap_%s_iterations" - % iterations, - ), - "test_runner_func": self.client_mode_toggle_with_soft_ap_iteration, - "pre_test_func": self.client_mode_toggle_with_soft_ap_pre_test, - "soft_ap_params": soft_ap_params, - "ap_params": ap_params, - "iterations": iterations, - } - test_settings_list.append(test_settings) - self.run_generated_testcases( - self.run_toggle_stress_test, - test_settings_list, - name_func=get_test_name_from_settings, - ) - - def test_soft_ap_and_client_mode_random_toggle_stress(self): - """Same as above toggle stres tests, but each iteration, either softap, - client mode, or both are toggled, 
then states are verified.""" - asserts.skip_if(not self.access_point, "No access point provided.") - tests = self.soft_ap_test_params.get( - "test_soft_ap_and_client_mode_random_toggle_stress", - [ - dict( - test_name="test_soft_ap_and_client_mode_random_toggle_stress_default" - ) - ], - ) - - test_settings_list = [] - for config_settings in tests: - soft_ap_params = get_soft_ap_params_from_config_or_default( - config_settings.get("soft_ap_params", {}) - ) - ap_params = get_ap_params_from_config_or_default( - config_settings.get("ap_params", {}) - ) - iterations = config_settings.get( - "iterations", DEFAULT_STRESS_TEST_ITERATIONS - ) - test_settings = { - "test_name": config_settings.get( - "test_name", - "test_soft_ap_and_client_mode_random_toggle_stress_%s_iterations" - % iterations, - ), - "soft_ap_params": soft_ap_params, - "ap_params": ap_params, - "iterations": iterations, - } - test_settings_list.append(test_settings) - self.run_generated_testcases( - self.run_soft_ap_and_client_mode_random_toggle_stress_test, - test_settings_list, - name_func=get_test_name_from_settings, - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanRebootTest.py b/src/antlion/tests/wlan/functional/WlanRebootTest.py deleted file mode 100644 index 5c8406b..0000000 --- a/src/antlion/tests/wlan/functional/WlanRebootTest.py +++ /dev/null
@@ -1,824 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import itertools -import os -import time -from multiprocessing import Process - -from antlion import context, utils -from antlion.controllers import iperf_client, iperf_server -from antlion.controllers.access_point import AccessPoint, setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.hostapd_utils import generate_random_password -from antlion.net import wait_for_port -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts, test_runner - -# Constants, for readibility -AP = "ap" -DUT = "dut" -DEVICES = [AP, DUT] - -SOFT = "soft" -HARD = "hard" -REBOOT_TYPES = [SOFT, HARD] - -BAND_2G = "2g" -BAND_5G = "5g" -BANDS = [BAND_2G, BAND_5G] - -IPV4 = "ipv4" -IPV6 = "ipv6" -DUAL_IPV4_IPV6 = {IPV4: True, IPV6: True} -IPV4_ONLY = {IPV4: True, IPV6: False} -IPV6_ONLY = {IPV4: False, IPV6: True} -IP_VERSIONS = [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6] - -INTERRUPTS = [True, False] -OPEN_ENCRYPTION_STRING = "open" -SECURITY_MODES = [ - OPEN_ENCRYPTION_STRING, - hostapd_constants.WPA2_STRING, - hostapd_constants.WPA3_STRING, -] - -DEFAULT_IPERF_TIMEOUT = 30 - -DUT_NETWORK_CONNECTION_TIMEOUT = 60 -DUT_IP_ADDRESS_TIMEOUT = 30 # max time for DAD to 
complete - -# Constants for Custom Reboot Tests -ALL = "all" -BOTH = "both" - -CUSTOM_TEST_REBOOT_DEVICES = {AP: [AP], DUT: [DUT], ALL: [AP, DUT]} -CUSTOM_TEST_REBOOT_TYPES = {SOFT: [SOFT], HARD: [HARD], ALL: [SOFT, HARD]} -CUSTOM_TEST_BANDS = {BAND_2G: [BAND_2G], BAND_5G: [BAND_5G], ALL: [BAND_2G, BAND_5G]} -CUSTOM_TEST_IP_VERSIONS = { - IPV4: [IPV4_ONLY], - IPV6: [IPV6_ONLY], - BOTH: [DUAL_IPV4_IPV6], - ALL: [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6], -} -CUSTOM_TEST_INTERRUPTS = {"true": [True], "false": [False], ALL: [True, False]} - - -class WlanRebootTest(base_test.WifiBaseTest): - """Tests wlan reconnects in different reboot scenarios. - - Testbed Requirement: - * One ACTS compatible device (dut) - * One Whirlwind Access Point (will also serve as iperf server) - * One PduDevice - """ - - def __init__(self, controllers): - super().__init__(controllers) - - def setup_generated_tests(self): - self._read_wlan_reboot_test_params() - self.generate_tests( - test_logic=self.run_reboot_test, - name_func=self.generate_test_name, - arg_sets=self.generate_test_args(), - ) - - def setup_class(self): - super().setup_class() - self.android_devices = getattr(self, "android_devices", []) - self.fuchsia_devices = getattr(self, "fuchsia_devices", []) - self.pdu_devices = getattr(self, "pdu_devices", []) - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.access_point: AccessPoint = self.access_points[0] - - self.iperf_server_on_ap = None - self.iperf_client_on_dut = None - if not self.skip_iperf: - if hasattr(self, "iperf_clients") and self.iperf_clients: - self.iperf_client_on_dut = self.iperf_clients[0] - else: - self.iperf_client_on_dut = self.dut.create_iperf_client() - else: - self.log.info( - "Skipping iperf throughput validation as requested by ACTS " "config" - ) - - def setup_test(self): - self.access_point.stop_all_aps() - self.dut.wifi_toggle_state(True) - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.disconnect() - self.dut.device.configure_wlan() - self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - - def teardown_test(self): - # TODO(b/273923552): We take a snapshot here and before rebooting the - # DUT for every test because the persistence component does not make the - # inspect logs available for 120 seconds. This helps for debugging - # issues where we need previous state. - self.dut.device.take_bug_report() - self.download_ap_logs() - self.access_point.stop_all_aps() - self.dut.disconnect() - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.reset_wifi() - - def setup_ap( - self, ssid, band, ipv4=True, ipv6=False, security_mode=None, password=None - ): - """Setup ap with basic config. - - Args: - ssid: string, ssid to setup on ap - band: string ('2g' or '5g') of band to setup. - ipv4: True if using ipv4 (dhcp), else False. - ipv6: True if using ipv6 (radvd), else False. 
- """ - # TODO(fxb/63719): Add varying AP parameters - security_profile = None - if security_mode: - security_profile = Security(security_mode=security_mode, password=password) - if band == BAND_2G: - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=11, - ssid=ssid, - security=security_profile, - is_ipv6_enabled=ipv6, - ) - elif band == BAND_5G: - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=36, - ssid=ssid, - security=security_profile, - is_ipv6_enabled=ipv6, - ) - - if not ipv4: - self.access_point.stop_dhcp() - - self.log.info("Network (SSID: %s) is up." % ssid) - - def setup_iperf_server_on_ap(self, band) -> iperf_server.IPerfServerOverSsh: - """Configures iperf server based on the tests band. - - Args: - band: string ('2g' or '5g') of band to setup. - """ - if band == BAND_2G: - return iperf_server.IPerfServerOverSsh( - self.access_point.ssh_settings, - 5201, - test_interface=self.access_point.wlan_2g, - ) - elif band == BAND_5G: - return iperf_server.IPerfServerOverSsh( - self.access_point.ssh_settings, - 5201, - test_interface=self.access_point.wlan_5g, - ) - - def get_iperf_server_address(self, iperf_server_on_ap, ip_version): - """Retrieves the ip address of the iperf server. 
- - Args: - iperf_server_on_ap: IPerfServer object, linked to AP - ip_version: string, the ip version (ipv4 or ipv6) - - Returns: - String, the ip address of the iperf_server - """ - iperf_server_addresses = iperf_server_on_ap.get_interface_ip_addresses( - iperf_server_on_ap.test_interface - ) - if ip_version == IPV4: - iperf_server_ip_address = iperf_server_addresses["ipv4_private"][0] - elif ip_version == IPV6: - if iperf_server_addresses["ipv6_private_local"]: - iperf_server_ip_address = iperf_server_addresses["ipv6_private_local"][ - 0 - ] - else: - iperf_server_ip_address = "%s%%%s" % ( - iperf_server_addresses["ipv6_link_local"][0], - self.iperf_client_on_dut.test_interface, - ) - else: - raise ValueError("Invalid IP version: %s" % ip_version) - - return iperf_server_ip_address - - def verify_traffic_between_dut_and_ap( - self, iperf_server_on_ap, iperf_client_on_dut, ip_version=IPV4 - ): - """Runs IPerf traffic from the iperf client (dut) and the iperf - server (and vice versa) and verifies traffic was able to pass - successfully. - - Args: - iperf_server_on_ap: IPerfServer object, linked to AP - iperf_client_on_dut: IPerfClient object, linked to DUT - ip_version: string, the ip version (ipv4 or ipv6) - - Raises: - ValueError, if invalid ip_version is passed. - ConnectionError, if traffic is not passed successfully in both - directions. - """ - dut_ip_addresses = self.dut.device.get_interface_ip_addresses( - iperf_client_on_dut.test_interface - ) - - iperf_server_ip_address = self.get_iperf_server_address( - iperf_server_on_ap, ip_version - ) - - self.log.info( - "Attempting to pass traffic from DUT to IPerf server (%s)." 
- % iperf_server_ip_address - ) - tx_file = iperf_client_on_dut.start( - iperf_server_ip_address, - "-i 1 -t 3 -J", - "reboot_tx", - timeout=DEFAULT_IPERF_TIMEOUT, - ) - tx_results = iperf_server.IPerfResult(tx_file) - if not tx_results.avg_receive_rate or tx_results.avg_receive_rate == 0: - raise ConnectionError( - "Failed to pass IPerf traffic from DUT to server (%s). TX " - "Average Receive Rate: %s" - % (iperf_server_ip_address, tx_results.avg_receive_rate) - ) - else: - self.log.info( - "Success: Traffic passed from DUT to IPerf server (%s)." - % iperf_server_ip_address - ) - self.log.info( - "Attempting to pass traffic from IPerf server (%s) to DUT." - % iperf_server_ip_address - ) - rx_file = iperf_client_on_dut.start( - iperf_server_ip_address, - "-i 1 -t 3 -R -J", - "reboot_rx", - timeout=DEFAULT_IPERF_TIMEOUT, - ) - rx_results = iperf_server.IPerfResult(rx_file) - if not rx_results.avg_receive_rate or rx_results.avg_receive_rate == 0: - raise ConnectionError( - "Failed to pass IPerf traffic from server (%s) to DUT. RX " - "Average Receive Rate: %s" - % (iperf_server_ip_address, rx_results.avg_receive_rate) - ) - else: - self.log.info( - "Success: Traffic passed from IPerf server (%s) to DUT." - % iperf_server_ip_address - ) - - def start_dut_ping_process(self, iperf_server_on_ap, ip_version=IPV4): - """Creates a process that pings the AP from the DUT. - - Runs in parallel for 15 seconds, so it can be interrupted by a reboot. - Sleeps for a few seconds to ensure pings have started. - - Args: - iperf_server_on_ap: IPerfServer object, linked to AP - ip_version: string, the ip version (ipv4 or ipv6) - """ - ap_address = self.get_iperf_server_address(iperf_server_on_ap, ip_version) - if ap_address: - self.log.info( - "Starting ping process to %s in parallel. Logs from this " - "process will be suppressed, since it will be intentionally " - "interrupted." 
% ap_address - ) - ping_proc = Process( - target=self.dut.ping, args=[ap_address], kwargs={"count": 15} - ) - with utils.SuppressLogOutput(): - ping_proc.start() - # Allow for a few seconds of pinging before allowing it to be - # interrupted. - time.sleep(3) - else: - raise ConnectionError("Failed to retrieve APs iperf address.") - - def prepare_dut_for_reconnection(self): - """Perform any actions to ready DUT for reconnection. - - These actions will vary depending on the DUT. eg. android devices may - need to be woken up, ambient devices should not require any interaction, - etc. - """ - self.dut.wifi_toggle_state(True) - for ad in self.android_devices: - ad.droid.wakeUpNow() - - def wait_for_dut_network_connection(self, ssid): - """Checks if device is connected to given network. Sleeps 1 second - between retries. - - Args: - ssid: string of ssid - Raises: - ConnectionError, if DUT is not connected after all timeout. - """ - self.log.info( - "Checking if DUT is connected to %s network. Will retry for %s " - "seconds." % (ssid, self.dut_network_connection_timeout) - ) - timeout = time.time() + self.dut_network_connection_timeout - while time.time() < timeout: - try: - is_connected = self.dut.is_connected(ssid=ssid) - except Exception as err: - self.log.debug("SL4* call failed. Retrying in 1 second.") - is_connected = False - finally: - if is_connected: - self.log.info("Success: DUT has connected.") - break - else: - self.log.debug( - "DUT not connected to network %s...retrying in 1 second." % ssid - ) - time.sleep(1) - else: - raise ConnectionError("DUT failed to connect to the network.") - - def write_csv_time_to_reconnect(self, test_name, time_to_reconnect): - """Writes the time to reconnect to a csv file. - Args: - test_name: the name of the test case - time_to_reconnect: the time from when the rebooted device came back - up to when it reassociated (or 'FAIL'), if it failed to - reconnect. 
- """ - log_context = context.get_current_context() - log_path = os.path.join(log_context.get_base_output_path(), "WlanRebootTest/") - csv_file_name = "%stime_to_reconnect.csv" % log_path - self.log.info("Writing to %s" % csv_file_name) - with open(csv_file_name, "a") as csv_file: - csv_file.write("%s,%s\n" % (test_name, time_to_reconnect)) - - def log_and_continue(self, run, time_to_reconnect=None, error=None): - """Writes the time to reconnect to the csv file before continuing, used - in stress tests runs. - - Args: - time_to_reconnect: the time from when the rebooted device came back - ip to when reassociation occurred. - run: the run number in a looped stress tested., - error: string, error message to log before continuing with the test - """ - if error: - self.log.info( - "Device failed to reconnect to network %s on run %s. Error: %s" - % (self.ssid, run, error) - ) - self.write_csv_time_to_reconnect( - "%s_run_%s" % (self.test_name, run), "FAIL" - ) - - else: - self.log.info( - "Device successfully reconnected to network %s after %s seconds" - " on run %s." % (self.ssid, time_to_reconnect, run) - ) - self.write_csv_time_to_reconnect( - "%s_run_%s" % (self.test_name, run), time_to_reconnect - ) - - def run_reboot_test(self, settings): - """Runs a reboot test based on a given config. - 1. Setups up a network, associates the dut, and saves the network. - 2. Verifies the dut receives ip address(es). - 3. Verifies traffic between DUT and AP (IPerf client and server). - 4. Reboots (hard or soft) the device (dut or ap). - - If the ap was rebooted, setup the same network again. - 5. Wait for reassociation or timeout. - 6. If reassocation occurs: - - Verifies the dut receives ip address(es). - - Verifies traffic between DUT and AP (IPerf client and server). - 7. Logs time to reconnect (or failure to reconnect) - 8. If stress testing, repeats steps 4 - 7 for N iterations. 
- - Args: - settings: dictionary containing the following values: - reboot_device: string ('dut' or 'ap') of the device to reboot. - reboot_type: string ('soft' or 'hard') of how to reboot the - reboot_device. - band: string ('2g' or '5g') of band to setup. - ipv4: True if using ipv4 (dhcp), else False. - ipv6: True if using ipv6 (radvd), else False. - - Optional: - interrupt: if True, the DUT will be pinging the AP in a - parallel process when the reboot occurs. This is used to - compare reconnect times when idle to active. - test_name: name of the test, used when stress testing. - iterations: number of times to perform test, used when stress - testing. - - Raises: - ValueError, if ipv4 and ipv6 are both False - ValueError, if band is not '2g' or '5g' - ValueError, if reboot_device is not 'dut' or 'ap' - ValueError, if reboot_type is not 'soft' or 'hard' - - """ - iterations = settings.get("iterations", 1) - passed_count = 0 - ipv4 = settings.get("ipv4", None) - ipv6 = settings.get("ipv6", None) - reboot_device = settings["reboot_device"] - reboot_type = settings["reboot_type"] - band = settings["band"] - security_mode = settings.get("security_mode", None) - password = settings.get("password", None) - if security_mode: - if security_mode.lower() == "open": - security_mode = None - elif not password: - password = generate_random_password(security_mode=security_mode) - interrupt = settings.get("interrupt", None) - # Skip hard reboots if no PDU present - asserts.skip_if( - reboot_type == HARD and len(self.pdu_devices) == 0, - "Hard reboots require a PDU device.", - ) - # Skip DUT reboot w/ interrupt tests, since they are not more helpful - # and may cause threading issues. - asserts.skip_if( - (reboot_device == DUT) and interrupt, - "Stream interrupts for DUT reboots are prone to threading issues " - "and are not supported.", - ) - - # Validate test settings. 
- if not ipv4 and not ipv6: - raise ValueError("Either ipv4, ipv6, or both must be True.") - if reboot_device != DUT and reboot_device != AP: - raise ValueError("Invalid reboot device: %s" % reboot_device) - if reboot_type != SOFT and reboot_type != HARD: - raise ValueError("Invalid reboot type: %s" % reboot_type) - if band != BAND_2G and band != BAND_5G: - raise ValueError("Invalid band: %s" % band) - - self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password) - if not self.dut.associate( - self.ssid, - target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security_mode - ), - target_pwd=password, - ): - raise EnvironmentError("Initial network connection failed.") - - if not self.skip_iperf: - dut_test_interface = self.iperf_client_on_dut.test_interface - if ipv4: - self.dut.device.wait_for_ipv4_addr(dut_test_interface) - if ipv6: - self.dut.device.wait_for_ipv6_addr(dut_test_interface) - - self.iperf_server_on_ap = self.setup_iperf_server_on_ap(band) - self.iperf_server_on_ap.start() - wait_for_port(self.iperf_server_on_ap.ssh_settings.hostname, 5201) - - ip_version = IPV6 if ipv6 else IPV4 - self.verify_traffic_between_dut_and_ap( - self.iperf_server_on_ap, self.iperf_client_on_dut, ip_version=ip_version - ) - - # Looping reboots for stress testing - for run in range(iterations): - run += 1 - self.log.info("Starting run %s of %s." % (run, iterations)) - - # Ping from DUT to AP during AP reboot - if interrupt: - if ipv4: - self.start_dut_ping_process(self.iperf_server_on_ap) - if ipv6: - self.start_dut_ping_process( - self.iperf_server_on_ap, ip_version=IPV6 - ) - - # TODO(b/273923552): We take a snapshot here and during test - # teardown for every test because the persistence component does not - # make the inspect logs available for 120 seconds. This helps for - # debugging issues where we need previous state. 
- self.dut.device.take_bug_report() - - # DUT reboots - if reboot_device == DUT: - if ( - not self.skip_iperf - and type(self.iperf_client_on_dut) - == iperf_client.IPerfClientOverSsh - ): - self.iperf_client_on_dut.close_ssh() - if reboot_type == SOFT: - self.dut.device.reboot() - elif reboot_type == HARD: - self.dut.hard_power_cycle(self.pdu_devices) - - # AP reboots - elif reboot_device == AP: - if reboot_type == SOFT: - self.log.info("Cleanly stopping ap.") - self.access_point.stop_all_aps() - elif reboot_type == HARD: - if not self.skip_iperf: - self.iperf_server_on_ap.close_ssh() - self.access_point.hard_power_cycle(self.pdu_devices) - self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password) - - self.prepare_dut_for_reconnection() - uptime = time.time() - try: - self.wait_for_dut_network_connection(self.ssid) - time_to_reconnect = time.time() - uptime - - if not self.skip_iperf: - if ipv4: - self.dut.device.wait_for_ipv4_addr(dut_test_interface) - if ipv6: - self.dut.device.wait_for_ipv6_addr(dut_test_interface) - - self.iperf_server_on_ap.start() - - if ipv4: - self.verify_traffic_between_dut_and_ap( - self.iperf_server_on_ap, self.iperf_client_on_dut - ) - if ipv6: - self.verify_traffic_between_dut_and_ap( - self.iperf_server_on_ap, - self.iperf_client_on_dut, - ip_version=IPV6, - ) - - except ConnectionError as err: - self.log_and_continue(run, error=err) - else: - passed_count += 1 - self.log_and_continue(run, time_to_reconnect=time_to_reconnect) - - if passed_count == iterations: - asserts.explicit_pass( - "Test Summary: device successfully reconnected to network %s " - "%s/%s times." % (self.ssid, passed_count, iterations) - ) - - else: - asserts.fail( - "Test Summary: device failed reconnection test. Reconnected to " - "network %s %s/%s times." % (self.ssid, passed_count, iterations) - ) - - def generate_test_name(self, settings): - """Generates a test case name based on the reboot settings passed. 
- - Args: - settings: A dictionary of settings related to reboot test. - - Returns: - A string that represents a test case name. - """ - test_name = ( - "test_{reboot_type}_reboot_{reboot_device}_{band}_{security_mode}".format( - **settings - ) - ) - - if settings.get(IPV4): - test_name += "_ipv4" - - if settings.get(IPV6): - test_name += "_ipv6" - - if settings.get("interrupt"): - test_name += "_interrupt" - - if settings.get("iterations"): - test_name += f"_with_{settings['iterations']}_iterations" - - return test_name - - def generate_test_args(self): - # If custom reboot tests present in ACTS config then run just those - test_args = self._generate_custom_reboots_test_args() - if test_args: - return test_args - - # Interrupt tests requires using iperf. So do not run interrupt tests - # when skip_iperf is True - if self.skip_iperf is True: - interrupts = [False] - else: - interrupts = INTERRUPTS - - for ( - reboot_device, - reboot_type, - band, - ip_version, - interrupt, - security_mode, - ) in itertools.product( - DEVICES, REBOOT_TYPES, BANDS, IP_VERSIONS, interrupts, SECURITY_MODES - ): - settings = { - "reboot_device": reboot_device, - "reboot_type": reboot_type, - "band": band, - "security_mode": security_mode, - "ipv4": ip_version["ipv4"], - "ipv6": ip_version["ipv6"], - "interrupt": interrupt, - } - test_args.append((settings,)) - - return test_args - - def _generate_custom_reboots_test_args(self): - """Used to create custom reboot tests from antlion config. Can be - individual tests or permutation sets (i.e. setting "all" for a - test param will run a test with every permutation). 
- - Parameters: - reboot_device: string - "ap", "dut", or "all" - reboot_type: string - "soft", "hard", or "all" - band: string, "2g" - "5g", "all" - ip_version: string - "ipv4", "ipv6", "both", or "all" - interrupt: bool - whether to have traffic flowing at reboot - security_modes: optional, string or list - "open", "wep", "wpa", - "wpa2", "wpa/wpa2", "wpa3", "wpa2/wpa3" - iterations: int - number of iterations for each test - - Example: - "wlan_reboot_test_params": { - "test_custom_reboots": [ - { - "reboot_device": "dut", - "reboot_type": "soft", - "band": "2g", - "ip_version": "both" - }, - { - "reboot_device": "all", - "reboot_type": "hard", - "band": "all", - "ip_version": ipv4", - "security_modes": "wpa2", - "iterations": 10 - }, - { - "reboot_device": "dut", - "reboot_type": "hard", - "band": "5g", - "ip_version": "ipv4", - "security_modes": ["open", "wpa3"] - } - ] - } - - The first example runs a single DUT soft reboot test with a 2.4GHz - network and dual ipv4/ipv6. - - The second example runs 4 tests, each with 10 iterations. It runs hard - reboots with ipv4 for the permutations of DUT/AP and 2.4GHz/5GHz. - - The third example runs two tests, both hard reboots of the DUT with 5g - and ipv4 only, one with open security and one with WPA3. - """ - if "test_custom_reboots" not in self.wlan_reboot_test_params: - self.log.info("No custom reboots provided in ACTS config.") - return [] - - test_args = [] - for test in self.wlan_reboot_test_params["test_custom_reboots"]: - # Ensure required params are present - try: - reboot_device = test["reboot_device"].lower() - reboot_type = test["reboot_type"].lower() - band = test["band"].lower() - ip_version = test["ip_version"].lower() - except KeyError as err: - raise AttributeError( - "Must provide reboot_type, reboot_device, ip_version, and " - "band (optionally interrupt and iterations) in custom test " - "config. See test_custom_reboots docstring for details. 
" - "Err: %s" % err - ) - security_modes = test.get("security_modes", "open") - interrupt = str(test.get("interrupt", False)).lower() - iterations = test.get("iterations", 1) - - if interrupt == "true" and self.skip_iperf: - raise AttributeError( - "Interrupt can't be set to True when iperf is disabled. " - "Update 'skip_iperf' to 'false' in ACTS config and run again" - ) - - # Validate parameters and convert to lists (for permutations) - try: - reboot_devices = CUSTOM_TEST_REBOOT_DEVICES[reboot_device] - reboot_types = CUSTOM_TEST_REBOOT_TYPES[reboot_type] - bands = CUSTOM_TEST_BANDS[band] - ip_versions = CUSTOM_TEST_IP_VERSIONS[ip_version] - interrupts = CUSTOM_TEST_INTERRUPTS[interrupt] - if isinstance(security_modes, str): - security_modes = [security_modes] - except KeyError as err: - raise AttributeError( - "Invalid custom test parameter provided. Err: %s" % err - ) - - for ( - reboot_device, - reboot_type, - band, - ip_version, - interrupt, - security_mode, - ) in itertools.product( - reboot_devices, - reboot_types, - bands, - ip_versions, - interrupts, - security_modes, - ): - settings = { - "reboot_device": reboot_device, - "reboot_type": reboot_type, - "band": band, - "security_mode": security_mode, - "ipv4": ip_version[IPV4], - "ipv6": ip_version[IPV6], - "interrupt": interrupt, - "iterations": iterations, - } - - test_args.append((settings,)) - return test_args - - def _read_wlan_reboot_test_params(self): - self.wlan_reboot_test_params = self.user_params.get( - "wlan_reboot_test_params", {} - ) - self.skip_iperf = self.wlan_reboot_test_params.get("skip_iperf", False) - # Times (in seconds) to wait for DUT network connection and assigning an - # ip address to the wlan interface. 
- self.dut_network_connection_timeout = self.wlan_reboot_test_params.get( - "dut_network_connection_timeout", DUT_NETWORK_CONNECTION_TIMEOUT - ) - self.dut_ip_address_timeout = self.wlan_reboot_test_params.get( - "dut_ip_address_timeout", DUT_IP_ADDRESS_TIMEOUT - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanScanTest.py b/src/antlion/tests/wlan/functional/WlanScanTest.py deleted file mode 100644 index ba6961a..0000000 --- a/src/antlion/tests/wlan/functional/WlanScanTest.py +++ /dev/null
@@ -1,265 +0,0 @@ -#!/usr/bin/env python3.4 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -This test exercises basic scanning functionality to confirm expected behavior -related to wlan scanning -""" - -from datetime import datetime - -from antlion.controllers.ap_lib import hostapd_ap_preset -from antlion.controllers.ap_lib import hostapd_bss_settings -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib import hostapd_security -from antlion.test_utils.wifi import base_test - -from mobly import signals, test_runner - - -class WlanScanTest(base_test.WifiBaseTest): - """WLAN scan test class. - - Test Bed Requirement: - * One or more Fuchsia devices - * Several Wi-Fi networks visible to the device, including an open Wi-Fi - network or a onHub/GoogleWifi - """ - - def setup_class(self): - super().setup_class() - - self.access_point = self.access_points[0] - self.start_access_point = False - for fd in self.fuchsia_devices: - fd.configure_wlan(association_mechanism="drivers") - if self.access_point: - # This section sets up the config that could be sent to the AP if - # the AP is needed. The reasoning is since ACTS already connects - # to the AP if it is in the config, generating the config in memory - # has no over head is used if need by the test if one of the ssids - # needed for the test is not included in the config. 
The logic - # here creates 2 ssids on each radio, 5ghz and 2.4ghz, with an - # open, no security network and one that is wpa2, for a total of 4 - # networks. However, if all of the ssids are specified in the - # the config will never be written to the AP and the AP will not be - # brought up. For more information about how to configure the - # hostapd config info, see the hostapd libraries, which have more - # documentation. - bss_settings_2g = [] - bss_settings_5g = [] - open_network = self.get_open_network(False, []) - self.open_network_2g = open_network["2g"] - self.open_network_5g = open_network["5g"] - wpa2_settings = self.get_psk_network(False, []) - self.wpa2_network_2g = wpa2_settings["2g"] - self.wpa2_network_5g = wpa2_settings["5g"] - bss_settings_2g.append( - hostapd_bss_settings.BssSettings( - name=self.wpa2_network_2g["SSID"], - ssid=self.wpa2_network_2g["SSID"], - security=hostapd_security.Security( - security_mode=self.wpa2_network_2g["security"], - password=self.wpa2_network_2g["password"], - ), - ) - ) - bss_settings_5g.append( - hostapd_bss_settings.BssSettings( - name=self.wpa2_network_5g["SSID"], - ssid=self.wpa2_network_5g["SSID"], - security=hostapd_security.Security( - security_mode=self.wpa2_network_5g["security"], - password=self.wpa2_network_5g["password"], - ), - ) - ) - self.ap_2g = hostapd_ap_preset.create_ap_preset( - iface_wlan_2g=self.access_point.wlan_2g, - iface_wlan_5g=self.access_point.wlan_5g, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=self.open_network_2g["SSID"], - bss_settings=bss_settings_2g, - ) - self.ap_5g = hostapd_ap_preset.create_ap_preset( - iface_wlan_2g=self.access_point.wlan_2g, - iface_wlan_5g=self.access_point.wlan_5g, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=self.open_network_5g["SSID"], - bss_settings=bss_settings_5g, - ) - - if "wlan_open_network_2g" in self.user_params: - self.open_network_2g = self.user_params.get("wlan_open_network_2g") - elif self.access_point: - 
self.start_access_point_2g = True - else: - raise Exception("Missing parameter in config " "(wlan_open_network_2g)") - - if "wlan_open_network_5g" in self.user_params: - self.open_network_5g = self.user_params.get("wlan_open_network_5g") - elif self.access_point: - self.start_access_point_5g = True - else: - raise Exception("Missing parameter in config " "(wlan_open_network_5g)") - - if "wlan_wpa2_network_2g" in self.user_params: - self.wpa2_network_2g = self.user_params.get("wlan_wpa2_network_2g") - elif self.access_point: - self.start_access_point_2g = True - else: - raise Exception("Missing parameter in config " "(wlan_wpa2_network_2g)") - - if "wlan_wpa2_network_5g" in self.user_params: - self.wpa2_network_5g = self.user_params.get("wlan_wpa2_network_5g") - elif self.access_point: - self.start_access_point_5g = True - else: - raise Exception("Missing parameter in config " "(wlan_wpa2_network_5g)") - - # Only bring up the APs that are needed for the test. Each ssid is - # randomly generated so there is no chance of re associating to a - # previously saved ssid on the device. - if self.start_access_point_2g: - self.start_access_point = True - self.access_point.start_ap(hostapd_config=self.ap_2g) - if self.start_access_point_5g: - self.start_access_point = True - self.access_point.start_ap(hostapd_config=self.ap_5g) - - def setup_test(self): - for fd in self.fuchsia_devices: - # stub for setting up all the fuchsia devices in the testbed. 
- pass - - def teardown_test(self): - for fd in self.fuchsia_devices: - fd.sl4f.wlan_lib.wlanDisconnect() - - def teardown_class(self): - if self.start_access_point: - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - for fd in self.fuchsia_devices: - super().on_device_fail(fd, test_name, begin_time) - fd.configure_wlan(association_mechanism="drivers") - - """Helper Functions""" - - def check_connect_response(self, connection_response): - """Checks the result of connecting to a wlan. - Args: - connection_response: The response from SL4F after attempting - to connect to a wlan. - """ - if connection_response.get("error") is None: - # the command did not get an error response - go ahead and - # check the result - connection_result = connection_response.get("result") - if connection_result: - self.log.info("connection to network successful") - else: - # ideally, we would have the actual error... but logging - # here to cover that error case - raise signals.TestFailure("Connect call failed, aborting test") - else: - # the response indicates an error - log and raise failure - raise signals.TestFailure( - "Aborting test - Connect call failed " - "with error: %s" % connection_response.get("error") - ) - - def scan_while_connected(self, wlan_network_params, fd): - """Connects to as specified network and initiates a scan - Args: - wlan_network_params: A dictionary containing wlan - infomation. - fd: The fuchsia device to connect to the wlan. - """ - target_ssid = wlan_network_params["SSID"] - self.log.info("got the ssid! 
%s", target_ssid) - target_pwd = None - if "password" in wlan_network_params: - target_pwd = wlan_network_params["password"] - - bss_scan_response = fd.sl4f.wlan_lib.wlanScanForBSSInfo().get("result") - connection_response = fd.sl4f.wlan_lib.wlanConnectToNetwork( - target_ssid, bss_scan_response[target_ssid][0], target_pwd=target_pwd - ) - self.check_connect_response(connection_response) - self.basic_scan_request(fd) - - def basic_scan_request(self, fd): - """Initiates a basic scan on a Fuchsia device - Args: - fd: A fuchsia device - """ - start_time = datetime.now() - - scan_response = fd.sl4f.wlan_lib.wlanStartScan() - - # first check if we received an error - if scan_response.get("error") is None: - # the scan command did not get an error response - go ahead - # and check for scan results - scan_results = scan_response["result"] - else: - # the response indicates an error - log and raise failure - raise signals.TestFailure( - "Aborting test - scan failed with " - "error: %s" % scan_response.get("error") - ) - - self.log.info("scan contained %d results", len(scan_results)) - - total_time_ms = (datetime.now() - start_time).total_seconds() * 1000 - self.log.info("scan time: %d ms", total_time_ms) - - if len(scan_results) > 0: - raise signals.TestPass( - details="", extras={"Scan time": "%d" % total_time_ms} - ) - else: - raise signals.TestFailure("Scan failed or did not " "find any networks") - - """Tests""" - - def test_basic_scan_request(self): - """Verify a general scan trigger returns at least one result""" - for fd in self.fuchsia_devices: - self.basic_scan_request(fd) - - def test_scan_while_connected_open_network_2g(self): - for fd in self.fuchsia_devices: - self.scan_while_connected(self.open_network_2g, fd) - - def test_scan_while_connected_wpa2_network_2g(self): - for fd in self.fuchsia_devices: - self.scan_while_connected(self.wpa2_network_2g, fd) - - def test_scan_while_connected_open_network_5g(self): - for fd in self.fuchsia_devices: - 
self.scan_while_connected(self.open_network_5g, fd) - - def test_scan_while_connected_wpa2_network_5g(self): - for fd in self.fuchsia_devices: - self.scan_while_connected(self.wpa2_network_5g, fd) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py b/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py deleted file mode 100644 index 6cfdc0a..0000000 --- a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py +++ /dev/null
@@ -1,390 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, test_runner - - -# TODO(fxb/68956): Add security protocol check to mixed mode tests when info is -# available. -class WlanTargetSecurityTest(base_test.WifiBaseTest): - """Tests Fuchsia's target security concept and security upgrading - - Testbed Requirements: - * One Fuchsia device - * One Whirlwind Access Point - """ - - def setup_class(self): - if "dut" in self.user_params and self.user_params["dut"] != "fuchsia_devices": - raise AttributeError( - "WlanTargetSecurityTest is only relevant for Fuchsia devices." 
- ) - - self.dut = create_wlan_device(self.fuchsia_devices[0]) - if self.dut.device.association_mechanism != "policy": - raise AttributeError("Must use WLAN policy layer to test target security.") - - self.access_point = self.access_points[0] - - def teardown_class(self): - self.dut.disconnect() - self.access_point.stop_all_aps() - - def teardown_test(self): - self.dut.disconnect() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def on_exception(self, test_name, begin_time): - super().on_exception(test_name, begin_time) - self.dut.disconnect() - self.access_point.stop_all_aps() - - def setup_ap(self, security_mode=None): - """Sets up an AP using the provided security mode. - - Args: - security_mode: string, security mode for AP - Returns: - Tuple, (ssid, password). Returns a password even if for open - security, since non-open target securities require a credential - to attempt a connection. 
- """ - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G) - # Length 13, so it can be used for WEP or WPA - password = utils.rand_ascii_str(13) - security_profile = None - - if security_mode: - security_profile = Security(security_mode=security_mode, password=password) - - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - security=security_profile, - ) - - return (ssid, password) - - # Open Security on AP - def test_associate_open_ap_with_open_target_security(self): - ssid, _ = self.setup_ap() - asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - - def test_reject_open_ap_with_wep_target_security(self): - ssid, password = self.setup_ap() - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_reject_open_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap() - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_reject_open_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap() - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_reject_open_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap() - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - # WEP Security on AP - def test_reject_wep_ap_with_open_target_security(self): - ssid, _ = self.setup_ap(hostapd_constants.WEP_STRING) - asserts.assert_false(self.dut.associate(ssid), "Should not have associated.") - - def test_associate_wep_ap_with_wep_target_security(self): - ssid, 
password = self.setup_ap(hostapd_constants.WEP_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_reject_wep_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WEP_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_reject_wep_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WEP_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_reject_wep_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WEP_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - # WPA Security on AP - def test_reject_wpa_ap_with_open_target_security(self): - ssid, _ = self.setup_ap(hostapd_constants.WPA_STRING) - asserts.assert_false(self.dut.associate(ssid), "Should not have associated.") - - def test_reject_wpa_ap_with_wep_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_associate_wpa_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_reject_wpa_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_STRING) - asserts.assert_false( - 
self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_reject_wpa_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - # WPA2 Security on AP - def test_reject_wpa2_ap_with_open_target_security(self): - ssid, _ = self.setup_ap(hostapd_constants.WPA2_STRING) - asserts.assert_false(self.dut.associate(ssid), "Should not have associated.") - - def test_reject_wpa2_ap_with_wep_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_associate_wpa2_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_associate_wpa2_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_reject_wpa2_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - # WPA/WPA2 Security on AP - def test_reject_wpa_wpa2_ap_with_open_target_security(self): - ssid, _ = self.setup_ap(hostapd_constants.WPA_MIXED_STRING) - asserts.assert_false(self.dut.associate(ssid), "Should not 
have associated.") - - def test_reject_wpa_wpa2_ap_with_wep_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_associate_wpa_wpa2_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_associate_wpa_wpa2_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_reject_wpa_wpa2_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - # WPA3 Security on AP - def test_reject_wpa3_ap_with_open_target_security(self): - ssid, _ = self.setup_ap(hostapd_constants.WPA3_STRING) - asserts.assert_false(self.dut.associate(ssid), "Should not have associated.") - - def test_reject_wpa3_ap_with_wep_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_associate_wpa3_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Expected failure to associate. 
WPA credentials for WPA3 was " - "temporarily disabled, see https://fxbug.dev/85817 for context. " - "If this feature was reenabled, please update this test's " - "expectation.", - ) - - def test_associate_wpa3_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_associate_wpa3_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - # WPA2/WPA3 Security on AP - def test_reject_wpa2_wpa3_ap_with_open_target_security(self): - ssid, _ = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING) - asserts.assert_false(self.dut.associate(ssid), "Should not have associated.") - - def test_reject_wpa2_wpa3_ap_with_wep_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password - ), - "Should not have associated.", - ) - - def test_associate_wpa2_wpa3_ap_with_wpa_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING) - asserts.assert_false( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password - ), - "Expected failure to associate. WPA credentials for WPA3 was " - "temporarily disabled, see https://fxbug.dev/85817 for context. 
" - "If this feature was reenabled, please update this test's " - "expectation.", - ) - - def test_associate_wpa2_wpa3_ap_with_wpa2_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - def test_associate_wpa2_wpa3_ap_with_wpa3_target_security(self): - ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING) - asserts.assert_true( - self.dut.associate( - ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password - ), - "Failed to associate.", - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py b/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py deleted file mode 100644 index 4b0e9b0..0000000 --- a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py +++ /dev/null
@@ -1,421 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from datetime import datetime, timedelta, timezone -from typing import FrozenSet, Optional - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.ap_lib.radio_measurement import ( - BssidInformation, - BssidInformationCapabilities, - NeighborReportElement, - PhyType, -) -from antlion.controllers.ap_lib.wireless_network_management import ( - BssTransitionManagementRequest, -) -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test - -from mobly import asserts, signals, test_runner - - -# Antlion can see (via the wlan_features config directive) whether WNM features -# are enabled, and runs or skips tests depending on presence of WNM features. -class WlanWirelessNetworkManagementTest(base_test.WifiBaseTest): - """Tests Fuchsia's Wireless Network Management (AKA 802.11v) support. - - Testbed Requirements: - * One Fuchsia device - * One Whirlwind access point - - Existing Fuchsia drivers do not yet support WNM features out-of-the-box, so these - tests check that WNM features are not enabled. 
- """ - - def setup_class(self): - if "dut" in self.user_params and self.user_params["dut"] != "fuchsia_devices": - raise AttributeError( - "WlanWirelessNetworkManagementTest is only relevant for Fuchsia devices." - ) - - self.dut = create_wlan_device(self.fuchsia_devices[0]) - if self.dut.device.association_mechanism != "policy": - raise AttributeError("Must use WLAN policy layer to test WNM.") - self.access_point = self.access_points[0] - - def teardown_class(self): - self.dut.disconnect() - self.access_point.stop_all_aps() - - def teardown_test(self): - self.dut.disconnect() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name: str, begin_time: str): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def on_exception(self, test_name: str, begin_time: str): - super().on_exception(test_name, begin_time) - self.dut.disconnect() - self.access_point.stop_all_aps() - - def setup_ap( - self, - ssid: str, - security_mode: Optional[str] = None, - additional_ap_parameters: Optional[dict] = None, - channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G, - wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(), - ): - """Sets up an AP using the provided parameters. - - Args: - ssid: SSID for the AP. - security_mode: expressed as string (e.g. WPA2, default is None - which indicates open security). - additional_ap_parameters: A dictionary of parameters that can sent - directly into the hostapd config file. - channel: which channel number to set the AP to (default is - AP_DEFAULT_CHANNEL_2G). - wnm_features: Wireless Network Management features to enable - (default is no WNM features). 
- """ - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=channel, - ssid=ssid, - security=Security(security_mode), - additional_ap_parameters=additional_ap_parameters, - wnm_features=wnm_features, - ) - - def _get_client_mac(self) -> str: - """Get the MAC address of the DUT client interface. - - Returns: - str, MAC address of the DUT client interface. - Raises: - ValueError if there is no DUT client interface. - ConnectionError if the DUT interface query fails. - """ - wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList() - if wlan_ifaces.get("error"): - raise ConnectionError( - "Failed to get wlan interface IDs: %s" % wlan_ifaces["error"] - ) - - for wlan_iface in wlan_ifaces["result"]: - iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(wlan_iface) - if iface_info.get("error"): - raise ConnectionError( - "Failed to query wlan iface: %s" % iface_info["error"] - ) - - if iface_info["result"]["role"] == "Client": - return utils.mac_address_list_to_str(iface_info["result"]["sta_addr"]) - raise ValueError( - "Failed to get client interface mac address. No client interface found." 
- ) - - def test_bss_transition_is_not_advertised_when_ap_supported_dut_unsupported(self): - if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"): - raise signals.TestSkip("skipping test because BTM feature is present") - - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - wnm_features = frozenset( - [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT] - ) - self.setup_ap(ssid, wnm_features=wnm_features) - asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - asserts.assert_true(self.dut.is_connected(), "Failed to connect.") - client_mac = self._get_client_mac() - - ext_capabilities = self.access_point.get_sta_extended_capabilities( - self.access_point.wlan_2g, client_mac - ) - asserts.assert_false( - ext_capabilities.bss_transition, - "DUT is incorrectly advertising BSS Transition Management support", - ) - - def test_bss_transition_is_advertised_when_ap_supported_dut_supported(self): - if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"): - raise signals.TestSkip("skipping test because BTM feature is not present") - - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - wnm_features = frozenset( - [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT] - ) - self.setup_ap(ssid, wnm_features=wnm_features) - asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - asserts.assert_true(self.dut.is_connected(), "Failed to connect.") - client_mac = self._get_client_mac() - - ext_capabilities = self.access_point.get_sta_extended_capabilities( - self.access_point.wlan_2g, client_mac - ) - asserts.assert_true( - ext_capabilities.bss_transition, - "DUT is not advertising BSS Transition Management support", - ) - - def test_wnm_sleep_mode_is_not_advertised_when_ap_supported_dut_unsupported(self): - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - wnm_features = frozenset([hostapd_constants.WnmFeature.WNM_SLEEP_MODE]) - self.setup_ap(ssid, wnm_features=wnm_features) - 
asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - asserts.assert_true(self.dut.is_connected(), "Failed to connect.") - client_mac = self._get_client_mac() - - ext_capabilities = self.access_point.get_sta_extended_capabilities( - self.access_point.wlan_2g, client_mac - ) - asserts.assert_false( - ext_capabilities.wnm_sleep_mode, - "DUT is incorrectly advertising WNM Sleep Mode support", - ) - - def test_roam_on_btm_req(self): - if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"): - raise signals.TestSkip("skipping test because BTM feature is not present") - - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - wnm_features = frozenset( - [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT] - ) - # Setup 2.4 GHz AP. - self.setup_ap( - ssid, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - wnm_features=wnm_features, - ) - - asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - # Verify that DUT is actually associated (as seen from AP). - client_mac = self._get_client_mac() - asserts.assert_true( - client_mac in self.access_point.get_stas(self.access_point.wlan_2g), - "Client MAC not included in list of associated STAs on the 2.4GHz band", - ) - - # Setup 5 GHz AP with same SSID. - self.setup_ap( - ssid, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - wnm_features=wnm_features, - ) - - # Construct a BTM request. 
- dest_bssid = self.access_point.get_bssid_from_ssid( - ssid, self.access_point.wlan_5g - ) - dest_bssid_info = BssidInformation( - security=True, capabilities=BssidInformationCapabilities() - ) - neighbor_5g_ap = NeighborReportElement( - dest_bssid, - dest_bssid_info, - operating_class=126, - channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - phy_type=PhyType.VHT, - ) - btm_req = BssTransitionManagementRequest( - preferred_candidate_list_included=True, - disassociation_imminent=True, - candidate_list=[neighbor_5g_ap], - ) - - # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug. - # TODO(fxbug.dev/117517) Remove when fixed, or when non-firmware BTM support is merged. - time.sleep(5) - - # Send BTM request from 2.4 GHz AP to DUT - self.access_point.send_bss_transition_management_req( - self.access_point.wlan_2g, client_mac, btm_req - ) - - # Check that DUT has reassociated. - REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2) - while datetime.now(timezone.utc) < REASSOC_DEADLINE: - if client_mac in self.access_point.get_stas(self.access_point.wlan_5g): - break - else: - time.sleep(0.25) - - # Verify that DUT roamed (as seen from AP). - asserts.assert_true( - client_mac in self.access_point.get_stas(self.access_point.wlan_5g), - "Client MAC not included in list of associated STAs on the 5GHz band", - ) - - def test_btm_req_ignored_dut_unsupported(self): - if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"): - raise signals.TestSkip("skipping test because BTM feature is present") - - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - wnm_features = frozenset( - [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT] - ) - # Setup 2.4 GHz AP. - self.setup_ap( - ssid, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - wnm_features=wnm_features, - ) - - asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - # Verify that DUT is actually associated (as seen from AP). 
- client_mac = self._get_client_mac() - asserts.assert_true( - client_mac in self.access_point.get_stas(self.access_point.wlan_2g), - "Client MAC not included in list of associated STAs on the 2.4GHz band", - ) - - # Setup 5 GHz AP with same SSID. - self.setup_ap( - ssid, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - wnm_features=wnm_features, - ) - - # Construct a BTM request. - dest_bssid = self.access_point.get_bssid_from_ssid( - ssid, self.access_point.wlan_5g - ) - dest_bssid_info = BssidInformation( - security=True, capabilities=BssidInformationCapabilities() - ) - neighbor_5g_ap = NeighborReportElement( - dest_bssid, - dest_bssid_info, - operating_class=126, - channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - phy_type=PhyType.VHT, - ) - btm_req = BssTransitionManagementRequest( - disassociation_imminent=True, candidate_list=[neighbor_5g_ap] - ) - - # Send BTM request from 2.4 GHz AP to DUT - self.access_point.send_bss_transition_management_req( - self.access_point.wlan_2g, client_mac, btm_req - ) - - # Check that DUT has not reassociated. - REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2) - while datetime.now(timezone.utc) < REASSOC_DEADLINE: - # Fail if DUT has reassociated to 5 GHz AP (as seen from AP). - if client_mac in self.access_point.get_stas(self.access_point.wlan_5g): - raise signals.TestFailure( - "DUT unexpectedly roamed to target BSS after BTM request" - ) - else: - time.sleep(0.25) - - # DUT should have stayed associated to original AP. 
- asserts.assert_true( - client_mac in self.access_point.get_stas(self.access_point.wlan_2g), - "DUT lost association on the 2.4GHz band after BTM request", - ) - - def test_btm_req_target_ap_rejects_reassoc(self): - if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"): - raise signals.TestSkip("skipping test because BTM feature is not present") - - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - wnm_features = frozenset( - [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT] - ) - # Setup 2.4 GHz AP. - self.setup_ap( - ssid, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - wnm_features=wnm_features, - ) - - asserts.assert_true(self.dut.associate(ssid), "Failed to associate.") - # Verify that DUT is actually associated (as seen from AP). - client_mac = self._get_client_mac() - asserts.assert_true( - client_mac in self.access_point.get_stas(self.access_point.wlan_2g), - "Client MAC not included in list of associated STAs on the 2.4GHz band", - ) - - # Setup 5 GHz AP with same SSID, but reject all STAs. - reject_all_sta_param = {"max_num_sta": 0} - self.setup_ap( - ssid, - additional_ap_parameters=reject_all_sta_param, - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - wnm_features=wnm_features, - ) - - # Construct a BTM request. - dest_bssid = self.access_point.get_bssid_from_ssid( - ssid, self.access_point.wlan_5g - ) - dest_bssid_info = BssidInformation( - security=True, capabilities=BssidInformationCapabilities() - ) - neighbor_5g_ap = NeighborReportElement( - dest_bssid, - dest_bssid_info, - operating_class=126, - channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - phy_type=PhyType.VHT, - ) - btm_req = BssTransitionManagementRequest( - disassociation_imminent=True, candidate_list=[neighbor_5g_ap] - ) - - # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug. - # TODO(fxbug.dev/117517) Remove when fixed, or when non-firmware BTM support is merged. 
- time.sleep(5) - - # Send BTM request from 2.4 GHz AP to DUT - self.access_point.send_bss_transition_management_req( - self.access_point.wlan_2g, client_mac, btm_req - ) - - # Check that DUT has not reassociated. - REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2) - while datetime.now(timezone.utc) < REASSOC_DEADLINE: - # Fail if DUT has reassociated to 5 GHz AP (as seen from AP). - if client_mac in self.access_point.get_stas(self.access_point.wlan_5g): - raise signals.TestFailure( - "DUT unexpectedly roamed to target BSS after BTM request" - ) - else: - time.sleep(0.25) - - # DUT should have stayed associated to original AP. - asserts.assert_true( - client_mac in self.access_point.get_stas(self.access_point.wlan_2g), - "DUT lost association on the 2.4GHz band after BTM request", - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml b/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml deleted file mode 100644 index c559bbc..0000000 --- a/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml +++ /dev/null
@@ -1,2 +0,0 @@ -wlan_reboot_test_params: - skip_iperf: false
diff --git a/src/antlion/tests/wlan/misc/BUILD.gn b/src/antlion/tests/wlan/misc/BUILD.gn deleted file mode 100644 index a0250c8..0000000 --- a/src/antlion/tests/wlan/misc/BUILD.gn +++ /dev/null
@@ -1,26 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("wlan_interface_test") { - main_source = "WlanInterfaceTest.py" - environments = display_envs -} - -antlion_host_test("wlan_misc_scenario") { - main_source = "WlanMiscScenarioTest.py" - environments = display_ap_envs -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":wlan_interface_test($host_toolchain)", - ":wlan_misc_scenario($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py b/src/antlion/tests/wlan/misc/WlanInterfaceTest.py deleted file mode 100644 index 0614901..0000000 --- a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py +++ /dev/null
@@ -1,64 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import signals, test_runner - - -class WlanInterfaceTest(base_test.WifiBaseTest): - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' - ) - - def test_destroy_iface(self): - """Test that we don't error out when destroying the WLAN interface. - - Steps: - 1. Find a wlan interface - 2. Destroy it - - Expected Result: - Verify there are no errors in destroying the wlan interface. - - Returns: - signals.TestPass if no errors - signals.TestFailure if there are any errors during the test. 
- - TAGS: WLAN - Priority: 1 - """ - wlan_interfaces = self.dut.get_wlan_interface_id_list() - if len(wlan_interfaces) < 1: - raise signals.TestFailure("Not enough wlan interfaces for test") - if not self.dut.destroy_wlan_interface(wlan_interfaces[0]): - raise signals.TestFailure("Failed to destroy WLAN interface") - raise signals.TestPass("Success") - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py b/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py deleted file mode 100644 index b5d17c5..0000000 --- a/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py +++ /dev/null
@@ -1,118 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from antlion import utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.hostapd_utils import generate_random_password -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, test_runner - - -class WlanMiscScenarioTest(base_test.WifiBaseTest): - """Random scenario tests, usually to reproduce certain bugs, that do not - fit into a specific test category, but should still be run in CI to catch - regressions. - """ - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.access_point = self.access_points[0] - - def teardown_class(self): - self.dut.disconnect() - self.access_point.stop_all_aps() - - def teardown_test(self): - self.dut.disconnect() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.access_point.stop_all_aps() - - def on_exception(self, test_name, begin_time): - super().on_exception(test_name, begin_time) - self.dut.disconnect() - self.access_point.stop_all_aps() - - def test_connect_to_wpa2_after_wpa3_rejection(self): - """Test association to non-WPA3 network after receiving a WPA3 - rejection, which was triggering a firmware hang. - - Bug: https://bugs.fuchsia.dev/p/fuchsia/issues/detail?id=71233 - """ - # Setup a WPA3 network - wpa3_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=wpa3_ssid, - security=Security( - security_mode="wpa3", password=generate_random_password("wpa3") - ), - ) - # Attempt to associate with wrong password, expecting failure - self.log.info("Attempting to associate WPA3 with wrong password.") - asserts.assert_false( - self.dut.associate( - wpa3_ssid, target_pwd="wrongpass", target_security="wpa3" - ), - "Associated with WPA3 network using the wrong password", - ) - - self.access_point.stop_all_aps() - - # Setup a WPA2 Network - wpa2_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G) - wpa2_password = generate_random_password("wpa2") - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=wpa2_ssid, - security=Security(security_mode="wpa2", password=wpa2_password), - ) - - # Attempt to associate, expecting success - self.log.info("Attempting to associate with WPA2 network.") - asserts.assert_true( - self.dut.associate( - wpa2_ssid, 
target_pwd=wpa2_password, target_security="wpa2" - ), - "Failed to associate with WPA2 network after a WPA3 rejection.", - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/BUILD.gn b/src/antlion/tests/wlan/performance/BUILD.gn deleted file mode 100644 index 0fb75d9..0000000 --- a/src/antlion/tests/wlan/performance/BUILD.gn +++ /dev/null
@@ -1,46 +0,0 @@ -# Copyright 2023 The Fuchsia Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import("//third_party/antlion/antlion_host_test.gni") -import("//third_party/antlion/environments.gni") - -assert(is_host, "antlion tests only supported for host testing") - -antlion_host_test("channel_sweep_test") { - main_source = "ChannelSweepTest.py" - test_params = "channel_sweep_test_params.yaml" - environments = display_ap_iperf_envs -} - -antlion_host_test("wlan_rvr_test") { - main_source = "WlanRvrTest.py" - test_params = "rvr_settings.yaml" - environments = display_ap_iperf_attenuator_envs -} - -antlion_host_test("wlan_wmm_test") { - main_source = "WmmTest.py" - - # Requires a second station and custom configuration. There are no available - # testbeds to support this toplogy. This will remain an at-desk test until an - # infra-hosted testbed matching this topology is supported. - environments = [] -} - -group("e2e_tests") { - testonly = true - public_deps = [ - ":wlan_rvr_test($host_toolchain)", - ] -} - -group("e2e_tests_manual") { - testonly = true - public_deps = [ - # Running ChannelSweepTest is usually only necessary when verifying new WLAN - # firmware patches. Take it out of automation; it takes too long otherwise. - ":channel_sweep_test($host_toolchain)", - ":wlan_wmm_test($host_toolchain)", - ] -}
diff --git a/src/antlion/tests/wlan/performance/ChannelSweepTest.py b/src/antlion/tests/wlan/performance/ChannelSweepTest.py deleted file mode 100644 index db148e9..0000000 --- a/src/antlion/tests/wlan/performance/ChannelSweepTest.py +++ /dev/null
@@ -1,1254 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import time - -from statistics import pstdev - -from antlion import context, utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_config, hostapd_constants -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.iperf_server import IPerfResult -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, test_runner - -N_CAPABILITIES_DEFAULT = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_SGI40, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, -] - -AC_CAPABILITIES_DEFAULT = [ - hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, - hostapd_constants.AC_CAPABILITY_RXLDPC, - hostapd_constants.AC_CAPABILITY_SHORT_GI_80, - hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, - hostapd_constants.AC_CAPABILITY_RX_STBC_1, - hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, - hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, - hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, -] - -DEFAULT_MIN_THROUGHPUT = 0 -DEFAULT_MAX_STD_DEV = 1 -DEFAULT_IPERF_TIMEOUT = 30 - -DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR = 30 -GRAPH_CIRCLE_SIZE = 10 -IPERF_NO_THROUGHPUT_VALUE = 
0 -MAX_2_4_CHANNEL = 14 -TIME_TO_SLEEP_BETWEEN_RETRIES = 1 -TIME_TO_WAIT_FOR_COUNTRY_CODE = 10 -WEP_HEX_STRING_LENGTH = 10 - -MEGABITS_PER_SECOND = "Mbps" - - -def get_test_name(settings): - """Retrieves the test_name value from test_settings""" - return settings.get("test_name") - - -class ChannelSweepTest(base_test.WifiBaseTest): - """Tests channel performance and regulatory compliance.. - - Testbed Requirement: - * One ACTS compatible device (dut) - * One Access Point - * One Linux Machine used as IPerfServer if running performance tests - Note: Performance tests should be done in isolated testbed. - """ - - def __init__(self, controllers): - super().__init__(controllers) - if "channel_sweep_test_params" in self.user_params: - self.time_to_wait_for_ip_addr = self.user_params[ - "channel_sweep_test_params" - ].get("time_to_wait_for_ip_addr", DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR) - else: - self.time_to_wait_for_ip_addr = DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' - ) - - self.android_devices = getattr(self, "android_devices", []) - - self.access_point = self.access_points[0] - self.access_point.stop_all_aps() - - self.iperf_server = None - self.iperf_client = None - - self.channel_sweep_test_params = self.user_params.get( - "channel_sweep_test_params", {} - ) - # Allows users to skip the iperf throughput measurements, just verifying - # association. 
- if not self.channel_sweep_test_params.get("skip_performance"): - try: - self.iperf_server = self.iperf_servers[0] - self.iperf_server.start() - except AttributeError: - self.log.warn( - "Missing iperf config. Throughput cannot be measured, so only " - "association will be tested." - ) - - if hasattr(self, "iperf_clients") and self.iperf_clients: - self.iperf_client = self.iperf_clients[0] - else: - self.iperf_client = self.dut.create_iperf_client() - - self.regulatory_results = "====CountryCode,Channel,Frequency,ChannelBandwith,Connected/Not-Connected====\n" - - def teardown_class(self): - super().teardown_class() - output_path = context.get_current_context().get_base_output_path() - regulatory_save_path = "%s/ChannelSweepTest/%s" % ( - output_path, - "regulatory_results.txt", - ) - f = open(regulatory_save_path, "w") - f.write(self.regulatory_results) - f.close() - - def setup_test(self): - # TODO(fxb/46417): Uncomment when wlanClearCountry is implemented up any - # country code changes. - # for fd in self.fuchsia_devices: - # phy_ids_response = fd.wlan_lib.wlanPhyIdList() - # if phy_ids_response.get('error'): - # raise ConnectionError( - # 'Failed to retrieve phy ids from FuchsiaDevice (%s). ' - # 'Error: %s' % (fd.ip, phy_ids_response['error'])) - # for id in phy_ids_response['result']: - # clear_country_response = fd.wlan_lib.wlanClearCountry(id) - # if clear_country_response.get('error'): - # raise EnvironmentError( - # 'Failed to reset country code on FuchsiaDevice (%s). 
' - # 'Error: %s' % (fd.ip, clear_country_response['error']) - # ) - self.access_point.stop_all_aps() - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - self.dut.disconnect() - - def teardown_test(self): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def set_dut_country_code(self, country_code): - """Set the country code on the DUT. Then verify that the country - code was set successfully - - Args: - country_code: string, the 2 character country code to set - """ - self.log.info("Setting DUT country code to %s" % country_code) - country_code_response = self.dut.device.sl4f.regulatory_region_lib.setRegion( - country_code - ) - if country_code_response.get("error"): - raise EnvironmentError( - "Failed to set country code (%s) on DUT. Error: %s" - % (country_code, country_code_response["error"]) - ) - - self.log.info( - "Verifying DUT country code was correctly set to %s." % country_code - ) - phy_ids_response = self.dut.device.sl4f.wlan_lib.wlanPhyIdList() - if phy_ids_response.get("error"): - raise ConnectionError( - "Failed to get phy ids from DUT. Error: %s" - % (country_code, phy_ids_response["error"]) - ) - - end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE - while time.time() < end_time: - for id in phy_ids_response["result"]: - get_country_response = self.dut.device.sl4f.wlan_lib.wlanGetCountry(id) - if get_country_response.get("error"): - raise ConnectionError( - "Failed to query PHY ID (%s) for country. Error: %s" - % (id, get_country_response["error"]) - ) - - set_code = "".join( - [chr(ascii_char) for ascii_char in get_country_response["result"]] - ) - if set_code != country_code: - self.log.debug( - "PHY (id: %s) has incorrect country code set. 
" - "Expected: %s, Got: %s" % (id, country_code, set_code) - ) - break - else: - self.log.info("All PHYs have expected country code (%s)" % country_code) - break - time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES) - else: - raise EnvironmentError( - "Failed to set DUT country code to %s." % country_code - ) - - def setup_ap(self, channel, channel_bandwidth, security_profile=None): - """Start network on AP with basic configuration. - - Args: - channel: int, channel to use for network - channel_bandwidth: int, channel bandwidth in mhz to use for network, - security_profile: Security object, or None if open - - Returns: - string, ssid of network running - - Raises: - ConnectionError if network is not started successfully. - """ - if channel > MAX_2_4_CHANNEL: - vht_bandwidth = channel_bandwidth - else: - vht_bandwidth = None - - if channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_20MHZ: - n_capabilities = N_CAPABILITIES_DEFAULT + [ - hostapd_constants.N_CAPABILITY_HT20 - ] - elif ( - channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_40MHZ - or channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_80MHZ - ): - if hostapd_config.ht40_plus_allowed(channel): - extended_channel = [hostapd_constants.N_CAPABILITY_HT40_PLUS] - elif hostapd_config.ht40_minus_allowed(channel): - extended_channel = [hostapd_constants.N_CAPABILITY_HT40_MINUS] - else: - raise ValueError("Invalid Channel: %s" % channel) - n_capabilities = N_CAPABILITIES_DEFAULT + extended_channel - else: - raise ValueError("Invalid Bandwidth: %s" % channel_bandwidth) - ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G) - try: - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=channel, - security=security_profile, - n_capabilities=n_capabilities, - ac_capabilities=None, - force_wmm=True, - ssid=ssid, - vht_bandwidth=vht_bandwidth, - setup_bridge=True, - ) - except Exception as err: - raise ConnectionError( - "Failed to setup ap on channel: %s, channel 
bandwidth: %smhz. " - "Error: %s" % (channel, channel_bandwidth, err) - ) - else: - self.log.info( - "Network (ssid: %s) up on channel %s w/ channel bandwidth %smhz" - % (ssid, channel, channel_bandwidth) - ) - - return ssid - - def get_and_verify_iperf_address(self, channel, device, interface=None): - """Get ip address from a devices interface and verify it belongs to - expected subnet based on APs DHCP config. - - Args: - channel: int, channel network is running on, to determine subnet - device: device to get ip address for - interface (default: None): interface on device to get ip address. - If None, uses device.test_interface. - - Returns: - String, ip address of device on given interface (or test_interface) - - Raises: - ConnectionError, if device does not have a valid ip address after - all retries. - """ - if channel <= MAX_2_4_CHANNEL: - subnet = self.access_point._AP_2G_SUBNET_STR - else: - subnet = self.access_point._AP_5G_SUBNET_STR - end_time = time.time() + self.time_to_wait_for_ip_addr - while time.time() < end_time: - if interface: - device_addresses = device.get_interface_ip_addresses(interface) - else: - device_addresses = device.get_interface_ip_addresses( - device.test_interface - ) - - if device_addresses["ipv4_private"]: - for ip_addr in device_addresses["ipv4_private"]: - if utils.ip_in_subnet(ip_addr, subnet): - return ip_addr - else: - self.log.debug( - "Device has an ip address (%s), but it is not in " - "subnet %s" % (ip_addr, subnet) - ) - else: - self.log.debug("Device does not have a valid ip address. Retrying.") - time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES) - raise ConnectionError("Device failed to get an ip address.") - - def get_iperf_throughput( - self, iperf_server_address, iperf_client_address, reverse=False - ): - """Run iperf between client and server and get the throughput. 
- - Args: - iperf_server_address: string, ip address of running iperf server - iperf_client_address: string, ip address of iperf client (dut) - reverse (default: False): If True, run traffic in reverse direction, - from server to client. - - Returns: - int, iperf throughput OR IPERF_NO_THROUGHPUT_VALUE, if iperf fails - """ - if reverse: - self.log.info( - "Running IPerf traffic from server (%s) to dut (%s)." - % (iperf_server_address, iperf_client_address) - ) - iperf_results_file = self.iperf_client.start( - iperf_server_address, - "-i 1 -t 10 -R -J", - "channel_sweep_rx", - timeout=DEFAULT_IPERF_TIMEOUT, - ) - else: - self.log.info( - "Running IPerf traffic from dut (%s) to server (%s)." - % (iperf_client_address, iperf_server_address) - ) - iperf_results_file = self.iperf_client.start( - iperf_server_address, - "-i 1 -t 10 -J", - "channel_sweep_tx", - timeout=DEFAULT_IPERF_TIMEOUT, - ) - if iperf_results_file: - iperf_results = IPerfResult( - iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND - ) - return iperf_results.avg_send_rate - else: - return IPERF_NO_THROUGHPUT_VALUE - - def log_to_file_and_throughput_data( - self, channel, channel_bandwidth, tx_throughput, rx_throughput - ): - """Write performance info to csv file and to throughput data. 
- - Args: - channel: int, channel that test was run on - channel_bandwidth: int, channel bandwidth the test used - tx_throughput: float, throughput value from dut to iperf server - rx_throughput: float, throughput value from iperf server to dut - """ - test_name = self.throughput_data["test"] - output_path = context.get_current_context().get_base_output_path() - log_path = "%s/ChannelSweepTest/%s" % (output_path, test_name) - if not os.path.exists(log_path): - os.makedirs(log_path) - log_file = "%s/%s_%smhz.csv" % (log_path, test_name, channel_bandwidth) - self.log.info("Writing IPerf results for %s to %s" % (test_name, log_file)) - with open(log_file, "a") as csv_file: - csv_file.write("%s,%s,%s\n" % (channel, tx_throughput, rx_throughput)) - self.throughput_data["results"][str(channel)] = { - "tx_throughput": tx_throughput, - "rx_throughput": rx_throughput, - } - - def write_graph(self): - """Create graph html files from throughput data, plotting channel vs - tx_throughput and channel vs rx_throughput. - """ - # If performance measurement is skipped - if not self.iperf_server: - return - - try: - from bokeh.plotting import ColumnDataSource - from bokeh.plotting import figure - from bokeh.plotting import output_file - from bokeh.plotting import save - except ImportError as e: - self.log.warn( - "bokeh is not installed: skipping creation of graphs. " - "Note CSV files are still available. If graphs are " - 'desired, install antlion with the "bokeh" feature.' 
- ) - return - - output_path = context.get_current_context().get_base_output_path() - test_name = self.throughput_data["test"] - channel_bandwidth = self.throughput_data["channel_bandwidth"] - output_file_name = "%s/ChannelSweepTest/%s/%s_%smhz.html" % ( - output_path, - test_name, - test_name, - channel_bandwidth, - ) - output_file(output_file_name) - channels = [] - tx_throughputs = [] - rx_throughputs = [] - for channel in self.throughput_data["results"]: - channels.append(str(channel)) - tx_throughputs.append( - self.throughput_data["results"][channel]["tx_throughput"] - ) - rx_throughputs.append( - self.throughput_data["results"][channel]["rx_throughput"] - ) - channel_vs_throughput_data = ColumnDataSource( - data=dict( - channels=channels, - tx_throughput=tx_throughputs, - rx_throughput=rx_throughputs, - ) - ) - TOOLTIPS = [ - ("Channel", "@channels"), - ("TX_Throughput", "@tx_throughput"), - ("RX_Throughput", "@rx_throughput"), - ] - channel_vs_throughput_graph = figure( - title="Channels vs. 
Throughput", - x_axis_label="Channels", - x_range=channels, - y_axis_label="Throughput", - tooltips=TOOLTIPS, - ) - channel_vs_throughput_graph.sizing_mode = "stretch_both" - channel_vs_throughput_graph.title.align = "center" - channel_vs_throughput_graph.line( - "channels", - "tx_throughput", - source=channel_vs_throughput_data, - line_width=2, - line_color="blue", - legend_label="TX_Throughput", - ) - channel_vs_throughput_graph.circle( - "channels", - "tx_throughput", - source=channel_vs_throughput_data, - size=GRAPH_CIRCLE_SIZE, - color="blue", - ) - channel_vs_throughput_graph.line( - "channels", - "rx_throughput", - source=channel_vs_throughput_data, - line_width=2, - line_color="red", - legend_label="RX_Throughput", - ) - channel_vs_throughput_graph.circle( - "channels", - "rx_throughput", - source=channel_vs_throughput_data, - size=GRAPH_CIRCLE_SIZE, - color="red", - ) - - channel_vs_throughput_graph.legend.location = "top_left" - graph_file = save([channel_vs_throughput_graph]) - self.log.info("Saved graph to %s" % graph_file) - - def verify_standard_deviation(self, max_std_dev): - """Verifies the standard deviation of the throughput across the channels - does not exceed the max_std_dev value. 
- - Args: - max_std_dev: float, max standard deviation of throughput for a test - to pass (in Mb/s) - - Raises: - TestFailure, if standard deviation of throughput exceeds max_std_dev - """ - # If performance measurement is skipped - if not self.iperf_server: - return - self.log.info( - "Verifying standard deviation across channels does not " - "exceed max standard deviation of %s Mb/s" % max_std_dev - ) - tx_values = [] - rx_values = [] - for channel in self.throughput_data["results"]: - if self.throughput_data["results"][channel]["tx_throughput"] is not None: - tx_values.append( - self.throughput_data["results"][channel]["tx_throughput"] - ) - if self.throughput_data["results"][channel]["rx_throughput"] is not None: - rx_values.append( - self.throughput_data["results"][channel]["rx_throughput"] - ) - tx_std_dev = pstdev(tx_values) - rx_std_dev = pstdev(rx_values) - if tx_std_dev > max_std_dev or rx_std_dev > max_std_dev: - asserts.fail( - "With %smhz channel bandwidth, throughput standard " - "deviation (tx: %s Mb/s, rx: %s Mb/s) exceeds max standard " - "deviation (%s Mb/s)." - % ( - self.throughput_data["channel_bandwidth"], - tx_std_dev, - rx_std_dev, - max_std_dev, - ) - ) - else: - asserts.explicit_pass( - "Throughput standard deviation (tx: %s Mb/s, rx: %s Mb/s) " - "with %smhz channel bandwidth does not exceed maximum (%s Mb/s)." - % ( - tx_std_dev, - rx_std_dev, - self.throughput_data["channel_bandwidth"], - max_std_dev, - ) - ) - - def run_channel_performance_tests(self, settings): - """Test function for running channel performance tests. Used by both - explicit test cases and debug test cases from config. Runs a performance - test for each channel in test_channels with test_channel_bandwidth, then - writes a graph and csv file of the channel vs throughput. - - Args: - settings: dict, containing the following test settings - test_channels: list of channels to test. - test_channel_bandwidth: int, channel bandwidth to use for test. 
- test_security (optional): string, security type to use for test. - min_tx_throughput (optional, default: 0): float, minimum tx - throughput threshold to pass individual channel tests - (in Mb/s). - min_rx_throughput (optional, default: 0): float, minimum rx - throughput threshold to pass individual channel tests - (in Mb/s). - max_std_dev (optional, default: 1): float, maximum standard - deviation of throughput across all test channels to pass - test (in Mb/s). - base_test_name (optional): string, test name prefix to use with - generated subtests. - country_name (optional): string, country name from - hostapd_constants to set on device. - country_code (optional): string, two-char country code to set on - the DUT. Takes priority over country_name. - test_name (debug tests only): string, the test name for this - parent test case from the config file. In explicit tests, - this is not necessary. - - Writes: - CSV file: channel, tx_throughput, rx_throughput - for every test channel. - Graph: channel vs tx_throughput & channel vs rx_throughput - - Raises: - TestFailure, if throughput standard deviation across channels - exceeds max_std_dev - - Example Explicit Test (see EOF for debug JSON example): - def test_us_2g_20mhz_wpa2(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G, - test_channel_bandwidth=20, - test_security=hostapd_constants.WPA2_STRING, - min_tx_throughput=2, - min_rx_throughput=4, - max_std_dev=0.75, - country_code='US', - base_test_name='test_us')) - """ - test_channels = settings["test_channels"] - test_channel_bandwidth = settings["test_channel_bandwidth"] - test_security = settings.get("test_security", None) - test_name = settings.get("test_name", self.test_name) - base_test_name = settings.get("base_test_name", "test") - min_tx_throughput = settings.get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT) - min_rx_throughput = settings.get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT) - max_std_dev = 
settings.get("max_std_dev", DEFAULT_MAX_STD_DEV) - country_code = settings.get("country_code") - country_name = settings.get("country_name") - country_label = None - - if country_code: - country_label = country_code - self.set_dut_country_code(country_code) - elif country_name: - country_label = country_name - code = hostapd_constants.COUNTRY_CODE[country_name]["country_code"] - self.set_dut_country_code(code) - - self.throughput_data = { - "test": test_name, - "channel_bandwidth": test_channel_bandwidth, - "results": {}, - } - test_list = [] - for channel in test_channels: - sub_test_name = "test_%schannel_%s_%smhz_%s_performance" % ( - "%s_" % country_label if country_label else "", - channel, - test_channel_bandwidth, - test_security if test_security else "open", - ) - test_list.append( - { - "test_name": sub_test_name, - "channel": int(channel), - "channel_bandwidth": int(test_channel_bandwidth), - "security": test_security, - "min_tx_throughput": min_tx_throughput, - "min_rx_throughput": min_rx_throughput, - } - ) - self.run_generated_testcases( - self.get_channel_performance, settings=test_list, name_func=get_test_name - ) - self.log.info("Channel tests completed.") - self.write_graph() - self.verify_standard_deviation(max_std_dev) - - def get_channel_performance(self, settings): - """Run a single channel performance test and logs results to csv file - and throughput data. Run with generated sub test cases in - run_channel_performance_tests. - - 1. Sets up network with test settings - 2. Associates DUT - 3. Runs traffic between DUT and iperf server (both directions) - 4. Logs channel, tx_throughput (Mb/s), and rx_throughput (Mb/s) to - log file and throughput data. - 5. Checks throughput values against minimum throughput thresholds. - - Args: - settings: see run_channel_performance_tests - - Raises: - TestFailure, if throughput (either direction) is less than - the directions given minimum throughput threshold. 
- """ - channel = settings["channel"] - channel_bandwidth = settings["channel_bandwidth"] - security = settings["security"] - test_name = settings["test_name"] - min_tx_throughput = settings["min_tx_throughput"] - min_rx_throughput = settings["min_rx_throughput"] - if security: - if security == hostapd_constants.WEP_STRING: - password = utils.rand_hex_str(WEP_HEX_STRING_LENGTH) - else: - password = utils.rand_ascii_str(hostapd_constants.MIN_WPA_PSK_LENGTH) - security_profile = Security(security_mode=security, password=password) - target_security = ( - hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security - ) - ) - else: - password = None - security_profile = None - target_security = None - ssid = self.setup_ap(channel, channel_bandwidth, security_profile) - associated = self.dut.associate( - ssid, target_pwd=password, target_security=target_security - ) - if not associated: - if self.iperf_server: - self.log_to_file_and_throughput_data( - channel, channel_bandwidth, None, None - ) - asserts.fail("Device failed to associate with network %s" % ssid) - self.log.info("DUT (%s) connected to network %s." % (self.dut.device.ip, ssid)) - if self.iperf_server: - self.iperf_server.renew_test_interface_ip_address() - self.log.info( - "Getting ip address for iperf server. Will retry for %s seconds." - % self.time_to_wait_for_ip_addr - ) - iperf_server_address = self.get_and_verify_iperf_address( - channel, self.iperf_server - ) - self.log.info( - "Getting ip address for DUT. Will retry for %s seconds." 
- % self.time_to_wait_for_ip_addr - ) - iperf_client_address = self.get_and_verify_iperf_address( - channel, self.dut.device, self.iperf_client.test_interface - ) - tx_throughput = self.get_iperf_throughput( - iperf_server_address, iperf_client_address - ) - rx_throughput = self.get_iperf_throughput( - iperf_server_address, iperf_client_address, reverse=True - ) - self.log_to_file_and_throughput_data( - channel, channel_bandwidth, tx_throughput, rx_throughput - ) - self.log.info( - "Throughput (tx, rx): (%s Mb/s, %s Mb/s), " - "Minimum threshold (tx, rx): (%s Mb/s, %s Mb/s)" - % (tx_throughput, rx_throughput, min_tx_throughput, min_rx_throughput) - ) - base_message = ( - "Actual throughput (on channel: %s, channel bandwidth: " - "%s, security: %s)" % (channel, channel_bandwidth, security) - ) - if ( - not tx_throughput - or not rx_throughput - or tx_throughput < min_tx_throughput - or rx_throughput < min_rx_throughput - ): - asserts.fail("%s below the minimum threshold." % base_message) - asserts.explicit_pass("%s above the minimum threshold." % base_message) - else: - asserts.explicit_pass( - "Association test pass. No throughput measurement taken." - ) - - def verify_regulatory_compliance(self, settings): - """Test function for regulatory compliance tests. Verify device complies - with provided regulatory requirements. - - Args: - settings: dict, containing the following test settings - test_channels: dict, mapping channels to a set of the channel - bandwidths to test (see example for using JSON). Defaults - to hostapd_constants.ALL_CHANNELS. - country_code: string, two-char country code to set on device - (prioritized over country_name) - country_name: string, country name from hostapd_constants to set - on device. - base_test_name (optional): string, test name prefix to use with - generatedsubtests. - test_name: string, the test name for this - parent test case from the config file. In explicit tests, - this is not necessary. 
- """ - country_name = settings.get("country_name") - country_code = settings.get("country_code") - if not (country_code or country_name): - raise ValueError("No country code or name provided.") - - test_channels = settings.get("test_channels", hostapd_constants.ALL_CHANNELS) - allowed_channels = settings["allowed_channels"] - - base_test_name = settings.get("base_test_name", "test_compliance") - - if country_code: - code = country_code - else: - code = hostapd_constants.COUNTRY_CODE[country_name]["country_code"] - - self.set_dut_country_code(code) - - test_list = [] - for channel in test_channels: - for channel_bandwidth in test_channels[channel]: - sub_test_name = "%s_channel_%s_%smhz" % ( - base_test_name, - channel, - channel_bandwidth, - ) - should_associate = ( - channel in allowed_channels - and channel_bandwidth in allowed_channels[channel] - ) - # Note: these int conversions because when these tests are - # imported via JSON, they may be strings since the channels - # will be keys. This makes the json/list test_channels param - # behave exactly like the in code dict/set test_channels. - test_list.append( - { - "country_code": code, - "channel": int(channel), - "channel_bandwidth": int(channel_bandwidth), - "should_associate": should_associate, - "test_name": sub_test_name, - } - ) - self.run_generated_testcases( - test_func=self.verify_channel_compliance, - settings=test_list, - name_func=get_test_name, - ) - - def verify_channel_compliance(self, settings): - """Verify device complies with provided regulatory requirements for a - specific channel and channel bandwidth. Run with generated test cases - in the verify_regulatory_compliance parent test. 
- _ - Args: - settings: see verify_regulatory_compliance` - """ - channel = settings["channel"] - channel_bandwidth = settings["channel_bandwidth"] - code = settings["country_code"] - should_associate = settings["should_associate"] - - ssid = self.setup_ap(channel, channel_bandwidth) - - self.log.info( - "Attempting to associate with network (%s) on channel %s @ %smhz. " - "Expected behavior: %s" - % ( - ssid, - channel, - channel_bandwidth, - "Device should associate" - if should_associate - else "Device should NOT associate.", - ) - ) - - associated = self.dut.associate(ssid) - - regulatory_result_marker = "REGTRACKER: %s,%s,%s,%s,%s" % ( - code, - channel, - "2.4" if channel < 36 else "5", - channel_bandwidth, - "c" if associated else "nc", - ) - self.regulatory_results += regulatory_result_marker + "\n" - self.log.info(regulatory_result_marker) - - if associated == should_associate: - asserts.explicit_pass( - "Device complied with %s regulatory requirement for channel %s " - " with channel bandwidth %smhz. %s" - % ( - code, - channel, - channel_bandwidth, - "Associated." if associated else "Refused to associate.", - ) - ) - else: - asserts.fail( - "Device failed compliance with regulatory domain %s for " - "channel %s with channel bandwidth %smhz. Expected: %s, Got: %s" - % ( - code, - channel, - channel_bandwidth, - "Should associate" if should_associate else "Should not associate", - "Associated" if associated else "Did not associate", - ) - ) - - # Helper functions to allow explicit tests throughput and standard deviation - # thresholds to be passed in via config. 
- def _get_min_tx_throughput(self, test_name): - return ( - self.user_params.get("channel_sweep_test_params", {}) - .get(test_name, {}) - .get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT) - ) - - def _get_min_rx_throughput(self, test_name): - return ( - self.user_params.get("channel_sweep_test_params", {}) - .get(test_name, {}) - .get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT) - ) - - def _get_max_std_dev(self, test_name): - return ( - self.user_params.get("channel_sweep_test_params", {}) - .get(test_name, {}) - .get("min_std_dev", DEFAULT_MAX_STD_DEV) - ) - - # Channel Performance of US Channels: 570 Test Cases - # 36 Test Cases - def test_us_20mhz_open_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G, - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 35 Test Cases - def test_us_40mhz_open_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 24 Test Cases - def test_us_80mhz_open_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - 
min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 36 Test Cases - def test_us_20mhz_wep_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G, - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ, - test_security=hostapd_constants.WEP_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 35 Test Cases - def test_us_40mhz_wep_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ, - test_security=hostapd_constants.WEP_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 24 Test Cases - def test_us_80mhz_wep_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ, - test_security=hostapd_constants.WEP_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 36 Test Cases - def test_us_20mhz_wpa_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G, - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ, - 
test_security=hostapd_constants.WPA_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 35 Test Cases - def test_us_40mhz_wpa_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ, - test_security=hostapd_constants.WPA_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 24 Test Cases - def test_us_80mhz_wpa_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ, - test_security=hostapd_constants.WPA_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 36 Test Cases - def test_us_20mhz_wpa2_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G, - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ, - test_security=hostapd_constants.WPA2_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 35 Test Cases - def test_us_40mhz_wpa2_channel_performance(self): - self.run_channel_performance_tests( - dict( - 
test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ, - test_security=hostapd_constants.WPA2_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 24 Test Cases - def test_us_80mhz_wpa2_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ, - test_security=hostapd_constants.WPA2_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 36 Test Cases - def test_us_20mhz_wpa_wpa2_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G, - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ, - test_security=hostapd_constants.WPA_MIXED_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 35 Test Cases - def test_us_40mhz_wpa_wpa2_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ, - test_security=hostapd_constants.WPA_MIXED_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - 
max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 24 Test Cases - def test_us_80mhz_wpa_wpa2_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ, - test_security=hostapd_constants.WPA_MIXED_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 36 Test Cases - def test_us_20mhz_wpa3_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G, - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ, - test_security=hostapd_constants.WPA3_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 35 Test Cases - def test_us_40mhz_wpa3_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_2G - + hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ, - test_security=hostapd_constants.WPA3_STRING, - base_test_name=self.test_name, - min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - # 24 Test Cases - def test_us_80mhz_wpa3_channel_performance(self): - self.run_channel_performance_tests( - dict( - test_channels=hostapd_constants.US_CHANNELS_5G[:-1], - test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ, - test_security=hostapd_constants.WPA3_STRING, - base_test_name=self.test_name, - 
min_tx_throughput=self._get_min_tx_throughput(self.test_name), - min_rx_throughput=self._get_min_rx_throughput(self.test_name), - max_std_dev=self._get_max_std_dev(self.test_name), - ) - ) - - def test_channel_performance_debug(self): - """Run channel performance test cases from the ACTS config file. - - Example: - "channel_sweep_test_params": { - "debug_channel_performance_tests": [ - { - "test_name": "test_123_20mhz_wpa2_performance" - "test_channels": [1, 2, 3], - "test_channel_bandwidth": 20, - "test_security": "wpa2", - "base_test_name": "test_123_perf", - "min_tx_throughput": 1.1, - "min_rx_throughput": 3, - "max_std_dev": 0.5 - }, - ... - ] - } - - """ - asserts.skip_if( - "debug_channel_performance_tests" - not in self.user_params.get("channel_sweep_test_params", {}), - "No custom channel performance tests provided in config.", - ) - base_tests = self.user_params["channel_sweep_test_params"][ - "debug_channel_performance_tests" - ] - self.run_generated_testcases( - self.run_channel_performance_tests, - settings=base_tests, - name_func=get_test_name, - ) - - def test_regulatory_compliance(self): - """Run regulatory compliance test case from the ACTS config file. - Note: only one country_name OR country_code is required. - - Example: - "channel_sweep_test_params": { - "regulatory_compliance_tests": [ - { - "test_name": "test_japan_compliance_1_13_36" - "country_name": "JAPAN", - "country_code": "JP", - "test_channels": { - "1": [20, 40], "13": [40], "36": [20, 40, 80] - }, - "allowed_channels": { - "1": [20, 40], "36": [20, 40, 80] - }, - "base_test_name": "test_japan" - }, - ... 
- ] - } - """ - asserts.skip_if( - "regulatory_compliance_tests" - not in self.user_params.get("channel_sweep_test_params", {}), - "No custom regulatory compliance tests provided in config.", - ) - - # TODO(http://b/280442689): Add "supported_country_codes" and - # "unsupported_channels" to test params - base_tests = self.user_params["channel_sweep_test_params"][ - "regulatory_compliance_tests" - ] - self.run_generated_testcases( - self.verify_regulatory_compliance, - settings=base_tests, - name_func=get_test_name, - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/WlanRvrTest.py b/src/antlion/tests/wlan/performance/WlanRvrTest.py deleted file mode 100644 index ad97221..0000000 --- a/src/antlion/tests/wlan/performance/WlanRvrTest.py +++ /dev/null
@@ -1,1137 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os -import time -import logging - -from antlion import context -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants -from antlion.controllers.ap_lib.radvd import Radvd -from antlion.controllers.ap_lib.radvd_config import RadvdConfig -from antlion.controllers.ap_lib.hostapd_security import Security -from antlion.controllers.attenuator import get_attenuators_for_device -from antlion.controllers.iperf_server import IPerfResult -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device -from antlion.test_utils.wifi import base_test -from antlion.utils import rand_ascii_str - -from mobly import asserts, test_runner - -AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy" -REPORTING_SPEED_UNITS = "Mbps" - -RVR_GRAPH_SUMMARY_FILE = "rvr_summary.html" - -DAD_TIMEOUT_SEC = 30 - - -def create_rvr_graph(test_name, graph_path, graph_data): - """Creates the RvR graphs - Args: - test_name: The name of test that was run. This is the title of the - graph - graph_path: Where to put the graph html file. - graph_data: A dictionary of the data to be graphed. - Returns: - A list of bokeh graph objects. 
- """ - try: - from bokeh.plotting import ColumnDataSource - from bokeh.plotting import figure - from bokeh.plotting import output_file - from bokeh.plotting import save - except ImportError as e: - logging.warn( - "bokeh is not installed: skipping creation of graphs. " - "Note CSV files are still available. If graphs are " - 'desired, install antlion with the "bokeh" feature.' - ) - return [] - - output_file( - "%srvr_throughput_vs_attn_%s.html" % (graph_path, test_name), title=test_name - ) - throughput_vs_attn_data = ColumnDataSource( - data=dict( - relative_attn=graph_data["throughput_vs_attn"]["relative_attn"], - throughput=graph_data["throughput_vs_attn"]["throughput"], - ) - ) - TOOLTIPS = [("Attenuation", "@relative_attn"), ("Throughput", "@throughput")] - throughput_vs_attn_graph = figure( - title="Throughput vs Relative Attenuation (Test Case: %s)" % test_name, - x_axis_label=graph_data["throughput_vs_attn"]["x_label"], - y_axis_label=graph_data["throughput_vs_attn"]["y_label"], - x_range=graph_data["throughput_vs_attn"]["relative_attn"], - tooltips=TOOLTIPS, - ) - throughput_vs_attn_graph.sizing_mode = "stretch_width" - throughput_vs_attn_graph.title.align = "center" - throughput_vs_attn_graph.line( - "relative_attn", "throughput", source=throughput_vs_attn_data, line_width=2 - ) - throughput_vs_attn_graph.circle( - "relative_attn", "throughput", source=throughput_vs_attn_data, size=10 - ) - save([throughput_vs_attn_graph]) - return [throughput_vs_attn_graph] - - -def write_csv_rvr_data(test_name, csv_path, csv_data): - """Writes the CSV data for the RvR test - Args: - test_name: The name of test that was run. - csv_path: Where to put the csv file. - csv_data: A dictionary of the data to be put in the csv file. 
- """ - csv_file_name = "%srvr_throughput_vs_attn_%s.csv" % (csv_path, test_name) - throughput = csv_data["throughput_vs_attn"]["throughput"] - relative_attn = csv_data["throughput_vs_attn"]["relative_attn"] - with open(csv_file_name, "w+") as csv_fileId: - csv_fileId.write( - "%s,%s\n" - % ( - csv_data["throughput_vs_attn"]["x_label"], - csv_data["throughput_vs_attn"]["y_label"], - ) - ) - for csv_loop_counter in range(0, len(relative_attn)): - csv_fileId.write( - "%s,%s\n" - % (int(relative_attn[csv_loop_counter]), throughput[csv_loop_counter]) - ) - - -class WlanRvrTest(base_test.WifiBaseTest): - """Tests running WLAN RvR. - - Test Bed Requirement: - * One Android device or Fuchsia device - * One Access Point - * One attenuator - * One Linux iPerf Server - """ - - def __init__(self, controllers): - super().__init__(controllers) - self.rvr_graph_summary = [] - - def setup_class(self): - super().setup_class() - - device_type = self.user_params.get("dut", "fuchsia_devices") - if device_type == "fuchsia_devices": - self.dut = create_wlan_device(self.fuchsia_devices[0]) - elif device_type == "android_devices": - self.dut = create_wlan_device(self.android_devices[0]) - else: - raise ValueError( - f'Invalid "dut" type specified in config: "{device_type}".' - 'Expected "fuchsia_devices" or "android_devices".' 
- ) - - self.starting_attn = self.user_params["rvr_settings"].get("starting_attn", 0) - - self.ending_attn = self.user_params["rvr_settings"].get("ending_attn", 95) - - self.step_size_in_db = self.user_params["rvr_settings"].get( - "step_size_in_db", 1 - ) - - self.dwell_time_in_secs = self.user_params["rvr_settings"].get( - "dwell_time_in_secs", 10 - ) - - self.reverse_rvr_after_forward = bool( - (self.user_params["rvr_settings"].get("reverse_rvr_after_forward", None)) - ) - - self.iperf_flags = self.user_params["rvr_settings"].get("iperf_flags", "-i 1") - - self.iperf_flags = "%s -t %s -J" % (self.iperf_flags, self.dwell_time_in_secs) - - self.debug_loop_count = self.user_params["rvr_settings"].get( - "debug_loop_count", 1 - ) - - self.debug_pre_traffic_cmd = self.user_params["rvr_settings"].get( - "debug_pre_traffic_cmd", None - ) - - self.debug_post_traffic_cmd = self.user_params["rvr_settings"].get( - "debug_post_traffic_cmd", None - ) - - self.router_adv_daemon = None - - if self.ending_attn == "auto": - self.use_auto_end = True - self.ending_attn = 100 - if self.step_size_in_db > 2: - asserts.fail( - "When using an ending attenuation of 'auto' " - "please use a value < 2db. Larger jumps will " - "break the test reporting." 
- ) - - self.access_point = self.access_points[0] - self.attenuators_2g = get_attenuators_for_device( - self.controller_configs["AccessPoint"][0]["Attenuator"], - self.attenuators, - "attenuator_ports_wifi_2g", - ) - self.attenuators_5g = get_attenuators_for_device( - self.controller_configs["AccessPoint"][0]["Attenuator"], - self.attenuators, - "attenuator_ports_wifi_5g", - ) - - self.iperf_server = self.iperf_servers[0] - - if hasattr(self, "iperf_clients") and self.iperf_clients: - self.dut_iperf_client = self.iperf_clients[0] - else: - self.dut_iperf_client = self.dut.create_iperf_client() - - self.access_point.stop_all_aps() - - def setup_test(self): - if self.iperf_server: - self.iperf_server.start() - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockAcquireBright() - ad.droid.wakeUpNow() - self.dut.wifi_toggle_state(True) - - def teardown_test(self): - self.cleanup_tests() - - def teardown_class(self): - if self.router_adv_daemon: - self.router_adv_daemon.stop() - try: - from bokeh.plotting import output_file - from bokeh.plotting import save - - output_path = context.get_current_context().get_base_output_path() - test_class_name = context.get_current_context().test_class_name - - output_file( - f"{output_path}/{test_class_name}/rvr_summary.html", title="RvR Sumamry" - ) - save(list(self.rvr_graph_summary)) - except ImportError as e: - logging.warn( - "bokeh is not installed: skipping creation of graphs. " - "Note CSV files are still available. If graphs are " - 'desired, install antlion with the "bokeh" feature.' 
- ) - except Exception as e: - self.log.error(f"Unable to generate RvR summary file: {e}") - - super().teardown_class() - - def on_fail(self, test_name, begin_time): - super().on_fail(test_name, begin_time) - self.cleanup_tests() - - def cleanup_tests(self): - """Cleans up all the dangling pieces of the tests, for example, the - iperf server, radvd, all the currently running APs, and the various - clients running during the tests. - """ - - if self.router_adv_daemon: - output_path = context.get_current_context().get_base_output_path() - full_output_path = os.path.join(output_path, "radvd_log.txt") - radvd_log_file = open(full_output_path, "w") - radvd_log_file.write(self.router_adv_daemon.pull_logs()) - radvd_log_file.close() - self.router_adv_daemon.stop() - if hasattr(self, "android_devices"): - for ad in self.android_devices: - ad.droid.wakeLockRelease() - ad.droid.goToSleepNow() - if self.iperf_server: - self.iperf_server.stop() - self.dut.turn_location_off_and_scan_toggle_off() - self.dut.disconnect() - self.dut.reset_wifi() - self.download_ap_logs() - self.access_point.stop_all_aps() - - def _wait_for_ipv4_addrs(self): - """Wait for an IPv4 addresses to become available on the DUT and iperf - server. - - Returns: - A string containing the private IPv4 address of the iperf server. - - Raises: - TestFailure: If unable to acquire a IPv4 address. 
- """ - ip_address_checker_counter = 0 - ip_address_checker_max_attempts = 3 - while ip_address_checker_counter < ip_address_checker_max_attempts: - self.iperf_server.renew_test_interface_ip_address() - iperf_server_ip_addresses = self.iperf_server.get_interface_ip_addresses( - self.iperf_server.test_interface - ) - dut_ip_addresses = self.dut.device.get_interface_ip_addresses( - self.dut_iperf_client.test_interface - ) - - self.log.info("IPerf server IP info: {}".format(iperf_server_ip_addresses)) - self.log.info("DUT IP info: {}".format(dut_ip_addresses)) - - if not iperf_server_ip_addresses["ipv4_private"]: - self.log.warn( - "Unable to get the iperf server IPv4 " "address. Retrying..." - ) - ip_address_checker_counter += 1 - time.sleep(1) - continue - - if dut_ip_addresses["ipv4_private"]: - return iperf_server_ip_addresses["ipv4_private"][0] - - self.log.warn( - "Unable to get the DUT IPv4 address starting at " - 'attenuation "{}". Retrying...'.format(self.starting_attn) - ) - ip_address_checker_counter += 1 - time.sleep(1) - - asserts.fail( - "IPv4 addresses are not available on both the DUT and iperf server." - ) - - # TODO (b/258264565): Merge with fuchsia_device wait_for_ipv6_addr. - def _wait_for_dad(self, device, test_interface): - """Wait for Duplicate Address Detection to resolve so that an - private-local IPv6 address is available for test. - - Args: - device: implementor of get_interface_ip_addresses - test_interface: name of interface that DAD is operating on - - Returns: - A string containing the private-local IPv6 address of the device. - - Raises: - TestFailure: If unable to acquire an IPv6 address. 
- """ - now = time.time() - start = now - elapsed = now - start - - while elapsed < DAD_TIMEOUT_SEC: - addrs = device.get_interface_ip_addresses(test_interface) - now = time.time() - elapsed = now - start - if addrs["ipv6_private_local"]: - # DAD has completed - addr = addrs["ipv6_private_local"][0] - self.log.info('DAD resolved with "{}" after {}s'.format(addr, elapsed)) - return addr - time.sleep(1) - else: - asserts.fail( - "Unable to acquire a private-local IPv6 address for testing " - "after {}s".format(elapsed) - ) - - def run_rvr( - self, - ssid, - security_mode=None, - password=None, - band="2g", - traffic_dir="tx", - ip_version=4, - ): - """Setups and runs the RvR test - - Args: - ssid: The SSID for the client to associate to. - password: Password for the network, if necessary. - band: 2g or 5g - traffic_dir: rx or tx, bi is not supported by iperf3 - ip_version: 4 or 6 - - Returns: - The bokeh graph data. - """ - throughput = [] - relative_attn = [] - if band == "2g": - rvr_attenuators = self.attenuators_2g - elif band == "5g": - rvr_attenuators = self.attenuators_5g - else: - raise ValueError("Invalid WLAN band specified: %s" % band) - if ip_version == 6: - self.router_adv_daemon = Radvd( - self.access_point.ssh, - self.access_point.interfaces.get_bridge_interface()[0], - ) - radvd_config = RadvdConfig() - self.router_adv_daemon.start(radvd_config) - - for _ in range(0, self.debug_loop_count): - for rvr_attenuator in rvr_attenuators: - rvr_attenuator.set_atten(self.starting_attn) - - associate_counter = 0 - associate_max_attempts = 3 - while associate_counter < associate_max_attempts: - if self.dut.associate( - ssid, - target_pwd=password, - target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security_mode - ), - check_connectivity=False, - ): - break - else: - associate_counter += 1 - else: - asserts.fail( - "Unable to associate at starting " - "attenuation: %s" % self.starting_attn - ) - - if ip_version == 4: - 
iperf_server_ip_address = self._wait_for_ipv4_addrs() - elif ip_version == 6: - self.iperf_server.renew_test_interface_ip_address() - self.log.info( - "Waiting for iperf server to complete Duplicate " - "Address Detection..." - ) - iperf_server_ip_address = self._wait_for_dad( - self.iperf_server, self.iperf_server.test_interface - ) - - self.log.info( - "Waiting for DUT to complete Duplicate Address " - 'Detection for "{}"...'.format(self.dut_iperf_client.test_interface) - ) - _ = self._wait_for_dad( - self.dut.device, self.dut_iperf_client.test_interface - ) - else: - raise ValueError("Invalid IP version: {}".format(ip_version)) - - throughput, relative_attn = self.rvr_loop( - traffic_dir, - rvr_attenuators, - iperf_server_ip_address, - ip_version, - throughput=throughput, - relative_attn=relative_attn, - ) - if self.reverse_rvr_after_forward: - throughput, relative_attn = self.rvr_loop( - traffic_dir, - rvr_attenuators, - iperf_server_ip_address, - ip_version, - ssid=ssid, - security_mode=security_mode, - password=password, - reverse=True, - throughput=throughput, - relative_attn=relative_attn, - ) - self.dut.disconnect() - - throughput_vs_attn = { - "throughput": throughput, - "relative_attn": relative_attn, - "x_label": "Attenuation(db)", - "y_label": "Throughput(%s)" % REPORTING_SPEED_UNITS, - } - graph_data = {"throughput_vs_attn": throughput_vs_attn} - return graph_data - - def rvr_loop( - self, - traffic_dir, - rvr_attenuators, - iperf_server_ip_address, - ip_version, - ssid=None, - security_mode=None, - password=None, - reverse=False, - throughput=None, - relative_attn=None, - ): - """The loop that goes through each attenuation level and runs the iperf - throughput pair. - Args: - traffic_dir: The traffic direction from the perspective of the DUT. - rvr_attenuators: A list of attenuators to set. - iperf_server_ip_address: The IP address of the iperf server. - ssid: The ssid of the wireless network that the should associated - to. 
- password: Password of the wireless network. - reverse: Whether to run RvR test starting from the highest - attenuation and going to the lowest. This is run after the - normal low attenuation to high attenuation RvR test. - throughput: The list of throughput data for the test. - relative_attn: The list of attenuation data for the test. - - Returns: - throughput: The list of throughput data for the test. - relative_attn: The list of attenuation data for the test. - """ - iperf_flags = self.iperf_flags - if traffic_dir == "rx": - iperf_flags = "%s -R" % self.iperf_flags - starting_attn = self.starting_attn - ending_attn = self.ending_attn - step_size_in_db = self.step_size_in_db - if reverse: - starting_attn = self.ending_attn - ending_attn = self.starting_attn - step_size_in_db = step_size_in_db * -1 - self.dut.disconnect() - for step in range(starting_attn, ending_attn, step_size_in_db): - try: - for attenuator in rvr_attenuators: - attenuator.set_atten(step) - except ValueError as e: - self.log.error( - f"{step} is beyond the max or min of the testbed " - f"attenuator's capability. Stopping. {e}" - ) - break - self.log.info("Set relative attenuation to %s db" % step) - - associated = self.dut.is_connected() - if associated: - self.log.info("DUT is currently associated.") - else: - self.log.info("DUT is not currently associated.") - - if reverse: - if not associated: - self.log.info( - "Trying to associate at relative " "attenuation of %s db" % step - ) - if self.dut.associate( - ssid, - target_pwd=password, - target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security_mode - ), - check_connectivity=False, - ): - associated = True - self.log.info("Successfully associated.") - else: - associated = False - self.log.info( - "Association failed. Marking a 0 %s for" - " throughput. Skipping running traffic." 
- % REPORTING_SPEED_UNITS - ) - attn_value_inserted = False - value_to_insert = str(step) - while not attn_value_inserted: - if value_to_insert in relative_attn: - value_to_insert = "%s " % value_to_insert - else: - relative_attn.append(value_to_insert) - attn_value_inserted = True - - dut_ip_addresses = self.dut.device.get_interface_ip_addresses( - self.dut_iperf_client.test_interface - ) - if ip_version == 4: - if not dut_ip_addresses["ipv4_private"]: - self.log.info( - "DUT does not have an IPv4 address. " - "Traffic attempt to be run if the server " - "is pingable." - ) - else: - self.log.info( - 'DUT has the following IPv4 address: "%s"' - % dut_ip_addresses["ipv4_private"][0] - ) - elif ip_version == 6: - if not dut_ip_addresses["ipv6_private_local"]: - self.log.info( - "DUT does not have an IPv6 address. " - "Traffic attempt to be run if the server " - "is pingable." - ) - else: - self.log.info( - 'DUT has the following IPv6 address: "%s"' - % dut_ip_addresses["ipv6_private_local"][0] - ) - server_pingable = self.dut.can_ping(iperf_server_ip_address) - if not server_pingable: - self.log.info( - 'Iperf server "%s" is not pingable. Marking ' - "a 0 %s for throughput. Skipping running " - "traffic." % (iperf_server_ip_address, REPORTING_SPEED_UNITS) - ) - else: - self.log.info( - 'Iperf server "%s" is pingable.' 
% iperf_server_ip_address - ) - if self.debug_pre_traffic_cmd: - self.log.info( - "\nDEBUG: Sending command '%s' to DUT" % self.debug_pre_traffic_cmd - ) - self.log.info( - "\n%s" % self.dut.send_command(self.debug_pre_traffic_cmd) - ) - if server_pingable: - if traffic_dir == "tx": - self.log.info( - "Running traffic DUT to %s at relative " - "attenuation of %s" % (iperf_server_ip_address, step) - ) - elif traffic_dir == "rx": - self.log.info( - "Running traffic %s to DUT at relative " - "attenuation of %s" % (iperf_server_ip_address, step) - ) - else: - raise ValueError("Invalid traffic direction") - try: - iperf_tag = "decreasing" - if reverse: - iperf_tag = "increasing" - iperf_results_file = self.dut_iperf_client.start( - iperf_server_ip_address, - iperf_flags, - "%s_%s_%s" % (iperf_tag, traffic_dir, self.starting_attn), - timeout=(self.dwell_time_in_secs * 2), - ) - except TimeoutError as e: - iperf_results_file = None - self.log.error( - f"Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for " - f"throughput. {e}" - ) - - if not iperf_results_file: - throughput.append(0) - else: - try: - iperf_results = IPerfResult( - iperf_results_file, - reporting_speed_units=REPORTING_SPEED_UNITS, - ) - if iperf_results.error: - self.iperf_server.stop() - self.iperf_server.start() - self.log.error( - f"Errors in iperf logs:\n{iperf_results.error}" - ) - if not iperf_results.avg_send_rate: - throughput.append(0) - else: - throughput.append(iperf_results.avg_send_rate) - except ValueError as e: - self.iperf_server.stop() - self.iperf_server.start() - self.log.error( - f"No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} " - f"for throughput: {e}" - ) - throughput.append(0) - except Exception as e: - self.iperf_server.stop() - self.iperf_server.start() - self.log.error( - f"Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for " - f"throughput: {e}" - ) - self.log.error(e) - throughput.append(0) - - self.log.info( - "Iperf traffic complete. 
%s traffic received at " - "%s %s at relative attenuation of %s db" - % ( - traffic_dir, - throughput[-1], - REPORTING_SPEED_UNITS, - str(relative_attn[-1]).strip(), - ) - ) - - else: - self.log.debug("DUT Associated: %s" % associated) - self.log.debug( - "%s pingable: %s" % (iperf_server_ip_address, server_pingable) - ) - throughput.append(0) - if self.debug_post_traffic_cmd: - self.log.info( - "\nDEBUG: Sending command '%s' to DUT" % self.debug_post_traffic_cmd - ) - self.log.info( - "\n%s" % self.dut.send_command(self.debug_post_traffic_cmd) - ) - return throughput, relative_attn - - def test_rvr_11ac_5g_80mhz_open_tx_ipv4(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="5g", traffic_dir="tx", ip_version=4) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_open_rx_ipv4(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="5g", traffic_dir="rx", ip_version=4) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_open_tx_ipv6(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - 
channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="5g", traffic_dir="tx", ip_version=6) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_open_rx_ipv6(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="5g", traffic_dir="rx", ip_version=6) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv4(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="5g", - traffic_dir="tx", - ip_version=4, - ) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv4(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = 
Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="5g", - traffic_dir="rx", - ip_version=4, - ) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv6(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="5g", - traffic_dir="tx", - ip_version=6, - ) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv6(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="5g", - traffic_dir="rx", - ip_version=6, - ) - for rvr_graph in 
create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_open_tx_ipv4(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="2g", traffic_dir="tx", ip_version=4) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_open_rx_ipv4(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="2g", traffic_dir="rx", ip_version=4) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_open_tx_ipv6(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="2g", traffic_dir="tx", ip_version=6) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - 
self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_open_rx_ipv6(self): - ssid = rand_ascii_str(20) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - setup_bridge=True, - ) - graph_data = self.run_rvr(ssid, band="2g", traffic_dir="rx", ip_version=6) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_wpa2_tx_ipv4(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="2g", - traffic_dir="tx", - ip_version=4, - ) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_wpa2_rx_ipv4(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="2g", - traffic_dir="rx", - ip_version=4, - ) - 
for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_wpa2_tx_ipv6(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="2g", - traffic_dir="tx", - ip_version=6, - ) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - def test_rvr_11n_2g_20mhz_wpa2_rx_ipv6(self): - ssid = rand_ascii_str(20) - password = rand_ascii_str(20) - security_profile = Security(security_mode="wpa2", password=password) - setup_ap( - access_point=self.access_point, - profile_name="whirlwind", - channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G, - ssid=ssid, - security=security_profile, - setup_bridge=True, - ) - graph_data = self.run_rvr( - ssid, - security_mode="wpa2", - password=password, - band="2g", - traffic_dir="rx", - ip_version=6, - ) - for rvr_graph in create_rvr_graph( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ): - self.rvr_graph_summary.append(rvr_graph) - write_csv_rvr_data( - self.test_name, - context.get_current_context().get_full_output_path(), - graph_data, - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/WlanWmmTest.py b/src/antlion/tests/wlan/performance/WlanWmmTest.py deleted file mode 100644 index bf155fc..0000000 --- a/src/antlion/tests/wlan/performance/WlanWmmTest.py +++ /dev/null
@@ -1,898 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright 2022 The Fuchsia Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import operator -import time - -from antlion import context, utils -from antlion.controllers.access_point import setup_ap -from antlion.controllers.ap_lib import hostapd_constants, hostapd_security -from antlion.test_utils.abstract_devices import wmm_transceiver -from antlion.test_utils.fuchsia import wmm_test_cases -from antlion.test_utils.wifi import base_test -from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device - -from mobly import asserts, test_runner - -DEFAULT_N_CAPABILITIES_20_MHZ = [ - hostapd_constants.N_CAPABILITY_LDPC, - hostapd_constants.N_CAPABILITY_SGI20, - hostapd_constants.N_CAPABILITY_TX_STBC, - hostapd_constants.N_CAPABILITY_RX_STBC1, - hostapd_constants.N_CAPABILITY_HT20, -] - -DEFAULT_AP_PARAMS = { - "profile_name": "whirlwind", - "channel": hostapd_constants.AP_DEFAULT_CHANNEL_2G, - "n_capabilities": DEFAULT_N_CAPABILITIES_20_MHZ, - "ac_capabilities": None, -} - -DEFAULT_BW_PERCENTAGE = 1 -DEFAULT_STREAM_TIMEOUT = 60 -DEFAULT_STREAM_TIME = 10 - -OPERATORS = { - ">": operator.gt, - ">=": operator.ge, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, -} - -GRAPH_COLOR_LEN = 10 -GRAPH_DEFAULT_LINE_WIDTH = 2 -GRAPH_DEFAULT_CIRCLE_SIZE = 10 - - -def eval_operator( - operator_string, - actual_value, - expected_value, - max_bw, - rel_tolerance=0, - abs_tolerance=0, - 
max_bw_rel_tolerance=0, -): - """ - Determines if an inequality evaluates to True, given relative and absolute - tolerance. - - Args: - operator_string: string, the operator to use for the comparison - actual_value: the value to compare to some expected value - expected_value: the value the actual value is compared to - rel_tolerance: decimal representing the percent tolerance, relative to - the expected value. E.g. (101 <= 100) w/ rel_tol=0.01 is True - abs_tolerance: the lowest actual (not percent) tolerance for error. - E.g. (101 == 100) w/ rel_tol=0.005 is False, but - (101 == 100) w/ rel_tol=0.005 and abs_tol=1 is True - max_bw_rel_tolerance: decimal representing the percent tolerance, - relative to the maximimum allowed bandwidth. - E.g. (101 <= max bw of 100) w/ max_bw_rel_tol=0.01 is True - - - Returns: - True, if inequality evaluates to True within tolerances - False, otherwise - """ - op = OPERATORS[operator_string] - if op(actual_value, expected_value): - return True - - error = abs(actual_value - expected_value) - accepted_error = max( - expected_value * rel_tolerance, abs_tolerance, max_bw * max_bw_rel_tolerance - ) - return error <= accepted_error - - -class WlanWmmTest(base_test.WifiBaseTest): - """Tests WMM QoS Functionality (Station only) - - Testbed Requirements: - * One ACTS compatible wlan_device (staut) - * One Whirlwind Access Point - * For some tests, One additional ACTS compatible device (secondary_sta) - - For accurate results, must be performed in an RF isolated environment. - """ - - def setup_class(self): - super().setup_class() - - try: - self.wmm_test_params = self.user_params["wmm_test_params"] - self._wmm_transceiver_configs = self.wmm_test_params["wmm_transceivers"] - except KeyError: - raise AttributeError( - "Must provide at least 2 WmmTransceivers in " - '"wmm_test_params" field of ACTS config.' 
- ) - - if len(self._wmm_transceiver_configs) < 2: - raise AttributeError("At least 2 WmmTransceivers must be provided.") - - self.android_devices = getattr(self, "android_devices", []) - self.fuchsia_devices = getattr(self, "fuchsia_devices", []) - - self.wlan_devices = [ - create_wlan_device(device) - for device in self.android_devices + self.fuchsia_devices - ] - - # Create STAUT transceiver - if "staut" not in self._wmm_transceiver_configs: - raise AttributeError( - 'Must provide a WmmTransceiver labeled "staut" with a ' "wlan_device." - ) - self.staut = wmm_transceiver.create( - self._wmm_transceiver_configs["staut"], - identifier="staut", - wlan_devices=self.wlan_devices, - ) - - # Required to for automated power cycling - self.dut = self.staut.wlan_device - - # Create AP transceiver - if "access_point" not in self._wmm_transceiver_configs: - raise AttributeError( - 'Must provide a WmmTransceiver labeled "access_point" with a ' - "access_point." - ) - self.access_point_transceiver = wmm_transceiver.create( - self._wmm_transceiver_configs["access_point"], - identifier="access_point", - access_points=self.access_points, - ) - - self.wmm_transceivers = [self.staut, self.access_point_transceiver] - - # Create secondary station transceiver, if present - if "secondary_sta" in self._wmm_transceiver_configs: - self.secondary_sta = wmm_transceiver.create( - self._wmm_transceiver_configs["secondary_sta"], - identifier="secondary_sta", - wlan_devices=self.wlan_devices, - ) - self.wmm_transceivers.append(self.secondary_sta) - else: - self.secondary_sta = None - - self.wmm_transceiver_map = {tc.identifier: tc for tc in self.wmm_transceivers} - - def setup_test(self): - for tc in self.wmm_transceivers: - if tc.wlan_device: - tc.wlan_device.wifi_toggle_state(True) - tc.wlan_device.disconnect() - if tc.access_point: - tc.access_point.stop_all_aps() - - def teardown_test(self): - for tc in self.wmm_transceivers: - tc.cleanup_asynchronous_streams() - if tc.wlan_device: - 
tc.wlan_device.disconnect() - tc.wlan_device.reset_wifi() - if tc.access_point: - self.download_ap_logs() - tc.access_point.stop_all_aps() - - def teardown_class(self): - for tc in self.wmm_transceivers: - tc.destroy_resources() - super().teardown_class() - - def on_fail(self, test_name, begin_time): - for wlan_device in self.wlan_devices: - super().on_device_fail(wlan_device.device, test_name, begin_time) - - def start_ap_with_wmm_params(self, ap_parameters, wmm_parameters): - """Sets up WMM network on AP. - - Args: - ap_parameters: a dictionary of kwargs to set up on ap - wmm_parameters: a dictionary of wmm_params to set up on ap - - Returns: - String, subnet of the network setup (e.g. '192.168.1.0/24') - """ - # Defaults for required parameters - ap_parameters["force_wmm"] = True - if "ssid" not in ap_parameters: - ap_parameters["ssid"] = utils.rand_ascii_str( - hostapd_constants.AP_SSID_LENGTH_2G - ) - - if "profile_name" not in ap_parameters: - ap_parameters["profile_name"] = "whirlwind" - - if "channel" not in ap_parameters: - ap_parameters["channel"] = 6 - - if "n_capabilities" not in ap_parameters: - ap_parameters["n_capabilities"] = DEFAULT_N_CAPABILITIES_20_MHZ - - if "additional_ap_parameters" in ap_parameters: - ap_parameters["additional_ap_parameters"].update(wmm_parameters) - else: - ap_parameters["additional_ap_parameters"] = wmm_parameters - - # Optional security - security_config = ap_parameters.get("security_config", None) - if security_config: - ap_parameters["security"] = hostapd_security.Security(**security_config) - ap_parameters.pop("security_config") - - # Start AP with kwargs - self.log.info("Setting up WMM network: %s" % ap_parameters["ssid"]) - setup_ap(self.access_point_transceiver.access_point, **ap_parameters) - self.log.info("Network (%s) is up." 
% ap_parameters["ssid"]) - - # Return subnet - if ap_parameters["channel"] < hostapd_constants.LOWEST_5G_CHANNEL: - return self.access_point_transceiver.access_point._AP_2G_SUBNET_STR - else: - return self.access_point_transceiver.access_point._AP_5G_SUBNET_STR - - def associate_transceiver(self, wmm_transceiver, ap_params): - """Associates a WmmTransceiver that has a wlan_device. - - Args: - wmm_transceiver: transceiver to associate - ap_params: dict, contains ssid and password, if any, for network - """ - if not wmm_transceiver.wlan_device: - raise AttributeError( - "Cannot associate a WmmTransceiver that does not have a " "WlanDevice." - ) - ssid = ap_params["ssid"] - password = None - target_security = None - security = ap_params.get("security") - if security: - password = security.password - target_security = ( - hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get( - security.security_mode_string - ) - ) - associated = wmm_transceiver.wlan_device.associate( - target_ssid=ssid, target_pwd=password, target_security=target_security - ) - if not associated: - raise ConnectionError( - "Failed to associate WmmTransceiver %s." % wmm_transceiver.identifier - ) - self.log.info("WmmTransceiver %s associated." % wmm_transceiver.identifier) - - def validate_streams_in_phase(self, phase_id, phases, max_bw): - """Validates any stream in a phase that has validation criteria. - - Args: - phase_id: identifier of the phase to check - phases: dictionary containing phases for retrieving stream - transmitters, expected bandwidths, etc. 
- max_bw: the max link bandwidth, measured in the test - - Returns: - True, if ALL validation criteria for ALL streams in phase pass - False, otherwise - """ - pass_val = True - for stream_id, stream in phases[phase_id].items(): - if "validation" in stream: - transmitter = stream["transmitter"] - uuid = stream["uuid"] - actual_bw = transmitter.get_results(uuid).avg_rate - if not actual_bw: - raise ConnectionError( - "(Phase: %s, Stream: %s) - Stream results show " - "bandwidth: None" % (phase_id, stream_id) - ) - for check in stream["validation"]: - operator_str = check["operator"] - rel_tolerance = check.get("rel_tolerance", 0) - abs_tolerance = check.get("abs_tolerance", 0) - max_bw_rel_tolerance = check.get("max_bw_rel_tolerance", 0) - expected_bw_percentage = check.get( - "bandwidth_percentage", DEFAULT_BW_PERCENTAGE - ) - # Explicit Bandwidth Validation - if "bandwidth" in check: - comp_bw = check["bandwidth"] - log_msg = ( - "Expected Bandwidth: %s (explicit validation " - "bandwidth [%s] x expected bandwidth " - "percentage [%s])" - % ( - expected_bw_percentage * comp_bw, - comp_bw, - expected_bw_percentage, - ) - ) - - # Stream Comparison Validation - elif "phase" in check and "stream" in check: - comp_phase_id = check["phase"] - comp_stream_id = check["stream"] - comp_stream = phases[comp_phase_id][comp_stream_id] - comp_transmitter = comp_stream["transmitter"] - comp_uuid = comp_stream["uuid"] - comp_bw = comp_transmitter.get_results(comp_uuid).avg_rate - log_msg = ( - "Expected Bandwidth: %s (bandwidth for phase: %s, " - "stream: %s [%s] x expected bandwidth percentage " - "[%s])" - % ( - expected_bw_percentage * comp_bw, - comp_phase_id, - comp_stream_id, - comp_bw, - expected_bw_percentage, - ) - ) - - # Expected Bandwidth Validation - else: - if "bandwidth" in stream: - comp_bw = stream["bandwidth"] - log_msg = ( - "Expected Bandwidth: %s (expected stream " - "bandwidth [%s] x expected bandwidth " - "percentage [%s])" - % ( - expected_bw_percentage * 
comp_bw, - comp_bw, - expected_bw_percentage, - ) - ) - else: - max_bw_percentage = stream.get( - "max_bandwidth_percentage", DEFAULT_BW_PERCENTAGE - ) - comp_bw = max_bw * max_bw_percentage - log_msg = ( - "Expected Bandwidth: %s (max bandwidth [%s] x " - "stream bandwidth percentage [%s] x expected " - "bandwidth percentage [%s])" - % ( - expected_bw_percentage * comp_bw, - max_bw, - max_bw_percentage, - expected_bw_percentage, - ) - ) - - self.log.info( - "Validation criteria - Stream: %s, " - "Actual Bandwidth: %s, Operator: %s, %s, " - "Relative Tolerance: %s, Absolute Tolerance: %s, Max " - "Bandwidth Relative Tolerance: %s" - % ( - stream_id, - actual_bw, - operator_str, - log_msg, - rel_tolerance, - abs_tolerance, - max_bw_rel_tolerance, - ) - ) - - if eval_operator( - operator_str, - actual_bw, - comp_bw * expected_bw_percentage, - max_bw, - rel_tolerance=rel_tolerance, - abs_tolerance=abs_tolerance, - max_bw_rel_tolerance=max_bw_rel_tolerance, - ): - self.log.info( - "(Phase: %s, Stream: %s) - PASSES validation check!" - % (phase_id, stream_id) - ) - else: - self.log.info( - "(Phase: %s, Stream: %s) - Stream FAILS validation " - "check." % (phase_id, stream_id) - ) - pass_val = False - if pass_val: - self.log.info( - "(Phase %s) - All streams' validation criteria were met." % phase_id - ) - return True - else: - self.log.error( - "(Phase %s) - At least one stream validation criterion was not " - "met." % phase_id - ) - return False - - def graph_test(self, phases, max_bw): - """Outputs a bokeh html graph of the streams. Saves to ACTS log - directory. - - Args: - phases: dictionary containing phases for retrieving stream - transmitters, expected bandwidths, etc. 
- max_bw: the max link bandwidth, measured in the test - - """ - - try: - from bokeh.palettes import Category10 - from bokeh.plotting import ColumnDataSource, figure, output_file, save - from bokeh.models import Span, Label - except ImportError as e: - self.log.warn( - "bokeh is not installed: skipping creation of graphs. " - "Note CSV files are still available. If graphs are " - 'desired, install antlion with the "bokeh" feature.' - ) - return - - output_path = context.get_current_context().get_base_output_path() - output_file_name = "%s/WlanWmmTest/%s.html" % (output_path, self.test_name) - output_file(output_file_name) - - start_time = 0 - graph_lines = [] - - # Used for scaling - highest_stream_bw = 0 - lowest_stream_bw = 100000 - - for phase_id, phase in phases.items(): - longest_stream_time = 0 - for stream_id, stream in phase.items(): - transmitter = stream["transmitter"] - uuid = stream["uuid"] - - if "bandwidth" in stream: - stream_bw = "{:.3f}".format(stream["bandwidth"]) - stream_bw_formula_str = "%sMb/s" % stream_bw - elif "max_bandwidth_percentage" in stream: - max_bw_percentage = stream["max_bandwidth_percentage"] - stream_bw = "{:.3f}".format(max_bw * max_bw_percentage) - stream_bw_formula_str = "%sMb/s (%s%% of max bandwidth)" % ( - stream_bw, - str(max_bw_percentage * 100), - ) - else: - raise AttributeError( - "Stream %s must have either a bandwidth or " - "max_bandwidth_percentage parameter." 
% stream_id - ) - - stream_time = stream.get("time", DEFAULT_STREAM_TIME) - longest_stream_time = max(longest_stream_time, stream_time) - - avg_rate = transmitter.get_results(uuid).avg_rate - - instantaneous_rates = transmitter.get_results(uuid).instantaneous_rates - highest_stream_bw = max(highest_stream_bw, max(instantaneous_rates)) - lowest_stream_bw = min(lowest_stream_bw, min(instantaneous_rates)) - - stream_data = ColumnDataSource( - dict( - time=[x for x in range(start_time, start_time + stream_time)], - instantaneous_bws=instantaneous_rates, - avg_bw=[avg_rate for _ in range(stream_time)], - stream_id=[stream_id for _ in range(stream_time)], - attempted_bw=[ - stream_bw_formula_str for _ in range(stream_time) - ], - ) - ) - line = { - "x_axis": "time", - "y_axis": "instantaneous_bws", - "source": stream_data, - "line_width": GRAPH_DEFAULT_LINE_WIDTH, - "legend_label": "%s:%s" % (phase_id, stream_id), - } - graph_lines.append(line) - - start_time = start_time + longest_stream_time - TOOLTIPS = [ - ("Time", "@time"), - ("Attempted Bandwidth", "@attempted_bw"), - ("Instantaneous Bandwidth", "@instantaneous_bws"), - ("Stream Average Bandwidth", "@avg_bw"), - ("Stream", "@stream_id"), - ] - - # Create and scale graph appropriately - time_vs_bandwidth_graph = figure( - title=("Bandwidth for %s" % self.test_name), - x_axis_label="Time", - y_axis_label="Bandwidth", - tooltips=TOOLTIPS, - y_range=( - lowest_stream_bw - (0.5 * (highest_stream_bw - lowest_stream_bw)), - 1.05 * max_bw, - ), - ) - time_vs_bandwidth_graph.sizing_mode = "stretch_both" - time_vs_bandwidth_graph.title.align = "center" - colors = Category10[GRAPH_COLOR_LEN] - color_ind = 0 - - # Draw max bandwidth line - max_bw_span = Span( - location=max_bw, - dimension="width", - line_color="black", - line_dash="dashed", - line_width=GRAPH_DEFAULT_LINE_WIDTH, - ) - max_bw_label = Label( - x=(0.5 * start_time), - y=max_bw, - text=("Max Bandwidth: %sMb/s" % max_bw), - text_align="center", - ) - 
time_vs_bandwidth_graph.add_layout(max_bw_span) - time_vs_bandwidth_graph.add_layout(max_bw_label) - - # Draw stream lines - for line in graph_lines: - time_vs_bandwidth_graph.line( - line["x_axis"], - line["y_axis"], - source=line["source"], - line_width=line["line_width"], - legend_label=line["legend_label"], - color=colors[color_ind], - ) - time_vs_bandwidth_graph.circle( - line["x_axis"], - line["y_axis"], - source=line["source"], - size=GRAPH_DEFAULT_CIRCLE_SIZE, - legend_label=line["legend_label"], - color=colors[color_ind], - ) - color_ind = (color_ind + 1) % GRAPH_COLOR_LEN - time_vs_bandwidth_graph.legend.location = "top_left" - time_vs_bandwidth_graph.legend.click_policy = "hide" - graph_file = save([time_vs_bandwidth_graph]) - self.log.info("Saved graph to %s" % graph_file) - - def run_wmm_test( - self, - phases, - ap_parameters=DEFAULT_AP_PARAMS, - wmm_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - stream_timeout=DEFAULT_STREAM_TIMEOUT, - ): - """Runs a WMM test case. - - Args: - phases: dictionary of phases of streams to run in parallel, - including any validation critera (see example below). 
- ap_parameters: dictionary of custom kwargs to setup on AP (see - start_ap_with_wmm_parameters) - wmm_parameters: dictionary of WMM AC parameters - stream_timeout: int, time in seconds to wait before force joining - parallel streams - - Asserts: - PASS, if all validation criteria for all phases are met - FAIL, otherwise - """ - # Setup AP - subnet_str = self.start_ap_with_wmm_params(ap_parameters, wmm_parameters) - # Determine transmitters and receivers used in test case - transmitters = set() - receivers = set() - for phase in phases.values(): - for stream in phase.values(): - transmitter = self.wmm_transceiver_map[stream["transmitter_str"]] - transmitters.add(transmitter) - stream["transmitter"] = transmitter - receiver = self.wmm_transceiver_map[stream["receiver_str"]] - receivers.add(receiver) - stream["receiver"] = receiver - transceivers = transmitters.union(receivers) - - # Associate all transceivers with wlan_devices - for tc in transceivers: - if tc.wlan_device: - self.associate_transceiver(tc, ap_parameters) - - # Determine link max bandwidth - self.log.info("Determining link maximum bandwidth.") - uuid = self.staut.run_synchronous_traffic_stream( - {"receiver": self.access_point_transceiver}, subnet_str - ) - max_bw = self.staut.get_results(uuid).avg_send_rate - self.log.info("Link maximum bandwidth: %s Mb/s" % max_bw) - - # Run parallel phases - pass_test = True - for phase_id, phase in phases.items(): - self.log.info("Setting up phase: %s" % phase_id) - - for stream_id, stream in phase.items(): - transmitter = stream["transmitter"] - receiver = stream["receiver"] - access_category = stream.get("access_category", None) - stream_time = stream.get("time", DEFAULT_STREAM_TIME) - - # Determine stream type - if "bandwidth" in stream: - bw = stream["bandwidth"] - elif "max_bandwidth_percentage" in stream: - max_bw_percentage = stream["max_bandwidth_percentage"] - bw = max_bw * max_bw_percentage - else: - raise AttributeError( - "Stream %s must have either a 
bandwidth or " - "max_bandwidth_percentage parameter." % stream_id - ) - - stream_params = { - "receiver": receiver, - "access_category": access_category, - "bandwidth": bw, - "time": stream_time, - } - - uuid = transmitter.prepare_asynchronous_stream( - stream_params, subnet_str - ) - stream["uuid"] = uuid - - # Start all streams in phase - start_time = time.time() + 5 - for transmitter in transmitters: - transmitter.start_asynchronous_streams(start_time=start_time) - - # Wait for streams to join - for transmitter in transmitters: - end_time = time.time() + stream_timeout - while transmitter.has_active_streams: - if time.time() > end_time: - raise ConnectionError( - "Transmitter's (%s) active streams are not finishing." - % transmitter.identifier - ) - time.sleep(1) - - # Cleanup all streams - for transmitter in transmitters: - transmitter.cleanup_asynchronous_streams() - - # Validate streams - pass_test = pass_test and self.validate_streams_in_phase( - phase_id, phases, max_bw - ) - - self.graph_test(phases, max_bw) - if pass_test: - asserts.explicit_pass( - "Validation criteria met for all streams in all phases." 
- ) - else: - asserts.fail("At least one stream failed to meet validation criteria.") - - # Test Cases - - # Internal Traffic Differentiation - - def test_internal_traffic_diff_VO_VI(self): - self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_VI) - - def test_internal_traffic_diff_VO_BE(self): - self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BE) - - def test_internal_traffic_diff_VO_BK(self): - self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BK) - - def test_internal_traffic_diff_VI_BE(self): - self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BE) - - def test_internal_traffic_diff_VI_BK(self): - self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BK) - - def test_internal_traffic_diff_BE_BK(self): - self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_BE_BK) - - # External Traffic Differentiation - - """Single station, STAUT transmits high priority""" - - def test_external_traffic_diff_staut_VO_ap_VI(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_VI) - - def test_external_traffic_diff_staut_VO_ap_BE(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BE) - - def test_external_traffic_diff_staut_VO_ap_BK(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BK) - - def test_external_traffic_diff_staut_VI_ap_BE(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BE) - - def test_external_traffic_diff_staut_VI_ap_BK(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BK) - - def test_external_traffic_diff_staut_BE_ap_BK(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_BK) - - """Single station, STAUT transmits low priority""" - - def test_external_traffic_diff_staut_VI_ap_VO(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_VO) - - def test_external_traffic_diff_staut_BE_ap_VO(self): - 
self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VO) - - def test_external_traffic_diff_staut_BK_ap_VO(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VO) - - def test_external_traffic_diff_staut_BE_ap_VI(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VI) - - def test_external_traffic_diff_staut_BK_ap_VI(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VI) - - def test_external_traffic_diff_staut_BK_ap_BE(self): - self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_BE) - - # # Dual Internal/External Traffic Differentiation (Single station) - - def test_dual_traffic_diff_staut_VO_VI_ap_VI(self): - self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_VI_ap_VI) - - def test_dual_traffic_diff_staut_VO_BE_ap_BE(self): - self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_BE_ap_BE) - - def test_dual_traffic_diff_staut_VO_BK_ap_BK(self): - self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_BK_ap_BK) - - def test_dual_traffic_diff_staut_VI_BE_ap_BE(self): - self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VI_BE_ap_BE) - - def test_dual_traffic_diff_staut_VI_BK_ap_BK(self): - self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VI_BK_ap_BK) - - def test_dual_traffic_diff_staut_BE_BK_ap_BK(self): - self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_BE_BK_ap_BK) - - # ACM Bit Conformance Tests (Single station, as WFA test below uses two) - - def test_acm_bit_on_VI(self): - wmm_params_VI_ACM = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VI, - ) - self.run_wmm_test( - wmm_test_cases.test_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM - ) - - # AC Parameter Modificiation Tests (Single station, as WFA test below uses two) - - def test_ac_param_degrade_VO(self): - self.run_wmm_test( - 
wmm_test_cases.test_ac_param_degrade_VO, - wmm_parameters=hostapd_constants.WMM_DEGRADED_VO_PARAMS, - ) - - def test_ac_param_degrade_VI(self): - self.run_wmm_test( - wmm_test_cases.test_ac_param_degrade_VI, - wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS, - ) - - def test_ac_param_improve_BE(self): - self.run_wmm_test( - wmm_test_cases.test_ac_param_improve_BE, - wmm_parameters=hostapd_constants.WMM_IMPROVE_BE_PARAMS, - ) - - def test_ac_param_improve_BK(self): - self.run_wmm_test( - wmm_test_cases.test_ac_param_improve_BK, - wmm_parameters=hostapd_constants.WMM_IMPROVE_BK_PARAMS, - ) - - # WFA Test Plan Tests - - """Traffic Differentiation in Single BSS (Single Station)""" - - def test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE(self): - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE - ) - - def test_wfa_traffic_diff_single_station_staut_VI_BE(self): - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE - ) - - def test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE(self): - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE - ) - - def test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK(self): - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK - ) - - def test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI(self): - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI - ) - - """Traffic Differentiation in Single BSS (Two Stations)""" - - def test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE(self): - asserts.skip_if(not self.secondary_sta, "No secondary station.") - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE - ) - - def test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE(self): - asserts.skip_if(not self.secondary_sta, "No secondary station.") - self.run_wmm_test( - 
wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE - ) - - def test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK(self): - asserts.skip_if(not self.secondary_sta, "No secondary station.") - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK - ) - - def test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI(self): - asserts.skip_if(not self.secondary_sta, "No secondary station.") - self.run_wmm_test( - wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI - ) - - """Test ACM Bit Conformance (Two Stations)""" - - def test_wfa_acm_bit_on_VI(self): - asserts.skip_if(not self.secondary_sta, "No secondary station.") - wmm_params_VI_ACM = utils.merge_dicts( - hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS, - hostapd_constants.WMM_ACM_VI, - ) - self.run_wmm_test( - wmm_test_cases.test_wfa_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM - ) - - """Test the AC Parameter Modification""" - - def test_wfa_ac_param_degrade_VI(self): - asserts.skip_if(not self.secondary_sta, "No secondary station.") - self.run_wmm_test( - wmm_test_cases.test_wfa_ac_param_degrade_VI, - wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS, - ) - - -if __name__ == "__main__": - test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml b/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml deleted file mode 100644 index b70490b..0000000 --- a/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml +++ /dev/null
@@ -1,5408 +0,0 @@ -channel_sweep_test_params: - skip_performance: false - debug_channel_performance_tests: - - test_name: test_random_2g_20mhz_channel - test_channels: - - 8 - test_channel_bandwidth: 20 - - test_name: test_random_dfs_5g_80mhz_channel - test_channels: - - 100 - test_channel_bandwidth: 80 - - test_name: test_random_nondfs_5g_80mhz_channel - test_channels: - - 157 - test_channel_bandwidth: 80 - - test_name: test_channel_165 - test_channels: - - 165 - test_channel_bandwidth: 20 - regulatory_compliance_tests: - - test_name: test_SE_regulatory_compliance - country_code: SE - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 
- - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_us_regulatory_compliance - country_code: US - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - 
"136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - - test_name: test_MX_regulatory_compliance - country_code: MX - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 
80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - - test_name: test_CA_regulatory_compliance - country_code: CA - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - - test_name: test_IN_regulatory_compliance - country_code: IN - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - 
"4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - - test_name: test_NZ_regulatory_compliance - country_code: NZ - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - 
"8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - - test_name: test_GB_regulatory_compliance - country_code: GB - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 
- "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_AU_regulatory_compliance - country_code: AU - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - 
"60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - - test_name: test_JP_regulatory_compliance - country_code: JP - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 
- "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - - test_name: test_FR_regulatory_compliance - country_code: FR - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - 
- 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_DE_regulatory_compliance - country_code: DE - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - 
allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_CH_regulatory_compliance - country_code: CH - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - 
- 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_BE_regulatory_compliance - country_code: BE - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - 
"100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_IE_regulatory_compliance - country_code: IE - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 
20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_NO_regulatory_compliance - country_code: "NO" - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_ES_regulatory_compliance - country_code: ES - test_channels: - "1": - - 20 - "2": - - 20 - 
"3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - - test_name: test_IT_regulatory_compliance - country_code: IT - test_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "14": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 
- - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40 - - 80 - "60": - - 20 - - 40 - - 80 - "64": - - 20 - - 40 - - 80 - "100": - - 20 - - 40 - - 80 - "104": - - 20 - - 40 - - 80 - "108": - - 20 - - 40 - - 80 - "112": - - 20 - - 40 - - 80 - "116": - - 20 - - 40 - - 80 - "120": - - 20 - - 40 - - 80 - "124": - - 20 - - 40 - - 80 - "128": - - 20 - - 40 - - 80 - "132": - - 20 - - 40 - - 80 - "136": - - 20 - - 40 - - 80 - "140": - - 20 - - 40 - - 80 - "144": - - 20 - - 40 - - 80 - "149": - - 20 - - 40 - - 80 - "153": - - 20 - - 40 - - 80 - "157": - - 20 - - 40 - - 80 - "161": - - 20 - - 40 - - 80 - "165": - - 20 - allowed_channels: - "1": - - 20 - "2": - - 20 - "3": - - 20 - "4": - - 20 - "5": - - 20 - "6": - - 20 - "7": - - 20 - "8": - - 20 - "9": - - 20 - "10": - - 20 - "11": - - 20 - "12": - - 20 - "13": - - 20 - "36": - - 20 - - 40 - - 80 - "40": - - 20 - - 40 - - 80 - "44": - - 20 - - 40 - - 80 - "48": - - 20 - - 40 - - 80 - "52": - - 20 - - 40 - - 80 - "56": - - 20 - - 40