[Python] Make it easier to run Python tests in CI (#15714)
* Run Python Test script on CI
* Redirect log for pretty output
* Update output
* Fix
* Update doc
* Update script
* Fix
* Disable re-CASE
* Update test
* Add no-wifi variant
* Fix log
* Remove test in build.yaml
* Enrich functions for enable and disable tests
* Remove wait time, update doc
* Fix
* These words should not be typos?
diff --git a/.github/.wordlist.txt b/.github/.wordlist.txt
index 6bc042e..2136de4 100644
--- a/.github/.wordlist.txt
+++ b/.github/.wordlist.txt
@@ -1415,6 +1415,12 @@
kOperate
kView
xFFFFFFFD
+ClusterObjectTests
+TestTimedRequestTimeout
+datamodel
+appliable
+commissionee
+configs
NAMESERVER
UTF
localedef
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 26d5113..84b0c28 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -265,6 +265,7 @@
run: |
scripts/run_in_build_env.sh 'pip3 install ./out/controller/python/chip-0.0-cp37-abi3-linux_x86_64.whl'
scripts/run_in_build_env.sh '(cd src/controller/python/test/unit_tests/ && python3 -m unittest -v)'
+
build_darwin:
name: Build on Darwin (clang, python_lib, simulated)
timeout-minutes: 200
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index 4557756..58a781c 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -216,3 +216,169 @@
path: objdir-clone/
# objdirs are big; don't hold on to them too long.
retention-days: 5
+ repl_tests_linux:
+ name: REPL Tests - Linux
+ timeout-minutes: 120
+
+ strategy:
+ matrix:
+ build_variant: [no-ble-no-wifi-tsan]
+
+ env:
+ BUILD_VARIANT: ${{matrix.build_variant}}
+ TSAN_OPTIONS: "halt_on_error=1 suppressions=scripts/tests/chiptest/tsan-linux-suppressions.txt"
+
+ if: github.actor != 'restyled-io[bot]'
+ runs-on: ubuntu-latest
+
+ container:
+ image: connectedhomeip/chip-build:0.5.56
+ options:
+ --privileged --sysctl "net.ipv6.conf.all.disable_ipv6=0
+ net.ipv4.conf.all.forwarding=1 net.ipv6.conf.all.forwarding=1"
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ with:
+ submodules: true
+ - name:
+ Try to ensure the directories for core dumping exist and we
+ can write them.
+ run: |
+ mkdir /tmp/cores || true
+ sysctl -w kernel.core_pattern=/tmp/cores/core.%u.%p.%t || true
+ mkdir objdir-clone || true
+ - name: Bootstrap
+ timeout-minutes: 10
+ run: scripts/build/gn_bootstrap.sh
+ - name: Uploading bootstrap logs
+ uses: actions/upload-artifact@v2
+ if: ${{ always() }} && ${{ !env.ACT }}
+ with:
+ name:
+ bootstrap-logs-linux-${{ matrix.build_variant }}${{ matrix.chip_tool }}
+ path: |
+ .environment/gn_out/.ninja_log
+ .environment/pigweed-venv/*.log
+ - name: Build Python REPL and example apps
+ timeout-minutes: 50
+ run: |
+ scripts/run_in_build_env.sh './scripts/build_python.sh --install_wheel build-env'
+ ./scripts/run_in_build_env.sh \
+ "./scripts/build/build_examples.py \
+ --target linux-x64-all-clusters-${BUILD_VARIANT} \
+ build \
+ --copy-artifacts-to objdir-clone \
+ "
+ - name: Run Tests
+ timeout-minutes: 30
+ run: |
+ scripts/run_in_build_env.sh './scripts/tests/run_python_test.py --app chip-all-clusters-app --factoryreset -- -t 3600 --disable-test ClusterObjectTests.TestTimedRequestTimeout'
+ - name: Uploading core files
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }} && ${{ !env.ACT }}
+ with:
+ name:
+ crash-core-linux-python-repl
+ path: /tmp/cores/
+ # Cores are big; don't hold on to them too long.
+ retention-days: 5
+ - name: Uploading objdir for debugging
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }} && ${{ !env.ACT }}
+ with:
+ name:
+ crash-objdir-linux-python-repl
+ path: objdir-clone/
+ # objdirs are big; don't hold on to them too long.
+ retention-days: 5
+
+ repl_tests_darwin:
+ name: REPL Tests - Darwin
+ timeout-minutes: 120
+
+ strategy:
+ matrix:
+ build_variant: [no-ble-no-wifi-tsan]
+ env:
+ BUILD_VARIANT: ${{matrix.build_variant}}
+ TSAN_OPTIONS: "halt_on_error=1"
+
+ if: github.actor != 'restyled-io[bot]'
+ runs-on: macos-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ with:
+ submodules: true
+ - name: Setup Environment
+ # coreutils for stdbuf
+ run: brew install openssl pkg-config coreutils
+ - name:
+ Try to ensure the directories for core dumping and diagnostic
+ log collection exist and we can write them.
+ run: |
+ sudo chown ${USER} /cores || true
+ mkdir -p ~/Library/Logs/DiagnosticReports || true
+ mkdir objdir-clone || true
+ - name: Fix pkgconfig link
+ working-directory: /usr/local/lib/pkgconfig
+ run: |
+ pwd
+ ls -la /usr/local/Cellar/
+ ls -la /usr/local/Cellar/openssl@1.1
+ OPEN_SSL_VERSION=`ls -la /usr/local/Cellar/openssl@1.1 | cat | tail -n1 | awk '{print $NF}'`
+ ln -s /usr/local/Cellar/openssl@1.1/$OPEN_SSL_VERSION/lib/pkgconfig/* .
+ - name: Bootstrap
+ timeout-minutes: 25
+ run: scripts/build/gn_bootstrap.sh
+ - name: Uploading bootstrap logs
+ uses: actions/upload-artifact@v2
+ if: ${{ always() }} && ${{ !env.ACT }}
+ with:
+ name:
+ bootstrap-logs-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }}
+ path: |
+ .environment/gn_out/.ninja_log
+ .environment/pigweed-venv/*.log
+ - name: Build Python REPL and example apps
+ timeout-minutes: 50
+ run: |
+ scripts/run_in_build_env.sh './scripts/build_python.sh --install_wheel build-env'
+ ./scripts/run_in_build_env.sh \
+ "./scripts/build/build_examples.py \
+ --target darwin-x64-all-clusters-${BUILD_VARIANT} \
+ build \
+ --copy-artifacts-to objdir-clone \
+ "
+ - name: Run Tests
+ timeout-minutes: 30
+ run: |
+ scripts/run_in_build_env.sh './scripts/tests/run_python_test.py --app chip-all-clusters-app --factoryreset --app-params "--discriminator 3840 --interface-id -1" -- -t 3600 --disable-test ClusterObjectTests.TestTimedRequestTimeout'
+ - name: Uploading core files
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }} && ${{ !env.ACT }}
+ with:
+ name:
+ crash-core-darwin-python-repl
+ path: /cores/
+ # Cores are big; don't hold on to them too long.
+ retention-days: 5
+ - name: Uploading diagnostic logs
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }} && ${{ !env.ACT }}
+ with:
+ name:
+ crash-log-darwin-python-repl
+ path: ~/Library/Logs/DiagnosticReports/
+ - name: Uploading objdir for debugging
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }} && ${{ !env.ACT }}
+ with:
+ name:
+ crash-objdir-darwin-python-repl
+ path: objdir-clone/
+ # objdirs are big; don't hold on to them too long.
+ retention-days: 5
diff --git a/docs/guides/matter-repl.md b/docs/guides/matter-repl.md
index c9a6aa8..52e0c31 100644
--- a/docs/guides/matter-repl.md
+++ b/docs/guides/matter-repl.md
@@ -163,3 +163,85 @@
[Multi Fabric Commissioning](https://deepnote.com/viewer/github/project-chip/connectedhomeip/blob/master/docs/guides/repl/Matter%20-%20Multi%20Fabric%20Commissioning.ipynb)
[Access Control](https://deepnote.com/viewer/github/project-chip/connectedhomeip/blob/master/docs/guides/repl/Matter%20-%20Access%20Control.ipynb)
+
+## Testing
+
+We also provide `mobile-device-test.py` for testing your accessories; you can
+run it manually or via a wrapper script.
+
+### Usage
+
+mobile-device-test.py provides the following options for running the tests:
+
+```
+ --controller-nodeid INTEGER NodeId of the controller.
+ --device-nodeid INTEGER NodeId of the device.
+ -a, --address TEXT Skip commissionee discovery, commission the
+ device with the IP directly.
+
+ -t, --timeout INTEGER The program will return with timeout after
+ specified seconds.
+
+ --discriminator INTEGER Discriminator of the device.
+ --setup-pin INTEGER Setup pincode of the device.
+ --enable-test TEXT The tests to be executed. By default, all
+ tests will be executed, use this option to
+ run a specific set of tests. Use --print-
+ test-list for a list of applicable tests.
+
+ --disable-test TEXT The tests to be excluded from the set of
+ enabled tests. Use --print-test-list for a
+ list of applicable tests.
+
+ --log-level [ERROR|WARN|INFO|DEBUG]
+ The log level of the test.
+ --log-format TEXT Override logging format
+ --print-test-list Print a list of test cases and test sets
+ that can be toggled via --enable-test and
+ --disable-test, then exit
+
+ --help Show this message and exit.
+```
+
+By default, all tests will be executed, however, you can exclude one or more
+tests or only include a few tests if you want.
+
+For example, if you are working on commissioning, then you may want to exclude
+the data model test cases by adding `--disable-test datamodel` to disable all
+data model tests.
+
+Some tests provide the option to exclude them. For example, you can use
+`--disable-test ClusterObjectTests.TestTimedRequestTimeout` to exclude the
+"TestTimedRequestTimeout" test case.
+
+It is recommended to use the test wrapper to run mobile-device-test.py, for
+example, you can run:
+
+```
+./scripts/tests/run_python_test.py --app chip-all-clusters-app --factoryreset
+```
+
+It provides some extra options, for example:
+
+```
+ --app TEXT Local application to use, omit to use external apps, use
+ a path for a specific binary or use a filename to search
+ under the current matter checkout.
+
+ --factoryreset Remove app config and repl configs (/tmp/chip* and
+ /tmp/repl*) before running the tests.
+
+ --app-params TEXT The extra parameters passed to the device.
+ --script PATH Test script to use.
+ --help Show this message and exit.
+```
+
+You can pass your own flags for mobile-device-test.py by appending them to the
+command line with two dashes, for example:
+
+```
+./scripts/tests/run_python_test.py --app chip-all-clusters-app --factoryreset -- -t 90 --disable-test ClusterObjectTests.TestTimedRequestTimeout
+```
+
+will pass `-t 90 --disable-test ClusterObjectTests.TestTimedRequestTimeout` to
+`mobile-device-test.py`
diff --git a/scripts/build/build/targets.py b/scripts/build/build/targets.py
index 3a3f381..e591dfa 100644
--- a/scripts/build/build/targets.py
+++ b/scripts/build/build/targets.py
@@ -229,6 +229,7 @@
# builds is exponential here
builder.AppendVariant(name="ipv6only", enable_ipv4=False),
builder.AppendVariant(name="no-ble", enable_ble=False),
+ builder.AppendVariant(name="no-wifi", enable_wifi=False),
builder.AppendVariant(name="tsan", conflicts=['asan'], use_tsan=True),
builder.AppendVariant(name="asan", conflicts=['tsan'], use_asan=True),
builder.AppendVariant(
diff --git a/scripts/build/builders/host.py b/scripts/build/builders/host.py
index 4e78755..a97b44c 100644
--- a/scripts/build/builders/host.py
+++ b/scripts/build/builders/host.py
@@ -154,7 +154,7 @@
class HostBuilder(GnBuilder):
def __init__(self, root, runner, app: HostApp, board=HostBoard.NATIVE, enable_ipv4=True,
- enable_ble=True, use_tsan=False, use_asan=False, separate_event_loop=True,
+ enable_ble=True, enable_wifi=True, use_tsan=False, use_asan=False, separate_event_loop=True,
test_group=False, use_libfuzzer=False, use_clang=False,
use_platform_mdns=False):
super(HostBuilder, self).__init__(
@@ -171,6 +171,9 @@
if not enable_ble:
self.extra_gn_options.append('chip_config_network_layer_ble=false')
+ if not enable_wifi:
+ self.extra_gn_options.append('chip_enable_wifi=false')
+
if use_tsan:
self.extra_gn_options.append('is_tsan=true')
diff --git a/scripts/build_python.sh b/scripts/build_python.sh
index 7949f04..c4504f1 100755
--- a/scripts/build_python.sh
+++ b/scripts/build_python.sh
@@ -42,6 +42,7 @@
declare enable_pybindings=false
declare chip_mdns
declare case_retry_delta
+declare install_wheel=no
help() {
@@ -58,6 +59,10 @@
-t --time_between_case_retries MRPActiveRetryInterval Specify MRPActiveRetryInterval value
Default is 300 ms
+ -i, --install_wheel no|build-env|separate Where to install the Python wheel
+ no: Do not install
+ build-env: install to virtual env for build matter
+ separate: install to another virtual env (out/python_env)
"
}
@@ -85,6 +90,10 @@
chip_case_retry_delta=$2
shift
;;
+ --install_wheel | -i)
+ install_wheel=$2
+ shift
+ ;;
-*)
help
echo "Unknown Option \"$1\""
@@ -114,24 +123,34 @@
ninja -C "$OUTPUT_ROOT" python
fi
-# Create a virtual environment that has access to the built python tools
-virtualenv --clear "$ENVIRONMENT_ROOT"
-
-# Activate the new environment to register the python WHL
-
if [ "$enable_pybindings" == true ]; then
WHEEL=$(ls "$OUTPUT_ROOT"/pybindings/pycontroller/pychip-*.whl | head -n 1)
else
WHEEL=$(ls "$OUTPUT_ROOT"/controller/python/chip-*.whl | head -n 1)
fi
-source "$ENVIRONMENT_ROOT"/bin/activate
-"$ENVIRONMENT_ROOT"/bin/python -m pip install --upgrade pip
-"$ENVIRONMENT_ROOT"/bin/pip install --upgrade --force-reinstall --no-cache-dir "$WHEEL"
+if [ "$install_wheel" = "no" ]; then
+ exit 0
+elif [ "$install_wheel" = "separate" ]; then
+ # Create a virtual environment that has access to the built python tools
+ virtualenv --clear "$ENVIRONMENT_ROOT"
-echo ""
-echo_green "Compilation completed and WHL package installed in: "
-echo_blue " $ENVIRONMENT_ROOT"
-echo ""
-echo_green "To use please run:"
-echo_bold_white " source $ENVIRONMENT_ROOT/bin/activate"
+ source "$ENVIRONMENT_ROOT"/bin/activate
+ "$ENVIRONMENT_ROOT"/bin/python -m pip install --upgrade pip
+ "$ENVIRONMENT_ROOT"/bin/pip install --upgrade --force-reinstall --no-cache-dir "$WHEEL"
+
+ echo ""
+ echo_green "Compilation completed and WHL package installed in: "
+ echo_blue " $ENVIRONMENT_ROOT"
+ echo ""
+ echo_green "To use please run:"
+ echo_bold_white " source $ENVIRONMENT_ROOT/bin/activate"
+elif [ "$install_wheel" = "build-env" ]; then
+ pip install --force-reinstall "$WHEEL"
+
+ echo ""
+ echo_green "Compilation completed and WHL package installed in virtualenv for building sdk"
+ echo ""
+ echo_green "To use please run:"
+ echo_bold_white " source $CHIP_ROOT/scripts/activate.sh"
+fi
diff --git a/scripts/tests/run_python_test.py b/scripts/tests/run_python_test.py
new file mode 100755
index 0000000..229e265
--- /dev/null
+++ b/scripts/tests/run_python_test.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env -S python3 -B
+
+# Copyright (c) 2022 Project CHIP Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pty
+import subprocess
+import click
+import os
+import pathlib
+import typing
+import queue
+import threading
+import sys
+import time
+import datetime
+import shlex
+import logging
+
+DEFAULT_CHIP_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..'))
+
+
+def FindBinaryPath(name: str):
+ for path in pathlib.Path(DEFAULT_CHIP_ROOT).rglob(name):
+ if not path.is_file():
+ continue
+ if path.name != name:
+ continue
+ return str(path)
+
+ return None
+
+
+def EnqueueLogOutput(fp, tag, q):
+ for line in iter(fp.readline, b''):
+ timestamp = time.time()
+ if len(line) > len('[1646290606.901990]') and line[0:1] == b'[':
+ try:
+ timestamp = float(line[1:18].decode())
+ line = line[19:]
+ except Exception as ex:
+ pass
+ q.put((tag, line, datetime.datetime.fromtimestamp(
+ timestamp).isoformat(sep=" ")))
+ fp.close()
+
+
+def RedirectQueueThread(fp, tag, queue) -> threading.Thread:
+ log_queue_thread = threading.Thread(target=EnqueueLogOutput, args=(
+ fp, tag, queue))
+ log_queue_thread.start()
+ return log_queue_thread
+
+
+def DumpLogOutput(q: queue.Queue):
+ # TODO: Due to the nature of os pipes, the order of the timestamp is not guaranteed, need to figure out a better output format.
+ while True:
+ line = q.get_nowait()
+ sys.stdout.buffer.write(
+ (f"[{line[2]}]").encode() + line[0] + line[1])
+ sys.stdout.flush()
+
+
+def DumpProgramOutputToQueue(thread_list: typing.List[threading.Thread], tag: str, process: subprocess.Popen, queue: queue.Queue):
+ thread_list.append(RedirectQueueThread(process.stdout,
+ (f"[{tag}][\33[33mSTDOUT\33[0m]").encode(), queue))
+ thread_list.append(RedirectQueueThread(process.stderr,
+ (f"[{tag}][\33[31mSTDERR\33[0m]").encode(), queue))
+
+
+@click.command()
+@click.option("--app", type=str, default=None, help='Local application to use, omit to use external apps, use a path for a specific binary or use a filename to search under the current matter checkout.')
+@click.option("--factoryreset", is_flag=True, help='Remove app config and repl configs (/tmp/chip* and /tmp/repl*) before running the tests.')
+@click.option("--app-params", type=str, default='', help='The extra parameters passed to the device.')
+@click.option("--script", type=click.Path(exists=True), default=FindBinaryPath("mobile-device-test.py"), help='Test script to use.')
+@click.argument("script-args", nargs=-1, type=str)
+def main(app: str, factoryreset: bool, app_params: str, script: str, script_args: typing.List[str]):
+ if factoryreset:
+ retcode = subprocess.call("rm -rf /tmp/chip* /tmp/repl*", shell=True)
+ if retcode != 0:
+ raise Exception("Failed to remove /tmp/chip* for factory reset.")
+
+ log_queue = queue.Queue()
+ log_cooking_threads = []
+
+ app_process = None
+ if app:
+ if not os.path.exists(app):
+ app = FindBinaryPath(app)
+ if app is None:
+ raise FileNotFoundError(f"{app} not found")
+ app_args = [app] + shlex.split(app_params)
+ logging.info(f"Execute: {app_args}")
+ app_process = subprocess.Popen(
+ app_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0)
+ DumpProgramOutputToQueue(
+ log_cooking_threads, "\33[34mAPP \33[0m", app_process, log_queue)
+
+ script_command = ["/usr/bin/env", "python3", script,
+ '--log-format', '%(message)s'] + [v for v in script_args]
+ logging.info(f"Execute: {script_command}")
+ test_script_process = subprocess.Popen(
+ script_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ DumpProgramOutputToQueue(log_cooking_threads, "\33[32mTEST\33[0m",
+ test_script_process, log_queue)
+
+ test_script_exit_code = test_script_process.poll()
+ while test_script_exit_code is None:
+ try:
+ DumpLogOutput(log_queue)
+ except queue.Empty:
+ pass
+ test_script_exit_code = test_script_process.poll()
+
+ test_app_exit_code = 0
+ if app_process:
+ app_process.send_signal(2)
+
+ test_app_exit_code = app_process.poll()
+ while test_app_exit_code is None:
+ try:
+ DumpLogOutput(log_queue)
+ except queue.Empty:
+ pass
+ test_app_exit_code = app_process.poll()
+
+ # There are some logs not cooked, so we wait until we have processed all logs.
+ # This procedure should be very fast since the related processes are finished.
+ for thread in log_cooking_threads:
+ thread.join()
+
+ try:
+ DumpLogOutput(log_queue)
+ except queue.Empty:
+ pass
+
+ if test_script_exit_code != 0:
+ sys.exit(test_script_exit_code)
+ else:
+ # We expect both app and test script should exit with 0
+ sys.exit(test_app_exit_code)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/controller/python/test/test_scripts/base.py b/src/controller/python/test/test_scripts/base.py
index 4094b2e..2db688e 100644
--- a/src/controller/python/test/test_scripts/base.py
+++ b/src/controller/python/test/test_scripts/base.py
@@ -15,8 +15,10 @@
# limitations under the License.
#
+import asyncio
from dataclasses import dataclass
from inspect import Attribute
+import inspect
from typing import Any
import typing
from chip import ChipDeviceCtrl
@@ -55,6 +57,67 @@
TestFail(message)
+_configurable_tests = set()
+_configurable_test_sets = set()
+_enabled_tests = []
+_disabled_tests = []
+
+
+def SetTestSet(enabled_tests, disabled_tests):
+ global _enabled_tests, _disabled_tests
+ _enabled_tests = enabled_tests[:]
+ _disabled_tests = disabled_tests[:]
+
+
+def TestIsEnabled(test_name: str):
+ enabled_len = -1
+ disabled_len = -1
+ if 'all' in _enabled_tests:
+ enabled_len = 0
+ if 'all' in _disabled_tests:
+ disabled_len = 0
+
+ for test_item in _enabled_tests:
+ if test_name.startswith(test_item) and (len(test_item) > enabled_len):
+ enabled_len = len(test_item)
+
+ for test_item in _disabled_tests:
+ if test_name.startswith(test_item) and (len(test_item) > disabled_len):
+ disabled_len = len(test_item)
+
+ return enabled_len > disabled_len
+
+
+def test_set(cls):
+ _configurable_test_sets.add(cls.__qualname__)
+ return cls
+
+
+def test_case(func):
+ test_name = func.__qualname__
+ _configurable_tests.add(test_name)
+
+ def CheckEnableBeforeRun(*args, **kwargs):
+ if TestIsEnabled(test_name=test_name):
+ return func(*args, **kwargs)
+ elif inspect.iscoroutinefunction(func):
+ # noop, so users can use await as usual
+ return asyncio.sleep(0)
+ return CheckEnableBeforeRun
+
+
+def configurable_tests():
+ res = [v for v in _configurable_test_sets]
+ res.sort()
+ return res
+
+
+def configurable_test_cases():
+ res = [v for v in _configurable_tests]
+ res.sort()
+ return res
+
+
class TestTimeout(threading.Thread):
def __init__(self, timeout: int):
threading.Thread.__init__(self)
@@ -136,6 +199,16 @@
self.logger.info(f"Found device at {res}")
return res
+ def TestKeyExchangeBLE(self, discriminator: int, setuppin: int, nodeid: int):
+ self.logger.info(
+ "Conducting key exchange with device {}".format(discriminator))
+ if not self.devCtrl.ConnectBLE(discriminator, setuppin, nodeid):
+ self.logger.info(
+ "Failed to finish key exchange with device {}".format(discriminator))
+ return False
+ self.logger.info("Device finished key exchange.")
+ return True
+
def TestKeyExchange(self, ip: str, setuppin: int, nodeid: int):
self.logger.info("Conducting key exchange with device {}".format(ip))
if not self.devCtrl.CommissionIP(ip.encode("utf-8"), setuppin, nodeid):
diff --git a/src/controller/python/test/test_scripts/cluster_objects.py b/src/controller/python/test/test_scripts/cluster_objects.py
index bc2b9f2..2586c18 100644
--- a/src/controller/python/test/test_scripts/cluster_objects.py
+++ b/src/controller/python/test/test_scripts/cluster_objects.py
@@ -15,7 +15,7 @@
# limitations under the License.
#
-
+import pprint
import chip.clusters as Clusters
import chip.exceptions
import logging
@@ -24,6 +24,8 @@
import asyncio
import time
+import base
+
logger = logging.getLogger('PythonMatterControllerTEST')
logger.setLevel(logging.INFO)
@@ -44,19 +46,18 @@
def VerifyDecodeSuccess(values):
- print(f"{values}")
+ pprint.pprint(values)
for endpoint in values:
for cluster in values[endpoint]:
for attribute in values[endpoint][cluster]:
v = values[endpoint][cluster][attribute]
- print(f"EP{endpoint}/{cluster}/{attribute} = {v}")
if (isinstance(v, ValueDecodeFailure)):
if _IgnoreAttributeDecodeFailure((endpoint, cluster, attribute)):
print(
- f"Ignoring attribute decode failure for path {endpoint}/{cluster}/{attribute}")
+ f"Ignoring attribute decode failure for path {endpoint}/{cluster}/{attribute}")
else:
raise AssertionError(
- f"Cannot decode value for path {endpoint}/{cluster}/{attribute}, got error: '{str(v.Reason)}', raw TLV data: '{v.TLVValue}'")
+ f"Cannot decode value for path {endpoint}/{cluster}/{attribute}, got error: '{str(v.Reason)}', raw TLV data: '{v.TLVValue}'")
for endpoint in values:
for cluster in values[endpoint]:
@@ -71,8 +72,10 @@
print(f"Dump the events: {values} ")
+@base.test_set
class ClusterObjectTests:
@classmethod
+ @base.test_case
def TestAPI(cls):
if Clusters.OnOff.id != 6:
raise ValueError()
@@ -86,7 +89,8 @@
raise ValueError()
@classmethod
- async def RoundTripTest(cls, devCtrl):
+ @base.test_case
+ async def TestCommandRoundTrip(cls, devCtrl):
req = Clusters.OnOff.Commands.On()
res = await devCtrl.SendCommand(nodeid=NODE_ID, endpoint=LIGHTING_ENDPOINT_ID, payload=req)
if res is not None:
@@ -95,7 +99,8 @@
raise ValueError()
@classmethod
- async def RoundTripTestWithBadEndpoint(cls, devCtrl):
+ @base.test_case
+ async def TestCommandRoundTripWithBadEndpoint(cls, devCtrl):
req = Clusters.OnOff.Commands.On()
try:
await devCtrl.SendCommand(nodeid=NODE_ID, endpoint=233, payload=req)
@@ -105,7 +110,8 @@
return
@classmethod
- async def SendCommandWithResponse(cls, devCtrl):
+ @base.test_case
+ async def TestCommandWithResponse(cls, devCtrl):
req = Clusters.TestCluster.Commands.TestAddArguments(arg1=2, arg2=3)
res = await devCtrl.SendCommand(nodeid=NODE_ID, endpoint=LIGHTING_ENDPOINT_ID, payload=req)
if not isinstance(res, Clusters.TestCluster.Commands.TestAddArgumentsResponse):
@@ -116,7 +122,8 @@
raise ValueError()
@classmethod
- async def SendWriteRequest(cls, devCtrl):
+ @base.test_case
+ async def TestWriteRequest(cls, devCtrl):
logger.info("1: Trivial writes (multiple attributes)")
res = await devCtrl.WriteAttribute(nodeid=NODE_ID,
attributes=[
@@ -154,6 +161,7 @@
raise AssertionError("Write returned unexpected result.")
@classmethod
+ @base.test_case
async def TestSubscribeAttribute(cls, devCtrl):
logger.info("Test Subscription")
sub = await devCtrl.ReadAttribute(nodeid=NODE_ID, attributes=[(1, Clusters.OnOff.Attributes.OnOff)], reportInterval=(3, 10))
@@ -179,6 +187,7 @@
sub.Shutdown()
@classmethod
+ @base.test_case
async def TestReadAttributeRequests(cls, devCtrl):
'''
Tests out various permutations of endpoint, cluster and attribute ID (with wildcards) to validate
@@ -284,6 +293,7 @@
raise AssertionError("Got no events back")
@classmethod
+ @base.test_case
async def TestReadEventRequests(cls, devCtrl, expectEventsNum):
logger.info("1: Reading Ex Cx Ex")
req = [
@@ -323,6 +333,7 @@
# TODO: Add more wildcard test for IM events.
@classmethod
+ @base.test_case
async def TestTimedRequest(cls, devCtrl):
logger.info("1: Send Timed Command Request")
req = Clusters.TestCluster.Commands.TimedInvokeRequest()
@@ -336,29 +347,8 @@
],
timedRequestTimeoutMs=1000)
- logger.info("3: Send Timed Command Request -- Timeout")
- try:
- req = Clusters.TestCluster.Commands.TimedInvokeRequest()
- # 10ms is a pretty short timeout, RTT is 400ms in simulated network on CI, so this test should fail.
- await devCtrl.SendCommand(nodeid=NODE_ID, endpoint=1, payload=req, timedRequestTimeoutMs=10)
- raise AssertionError("Timeout expected!")
- except chip.exceptions.ChipStackException:
- pass
-
- logger.info("4: Send Timed Write Request -- Timeout")
- try:
- await devCtrl.WriteAttribute(nodeid=NODE_ID,
- attributes=[
- (1, Clusters.TestCluster.Attributes.TimedWriteBoolean(
- True)),
- ],
- timedRequestTimeoutMs=10)
- raise AssertionError("Timeout expected!")
- except chip.exceptions.ChipStackException:
- pass
-
logger.info(
- "5: Sending TestCluster-TimedInvokeRequest without timedRequestTimeoutMs should be rejected")
+ "3: Sending TestCluster-TimedInvokeRequest without timedRequestTimeoutMs should be rejected")
try:
req = Clusters.TestCluster.Commands.TimedInvokeRequest()
await devCtrl.SendCommand(nodeid=NODE_ID, endpoint=1, payload=req)
@@ -367,7 +357,7 @@
pass
logger.info(
- "6: Writing TestCluster-TimedWriteBoolean without timedRequestTimeoutMs should be rejected")
+ "4: Writing TestCluster-TimedWriteBoolean without timedRequestTimeoutMs should be rejected")
try:
await devCtrl.WriteAttribute(nodeid=NODE_ID,
attributes=[
@@ -379,6 +369,31 @@
pass
@classmethod
+ @base.test_case
+ async def TestTimedRequestTimeout(cls, devCtrl):
+ logger.info("1: Send Timed Command Request -- Timeout")
+ try:
+ req = Clusters.TestCluster.Commands.TimedInvokeRequest()
+ # 1ms is a pretty short timeout, RTT is 400ms in simulated network on CI, so this test should fail.
+ await devCtrl.SendCommand(nodeid=NODE_ID, endpoint=1, payload=req, timedRequestTimeoutMs=1)
+ raise AssertionError("Timeout expected!")
+ except chip.exceptions.ChipStackException:
+ pass
+
+ logger.info("2: Send Timed Write Request -- Timeout")
+ try:
+ await devCtrl.WriteAttribute(nodeid=NODE_ID,
+ attributes=[
+ (1, Clusters.TestCluster.Attributes.TimedWriteBoolean(
+ True)),
+ ],
+ timedRequestTimeoutMs=1)
+ raise AssertionError("Timeout expected!")
+ except chip.exceptions.ChipStackException:
+ pass
+
+ @classmethod
+ @base.test_case
async def TestReadWriteAttributeRequestsWithVersion(cls, devCtrl):
logger.info("TestReadWriteAttributeRequestsWithVersion")
req = [
@@ -457,16 +472,17 @@
async def RunTest(cls, devCtrl):
try:
cls.TestAPI()
- await cls.RoundTripTest(devCtrl)
- await cls.RoundTripTestWithBadEndpoint(devCtrl)
- await cls.SendCommandWithResponse(devCtrl)
+ await cls.TestCommandRoundTrip(devCtrl)
+ await cls.TestCommandRoundTripWithBadEndpoint(devCtrl)
+ await cls.TestCommandWithResponse(devCtrl)
await cls.TestReadEventRequests(devCtrl, 1)
await cls.TestReadWriteAttributeRequestsWithVersion(devCtrl)
await cls.TestReadAttributeRequests(devCtrl)
await cls.TestSubscribeAttribute(devCtrl)
# Note: Write will change some attribute values, always put it after read tests
- await cls.SendWriteRequest(devCtrl)
+ await cls.TestWriteRequest(devCtrl)
await cls.TestTimedRequest(devCtrl)
+ await cls.TestTimedRequestTimeout(devCtrl)
except Exception as ex:
logger.error(
f"Unexpected error occurred when running tests: {ex}")
diff --git a/src/controller/python/test/test_scripts/mobile-device-test.py b/src/controller/python/test/test_scripts/mobile-device-test.py
index fdaaeca..4c8a137 100755
--- a/src/controller/python/test/test_scripts/mobile-device-test.py
+++ b/src/controller/python/test/test_scripts/mobile-device-test.py
@@ -18,10 +18,15 @@
#
# Commissioning test.
+from logging import disable
import os
import sys
-from optparse import OptionParser
-from base import TestFail, TestTimeout, BaseTestHelper, FailIfNot, logger
+import click
+import coloredlogs
+import chip.logging
+import logging
+from base import TestFail, TestTimeout, BaseTestHelper, FailIfNot, logger, TestIsEnabled, SetTestSet
+import base
from cluster_objects import NODE_ID, ClusterObjectTests
from network_commissioning import NetworkCommissioningTests
import asyncio
@@ -40,53 +45,35 @@
# Network id, for the thread network, current a const value, will be changed to XPANID of the thread network.
TEST_THREAD_NETWORK_ID = "fedcba9876543210"
TEST_DISCRIMINATOR = 3840
+TEST_SETUPPIN = 20202021
ENDPOINT_ID = 0
LIGHTING_ENDPOINT_ID = 1
GROUP_ID = 0
+TEST_CONTROLLER_NODE_ID = 112233
+TEST_DEVICE_NODE_ID = 1
-def main():
- optParser = OptionParser()
- optParser.add_option(
- "-t",
- "--timeout",
- action="store",
- dest="testTimeout",
- default=75,
- type='int',
- help="The program will return with timeout after specified seconds.",
- metavar="<timeout-second>",
- )
- optParser.add_option(
- "-a",
- "--address",
- action="store",
- dest="deviceAddress",
- default='',
- type='str',
- help="Address of the device",
- metavar="<device-addr>",
- )
+ALL_TESTS = ['network_commissioning', 'datamodel']
- (options, remainingArgs) = optParser.parse_args(sys.argv[1:])
- timeoutTicker = TestTimeout(options.testTimeout)
- timeoutTicker.start()
-
- test = BaseTestHelper(nodeid=112233)
-
+def ethernet_commissioning(test: BaseTestHelper, discriminator: int, setup_pin: int, address_override: str, device_nodeid: int):
logger.info("Testing discovery")
- FailIfNot(test.TestDiscovery(discriminator=TEST_DISCRIMINATOR),
- "Failed to discover any devices.")
+ address = test.TestDiscovery(discriminator=discriminator)
+ FailIfNot(address, "Failed to discover any devices.")
# FailIfNot(test.SetNetworkCommissioningParameters(dataset=TEST_THREAD_NETWORK_DATASET_TLV),
# "Failed to finish network commissioning")
+ if address_override:
+ address = address_override
+ else:
+ address = address.decode("utf-8")
+
logger.info("Testing key exchange")
- FailIfNot(test.TestKeyExchange(ip=options.deviceAddress,
- setuppin=20202021,
- nodeid=1),
+ FailIfNot(test.TestKeyExchange(ip=address,
+ setuppin=setup_pin,
+ nodeid=device_nodeid),
"Failed to finish key exchange")
#
@@ -95,38 +82,34 @@
#
# Issue: #15688
#
- # asyncio.run(test.TestMultiFabric(ip=options.deviceAddress,
+ # asyncio.run(test.TestMultiFabric(ip=address.decode("utf-8"),
# setuppin=20202021,
# nodeid=1))
#
- # logger.info("Testing writing/reading fabric sensitive data")
- # asyncio.run(test.TestFabricSensitive(nodeid=1))
+ # The server will crash if we are aborting / closing it too fast.
+ # Issue: #15987
+ # logger.info("Testing closing sessions")
+ # FailIfNot(test.TestCloseSession(nodeid=device_nodeid),
+ # "Failed to close sessions")
- logger.info("Testing closing sessions")
- FailIfNot(test.TestCloseSession(nodeid=1), "Failed to close sessions")
- logger.info("Testing resolve")
- FailIfNot(test.TestResolve(nodeid=1),
- "Failed to resolve nodeid")
-
- # Still test network commissioning
- logger.info("Testing network commissioning")
- FailIfNot(asyncio.run(NetworkCommissioningTests(devCtrl=test.devCtrl, nodeid=1).run()),
- "Failed to finish network commissioning")
+@base.test_case
+def TestDatamodel(test: BaseTestHelper, device_nodeid: int):
+ logger.info("Testing datamodel functions")
logger.info("Testing on off cluster")
- FailIfNot(test.TestOnOffCluster(nodeid=1,
+ FailIfNot(test.TestOnOffCluster(nodeid=device_nodeid,
endpoint=LIGHTING_ENDPOINT_ID,
group=GROUP_ID), "Failed to test on off cluster")
logger.info("Testing level control cluster")
- FailIfNot(test.TestLevelControlCluster(nodeid=1,
+ FailIfNot(test.TestLevelControlCluster(nodeid=device_nodeid,
endpoint=LIGHTING_ENDPOINT_ID,
group=GROUP_ID),
"Failed to test level control cluster")
logger.info("Testing sending commands to non exist endpoint")
- FailIfNot(not test.TestOnOffCluster(nodeid=1,
+ FailIfNot(not test.TestOnOffCluster(nodeid=device_nodeid,
endpoint=233,
group=GROUP_ID), "Failed to test on off cluster on non-exist endpoint")
@@ -136,13 +119,13 @@
"Failed when testing Python Cluster Object APIs")
logger.info("Testing attribute reading")
- FailIfNot(test.TestReadBasicAttributes(nodeid=1,
+ FailIfNot(test.TestReadBasicAttributes(nodeid=device_nodeid,
endpoint=ENDPOINT_ID,
group=GROUP_ID),
"Failed to test Read Basic Attributes")
logger.info("Testing attribute writing")
- FailIfNot(test.TestWriteBasicAttributes(nodeid=1,
+ FailIfNot(test.TestWriteBasicAttributes(nodeid=device_nodeid,
endpoint=ENDPOINT_ID,
group=GROUP_ID),
"Failed to test Write Basic Attributes")
@@ -154,18 +137,46 @@
"Failed to test Read Basic Attributes")
logger.info("Testing subscription")
- FailIfNot(test.TestSubscription(nodeid=1, endpoint=LIGHTING_ENDPOINT_ID),
+ FailIfNot(test.TestSubscription(nodeid=device_nodeid, endpoint=LIGHTING_ENDPOINT_ID),
"Failed to subscribe attributes.")
logger.info("Testing another subscription that kills previous subscriptions")
- FailIfNot(test.TestSubscription(nodeid=1, endpoint=LIGHTING_ENDPOINT_ID),
+ FailIfNot(test.TestSubscription(nodeid=device_nodeid, endpoint=LIGHTING_ENDPOINT_ID),
"Failed to subscribe attributes.")
logger.info("Testing on off cluster over resolved connection")
- FailIfNot(test.TestOnOffCluster(nodeid=1,
+ FailIfNot(test.TestOnOffCluster(nodeid=device_nodeid,
endpoint=LIGHTING_ENDPOINT_ID,
group=GROUP_ID), "Failed to test on off cluster")
+ # logger.info("Testing writing/reading fabric sensitive data")
+ # asyncio.run(test.TestFabricSensitive(nodeid=device_nodeid))
+
+
+def do_tests(controller_nodeid, device_nodeid, address, timeout, discriminator, setup_pin):
+ timeoutTicker = TestTimeout(timeout)
+ timeoutTicker.start()
+
+ test = BaseTestHelper(nodeid=controller_nodeid)
+
+ chip.logging.RedirectToPythonLogging()
+
+ ethernet_commissioning(test, discriminator, setup_pin, address,
+ device_nodeid)
+
+ logger.info("Testing resolve")
+ FailIfNot(test.TestResolve(nodeid=device_nodeid),
+ "Failed to resolve nodeid")
+
+ # Still test network commissioning
+ FailIfNot(asyncio.run(NetworkCommissioningTests(devCtrl=test.devCtrl, nodeid=device_nodeid).run()),
+ "Failed to finish network commissioning")
+
+ TestDatamodel(test, device_nodeid)
+
+ logger.info("Testing non-controller APIs")
+ FailIfNot(test.TestNonControllerAPIs(), "Non controller API test failed")
+
timeoutTicker.stop()
logger.info("Test finished")
@@ -175,9 +186,45 @@
os._exit(0)
+@click.command()
+@click.option("--controller-nodeid", default=TEST_CONTROLLER_NODE_ID, type=int, help="NodeId of the controller.")
+@click.option("--device-nodeid", default=TEST_DEVICE_NODE_ID, type=int, help="NodeId of the device.")
+@click.option("--address", "-a", default='', type=str, help="Skip commissionee discovery, commission the device with the IP directly.")
+@click.option("--timeout", "-t", default=240, type=int, help="The program will return with timeout after specified seconds.")
+@click.option("--discriminator", default=TEST_DISCRIMINATOR, type=int, help="Discriminator of the device.")
+@click.option("--setup-pin", default=TEST_SETUPPIN, type=int, help="Setup pincode of the device.")
+@click.option('--enable-test', default=['all'], type=str, multiple=True, help='The tests to be executed. By default, all tests will be executed; use this option to run a specific set of tests. Use --print-test-list for a list of applicable tests.')
+@click.option('--disable-test', default=[], type=str, multiple=True, help='The tests to be excluded from the set of enabled tests. Use --print-test-list for a list of applicable tests.')
+@click.option('--log-level', default='WARN', type=click.Choice(['ERROR', 'WARN', 'INFO', 'DEBUG']), help="The log level of the test.")
+@click.option('--log-format', default=None, type=str, help="Override logging format")
+@click.option('--print-test-list', is_flag=True, help="Print a list of test cases and test sets that can be toggled via --enable-test and --disable-test, then exit")
+def run(controller_nodeid, device_nodeid, address, timeout, discriminator, setup_pin, enable_test, disable_test, log_level, log_format, print_test_list):
+ coloredlogs.install(level=log_level, fmt=log_format, logger=logger)
+
+ if print_test_list:
+ print("Test sets:")
+ for name in base.configurable_tests():
+ print(f"\t{name}")
+ print("Test cases:")
+ for name in base.configurable_test_cases():
+ print(f"\t{name}")
+ return
+
+ logger.info("Test Parameters:")
+ logger.info(f"\tController NodeId: {controller_nodeid}")
+ logger.info(f"\tDevice NodeId: {device_nodeid}")
+ logger.info(f"\tTest Timeout: {timeout}s")
+ logger.info(f"\tDiscriminator: {discriminator}")
+ logger.info(f"\tEnabled Tests: {enable_test}")
+ logger.info(f"\tDisabled Tests: {disable_test}")
+ SetTestSet(enable_test, disable_test)
+ do_tests(controller_nodeid, device_nodeid, address, timeout,
+ discriminator, setup_pin)
+
+
if __name__ == "__main__":
try:
- main()
+ run()
except Exception as ex:
logger.exception(ex)
TestFail("Exception occurred when running tests.")
diff --git a/src/controller/python/test/test_scripts/network_commissioning.py b/src/controller/python/test/test_scripts/network_commissioning.py
index 13f64bd..baf8a6d 100644
--- a/src/controller/python/test/test_scripts/network_commissioning.py
+++ b/src/controller/python/test/test_scripts/network_commissioning.py
@@ -22,6 +22,8 @@
import chip.interaction_model
import asyncio
+import base
+
logger = logging.getLogger('NetworkCommissioning')
logger.setLevel(logging.INFO)
@@ -48,6 +50,7 @@
THREAD_NETWORK_FEATURE_MAP = 2
+@base.test_set
class NetworkCommissioningTests:
def __init__(self, devCtrl, nodeid):
self._devCtrl = devCtrl
@@ -279,26 +282,34 @@
raise AssertionError(
f"Unexpected result: network is not marked as connected")
+ @base.test_case
+ async def Test(self):
+ clusters = await self._devCtrl.ReadAttribute(nodeid=self._nodeid, attributes=[(Clusters.Descriptor.Attributes.ServerList)], returnClusterObject=True)
+ if Clusters.NetworkCommissioning.id not in clusters[0][Clusters.Descriptor].serverList:
+            logger.info(
+                "Network commissioning cluster is not enabled on this device.")
+ return
+ endpoints = await self._devCtrl.ReadAttribute(nodeid=self._nodeid, attributes=[(Clusters.NetworkCommissioning.Attributes.FeatureMap)], returnClusterObject=True)
+ logger.info(endpoints)
+ for endpoint, obj in endpoints.items():
+ clus = obj[Clusters.NetworkCommissioning]
+ if clus.featureMap == WIFI_NETWORK_FEATURE_MAP:
+ logger.info(
+ f"Endpoint {endpoint} is configured as WiFi network, run WiFi commissioning test.")
+ await self.test_negative(endpoint)
+ await self.test_wifi(endpoint)
+ elif clus.featureMap == THREAD_NETWORK_FEATURE_MAP:
+ logger.info(
+ f"Endpoint {endpoint} is configured as Thread network, run Thread commissioning test.")
+ await self.test_negative(endpoint)
+ await self.test_thread(endpoint)
+ else:
+ logger.info(
+ f"Skip endpoint {endpoint} with featureMap {clus.featureMap}")
+
async def run(self):
try:
- endpoints = await self._devCtrl.ReadAttribute(nodeid=self._nodeid, attributes=[(Clusters.NetworkCommissioning.Attributes.FeatureMap)], returnClusterObject=True)
- logger.info(endpoints)
- for endpoint, obj in endpoints.items():
- clus = obj[Clusters.NetworkCommissioning]
- if clus.featureMap == WIFI_NETWORK_FEATURE_MAP:
- logger.info(
- f"Endpoint {endpoint} is configured as WiFi network, run WiFi commissioning test.")
- await self.test_negative(endpoint)
- await self.test_wifi(endpoint)
- elif clus.featureMap == THREAD_NETWORK_FEATURE_MAP:
- logger.info(
- f"Endpoint {endpoint} is configured as Thread network, run Thread commissioning test.")
- await self.test_negative(endpoint)
- await self.test_thread(endpoint)
- else:
- logger.info(
- f"Skip endpoint {endpoint} with featureMap {clus.featureMap}")
+ await self.Test()
+ return True
except Exception as ex:
- logger.exception(ex)
return False
- return True
diff --git a/src/test_driver/linux-cirque/MobileDeviceTest.py b/src/test_driver/linux-cirque/MobileDeviceTest.py
index 20fb502..9af2969 100755
--- a/src/test_driver/linux-cirque/MobileDeviceTest.py
+++ b/src/test_driver/linux-cirque/MobileDeviceTest.py
@@ -93,8 +93,7 @@
command = "gdb -return-child-result -q -ex run -ex bt --args python3 {} -t 150 -a {}".format(
os.path.join(
- CHIP_REPO, "src/controller/python/test/test_scripts/mobile-device-test.py"),
- ethernet_ip)
+ CHIP_REPO, "src/controller/python/test/test_scripts/mobile-device-test.py"), ethernet_ip)
ret = self.execute_device_cmd(req_device_id, command)
self.assertEqual(ret['return_code'], '0',