blob: 474aadb3da82b72789fcd3559b545d81b69242c9 [file] [log] [blame]
Anas Nashifce2b4182020-03-24 14:40:28 -04001#!/usr/bin/env python3
2# vim: set syntax=python ts=4 :
3#
4# Copyright (c) 2018 Intel Corporation
5# SPDX-License-Identifier: Apache-2.0
6
7import os
8import contextlib
9import string
10import mmap
11import sys
12import re
13import subprocess
14import select
15import shutil
16import shlex
17import signal
18import threading
19import concurrent.futures
20from collections import OrderedDict
21from threading import BoundedSemaphore
22import queue
23import time
24import csv
25import glob
26import concurrent
27import xml.etree.ElementTree as ET
28import logging
29from pathlib import Path
30from distutils.spawn import find_executable
31from colorama import Fore
Martí Bolívar07dce822020-04-13 16:50:51 -070032import platform
Anas Nashifae61b7e2020-07-06 11:30:55 -040033import yaml
34try:
35 # Use the C LibYAML parser if available, rather than the Python parser.
36 # It's much faster.
Anas Nashifae61b7e2020-07-06 11:30:55 -040037 from yaml import CSafeLoader as SafeLoader
38 from yaml import CDumper as Dumper
39except ImportError:
Martí Bolívard8698cb2020-07-08 14:55:14 -070040 from yaml import SafeLoader, Dumper
Anas Nashifce2b4182020-03-24 14:40:28 -040041
42try:
43 import serial
44except ImportError:
45 print("Install pyserial python module with pip to use --device-testing option.")
46
47try:
48 from tabulate import tabulate
49except ImportError:
50 print("Install tabulate python module with pip to use --device-testing option.")
51
Wentong Wu0d619ae2020-05-05 19:46:49 -040052try:
53 import psutil
54except ImportError:
Anas Nashif77946fa2020-05-21 18:19:01 -040055 print("Install psutil python module with pip to run in Qemu.")
Wentong Wu0d619ae2020-05-05 19:46:49 -040056
Anas Nashifce2b4182020-03-24 14:40:28 -040057ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
58if not ZEPHYR_BASE:
59 sys.exit("$ZEPHYR_BASE environment variable undefined")
60
61sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts", "dts"))
62import edtlib
63
64hw_map_local = threading.Lock()
65report_lock = threading.Lock()
66
67# Use this for internal comparisons; that's what canonicalization is
68# for. Don't use it when invoking other components of the build system
69# to avoid confusing and hard to trace inconsistencies in error messages
70# and logs, generated Makefiles, etc. compared to when users invoke these
71# components directly.
72# Note "normalization" is different from canonicalization, see os.path.
73canonical_zephyr_base = os.path.realpath(ZEPHYR_BASE)
74
75sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/"))
76
77from sanity_chk import scl
78from sanity_chk import expr_parser
79
Anas Nashifce2b4182020-03-24 14:40:28 -040080logger = logging.getLogger('sanitycheck')
81logger.setLevel(logging.DEBUG)
82
Anas Nashifce2b4182020-03-24 14:40:28 -040083pipeline = queue.LifoQueue()
84
85class CMakeCacheEntry:
86 '''Represents a CMake cache entry.
87
88 This class understands the type system in a CMakeCache.txt, and
89 converts the following cache types to Python types:
90
91 Cache Type Python type
92 ---------- -------------------------------------------
93 FILEPATH str
94 PATH str
95 STRING str OR list of str (if ';' is in the value)
96 BOOL bool
97 INTERNAL str OR list of str (if ';' is in the value)
98 ---------- -------------------------------------------
99 '''
100
101 # Regular expression for a cache entry.
102 #
103 # CMake variable names can include escape characters, allowing a
104 # wider set of names than is easy to match with a regular
105 # expression. To be permissive here, use a non-greedy match up to
106 # the first colon (':'). This breaks if the variable name has a
107 # colon inside, but it's good enough.
108 CACHE_ENTRY = re.compile(
109 r'''(?P<name>.*?) # name
110 :(?P<type>FILEPATH|PATH|STRING|BOOL|INTERNAL) # type
111 =(?P<value>.*) # value
112 ''', re.X)
113
114 @classmethod
115 def _to_bool(cls, val):
116 # Convert a CMake BOOL string into a Python bool.
117 #
118 # "True if the constant is 1, ON, YES, TRUE, Y, or a
119 # non-zero number. False if the constant is 0, OFF, NO,
120 # FALSE, N, IGNORE, NOTFOUND, the empty string, or ends in
121 # the suffix -NOTFOUND. Named boolean constants are
122 # case-insensitive. If the argument is not one of these
123 # constants, it is treated as a variable."
124 #
125 # https://cmake.org/cmake/help/v3.0/command/if.html
126 val = val.upper()
127 if val in ('ON', 'YES', 'TRUE', 'Y'):
128 return 1
129 elif val in ('OFF', 'NO', 'FALSE', 'N', 'IGNORE', 'NOTFOUND', ''):
130 return 0
131 elif val.endswith('-NOTFOUND'):
132 return 0
133 else:
134 try:
135 v = int(val)
136 return v != 0
137 except ValueError as exc:
138 raise ValueError('invalid bool {}'.format(val)) from exc
139
140 @classmethod
141 def from_line(cls, line, line_no):
142 # Comments can only occur at the beginning of a line.
143 # (The value of an entry could contain a comment character).
144 if line.startswith('//') or line.startswith('#'):
145 return None
146
147 # Whitespace-only lines do not contain cache entries.
148 if not line.strip():
149 return None
150
151 m = cls.CACHE_ENTRY.match(line)
152 if not m:
153 return None
154
155 name, type_, value = (m.group(g) for g in ('name', 'type', 'value'))
156 if type_ == 'BOOL':
157 try:
158 value = cls._to_bool(value)
159 except ValueError as exc:
160 args = exc.args + ('on line {}: {}'.format(line_no, line),)
161 raise ValueError(args) from exc
162 elif type_ in ['STRING', 'INTERNAL']:
163 # If the value is a CMake list (i.e. is a string which
164 # contains a ';'), convert to a Python list.
165 if ';' in value:
166 value = value.split(';')
167
168 return CMakeCacheEntry(name, value)
169
170 def __init__(self, name, value):
171 self.name = name
172 self.value = value
173
174 def __str__(self):
175 fmt = 'CMakeCacheEntry(name={}, value={})'
176 return fmt.format(self.name, self.value)
177
178
179class CMakeCache:
180 '''Parses and represents a CMake cache file.'''
181
182 @staticmethod
183 def from_file(cache_file):
184 return CMakeCache(cache_file)
185
186 def __init__(self, cache_file):
187 self.cache_file = cache_file
188 self.load(cache_file)
189
190 def load(self, cache_file):
191 entries = []
192 with open(cache_file, 'r') as cache:
193 for line_no, line in enumerate(cache):
194 entry = CMakeCacheEntry.from_line(line, line_no)
195 if entry:
196 entries.append(entry)
197 self._entries = OrderedDict((e.name, e) for e in entries)
198
199 def get(self, name, default=None):
200 entry = self._entries.get(name)
201 if entry is not None:
202 return entry.value
203 else:
204 return default
205
206 def get_list(self, name, default=None):
207 if default is None:
208 default = []
209 entry = self._entries.get(name)
210 if entry is not None:
211 value = entry.value
212 if isinstance(value, list):
213 return value
214 elif isinstance(value, str):
215 return [value] if value else []
216 else:
217 msg = 'invalid value {} type {}'
218 raise RuntimeError(msg.format(value, type(value)))
219 else:
220 return default
221
222 def __contains__(self, name):
223 return name in self._entries
224
225 def __getitem__(self, name):
226 return self._entries[name].value
227
228 def __setitem__(self, name, entry):
229 if not isinstance(entry, CMakeCacheEntry):
230 msg = 'improper type {} for value {}, expecting CMakeCacheEntry'
231 raise TypeError(msg.format(type(entry), entry))
232 self._entries[name] = entry
233
234 def __delitem__(self, name):
235 del self._entries[name]
236
237 def __iter__(self):
238 return iter(self._entries.values())
239
240
241class SanityCheckException(Exception):
242 pass
243
244
245class SanityRuntimeError(SanityCheckException):
246 pass
247
248
249class ConfigurationError(SanityCheckException):
250 def __init__(self, cfile, message):
251 SanityCheckException.__init__(self, cfile + ": " + message)
252
253
254class BuildError(SanityCheckException):
255 pass
256
257
258class ExecutionError(SanityCheckException):
259 pass
260
261
262class HarnessImporter:
263
264 def __init__(self, name):
265 sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
266 module = __import__("harness")
267 if name:
268 my_class = getattr(module, name)
269 else:
270 my_class = getattr(module, "Test")
271
272 self.instance = my_class()
273
274
275class Handler:
276 def __init__(self, instance, type_str="build"):
277 """Constructor
278
279 """
280 self.lock = threading.Lock()
281
282 self.state = "waiting"
283 self.run = False
284 self.duration = 0
285 self.type_str = type_str
286
287 self.binary = None
288 self.pid_fn = None
289 self.call_make_run = False
290
291 self.name = instance.name
292 self.instance = instance
293 self.timeout = instance.testcase.timeout
294 self.sourcedir = instance.testcase.source_dir
295 self.build_dir = instance.build_dir
296 self.log = os.path.join(self.build_dir, "handler.log")
297 self.returncode = 0
298 self.set_state("running", self.duration)
299 self.generator = None
300 self.generator_cmd = None
301
302 self.args = []
303
304 def set_state(self, state, duration):
305 self.lock.acquire()
306 self.state = state
307 self.duration = duration
308 self.lock.release()
309
310 def get_state(self):
311 self.lock.acquire()
312 ret = (self.state, self.duration)
313 self.lock.release()
314 return ret
315
316 def record(self, harness):
317 if harness.recording:
318 filename = os.path.join(self.build_dir, "recording.csv")
319 with open(filename, "at") as csvfile:
320 cw = csv.writer(csvfile, harness.fieldnames, lineterminator=os.linesep)
321 cw.writerow(harness.fieldnames)
322 for instance in harness.recording:
323 cw.writerow(instance)
324
325
326class BinaryHandler(Handler):
327 def __init__(self, instance, type_str):
328 """Constructor
329
330 @param instance Test Instance
331 """
332 super().__init__(instance, type_str)
333
334 self.terminated = False
335
336 # Tool options
337 self.valgrind = False
338 self.lsan = False
339 self.asan = False
Christian Taedcke3dbe9f22020-07-06 16:00:57 +0200340 self.ubsan = False
Anas Nashifce2b4182020-03-24 14:40:28 -0400341 self.coverage = False
342
343 def try_kill_process_by_pid(self):
344 if self.pid_fn:
345 pid = int(open(self.pid_fn).read())
346 os.unlink(self.pid_fn)
347 self.pid_fn = None # clear so we don't try to kill the binary twice
348 try:
349 os.kill(pid, signal.SIGTERM)
350 except ProcessLookupError:
351 pass
352
353 def terminate(self, proc):
354 # encapsulate terminate functionality so we do it consistently where ever
355 # we might want to terminate the proc. We need try_kill_process_by_pid
356 # because of both how newer ninja (1.6.0 or greater) and .NET / renode
357 # work. Newer ninja's don't seem to pass SIGTERM down to the children
358 # so we need to use try_kill_process_by_pid.
359 self.try_kill_process_by_pid()
360 proc.terminate()
Anas Nashif227392c2020-04-27 20:31:56 -0400361 # sleep for a while before attempting to kill
362 time.sleep(0.5)
363 proc.kill()
Anas Nashifce2b4182020-03-24 14:40:28 -0400364 self.terminated = True
365
366 def _output_reader(self, proc, harness):
367 log_out_fp = open(self.log, "wt")
368 for line in iter(proc.stdout.readline, b''):
369 logger.debug("OUTPUT: {0}".format(line.decode('utf-8').rstrip()))
370 log_out_fp.write(line.decode('utf-8'))
371 log_out_fp.flush()
372 harness.handle(line.decode('utf-8').rstrip())
373 if harness.state:
374 try:
375 # POSIX arch based ztests end on their own,
376 # so let's give it up to 100ms to do so
377 proc.wait(0.1)
378 except subprocess.TimeoutExpired:
379 self.terminate(proc)
380 break
381
382 log_out_fp.close()
383
384 def handle(self):
385
386 harness_name = self.instance.testcase.harness.capitalize()
387 harness_import = HarnessImporter(harness_name)
388 harness = harness_import.instance
389 harness.configure(self.instance)
390
391 if self.call_make_run:
392 command = [self.generator_cmd, "run"]
393 else:
394 command = [self.binary]
395
396 run_valgrind = False
397 if self.valgrind and shutil.which("valgrind"):
398 command = ["valgrind", "--error-exitcode=2",
399 "--leak-check=full",
400 "--suppressions=" + ZEPHYR_BASE + "/scripts/valgrind.supp",
401 "--log-file=" + self.build_dir + "/valgrind.log"
402 ] + command
403 run_valgrind = True
404
405 logger.debug("Spawning process: " +
406 " ".join(shlex.quote(word) for word in command) + os.linesep +
407 "in directory: " + self.build_dir)
408
409 start_time = time.time()
410
411 env = os.environ.copy()
412 if self.asan:
413 env["ASAN_OPTIONS"] = "log_path=stdout:" + \
414 env.get("ASAN_OPTIONS", "")
415 if not self.lsan:
416 env["ASAN_OPTIONS"] += "detect_leaks=0"
417
Christian Taedcke3dbe9f22020-07-06 16:00:57 +0200418 if self.ubsan:
419 env["UBSAN_OPTIONS"] = "log_path=stdout:halt_on_error=1:" + \
420 env.get("UBSAN_OPTIONS", "")
421
Anas Nashifce2b4182020-03-24 14:40:28 -0400422 with subprocess.Popen(command, stdout=subprocess.PIPE,
423 stderr=subprocess.PIPE, cwd=self.build_dir, env=env) as proc:
424 logger.debug("Spawning BinaryHandler Thread for %s" % self.name)
425 t = threading.Thread(target=self._output_reader, args=(proc, harness,), daemon=True)
426 t.start()
427 t.join(self.timeout)
428 if t.is_alive():
429 self.terminate(proc)
430 t.join()
431 proc.wait()
432 self.returncode = proc.returncode
433
434 handler_time = time.time() - start_time
435
436 if self.coverage:
437 subprocess.call(["GCOV_PREFIX=" + self.build_dir,
438 "gcov", self.sourcedir, "-b", "-s", self.build_dir], shell=True)
439
440 self.try_kill_process_by_pid()
441
442 # FIXME: This is needed when killing the simulator, the console is
443 # garbled and needs to be reset. Did not find a better way to do that.
444
445 subprocess.call(["stty", "sane"])
446 self.instance.results = harness.tests
447
448 if not self.terminated and self.returncode != 0:
449 # When a process is killed, the default handler returns 128 + SIGTERM
450 # so in that case the return code itself is not meaningful
451 self.set_state("failed", handler_time)
452 self.instance.reason = "Failed"
453 elif run_valgrind and self.returncode == 2:
454 self.set_state("failed", handler_time)
455 self.instance.reason = "Valgrind error"
456 elif harness.state:
457 self.set_state(harness.state, handler_time)
Anas Nashifb802af82020-04-26 21:57:38 -0400458 if harness.state == "failed":
459 self.instance.reason = "Failed"
Anas Nashifce2b4182020-03-24 14:40:28 -0400460 else:
461 self.set_state("timeout", handler_time)
462 self.instance.reason = "Timeout"
463
464 self.record(harness)
465
466
467class DeviceHandler(Handler):
468
469 def __init__(self, instance, type_str):
470 """Constructor
471
472 @param instance Test Instance
473 """
474 super().__init__(instance, type_str)
475
476 self.suite = None
Anas Nashifce2b4182020-03-24 14:40:28 -0400477
478 def monitor_serial(self, ser, halt_fileno, harness):
479 log_out_fp = open(self.log, "wt")
480
481 ser_fileno = ser.fileno()
482 readlist = [halt_fileno, ser_fileno]
483
484 while ser.isOpen():
485 readable, _, _ = select.select(readlist, [], [], self.timeout)
486
487 if halt_fileno in readable:
488 logger.debug('halted')
489 ser.close()
490 break
491 if ser_fileno not in readable:
492 continue # Timeout.
493
494 serial_line = None
495 try:
496 serial_line = ser.readline()
497 except TypeError:
498 pass
499 except serial.SerialException:
500 ser.close()
501 break
502
503 # Just because ser_fileno has data doesn't mean an entire line
504 # is available yet.
505 if serial_line:
506 sl = serial_line.decode('utf-8', 'ignore').lstrip()
507 logger.debug("DEVICE: {0}".format(sl.rstrip()))
508
509 log_out_fp.write(sl)
510 log_out_fp.flush()
511 harness.handle(sl.rstrip())
512
513 if harness.state:
514 ser.close()
515 break
516
517 log_out_fp.close()
518
Anas Nashif3b86f132020-05-21 10:35:33 -0400519 def device_is_available(self, instance):
520 device = instance.platform.name
521 fixture = instance.testcase.harness_config.get("fixture")
Anas Nashifce2b4182020-03-24 14:40:28 -0400522 for i in self.suite.connected_hardware:
Anas Nashif3b86f132020-05-21 10:35:33 -0400523 if fixture and fixture not in i.get('fixtures', []):
524 continue
Anas Nashifce2b4182020-03-24 14:40:28 -0400525 if i['platform'] == device and i['available'] and i['serial']:
526 return True
527
528 return False
529
Anas Nashif3b86f132020-05-21 10:35:33 -0400530 def get_available_device(self, instance):
531 device = instance.platform.name
Anas Nashifce2b4182020-03-24 14:40:28 -0400532 for i in self.suite.connected_hardware:
533 if i['platform'] == device and i['available'] and i['serial']:
534 i['available'] = False
535 i['counter'] += 1
536 return i
537
538 return None
539
540 def make_device_available(self, serial):
541 with hw_map_local:
542 for i in self.suite.connected_hardware:
543 if i['serial'] == serial:
544 i['available'] = True
545
546 @staticmethod
547 def run_custom_script(script, timeout):
548 with subprocess.Popen(script, stderr=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
549 try:
550 stdout, _ = proc.communicate(timeout=timeout)
551 logger.debug(stdout.decode())
552
553 except subprocess.TimeoutExpired:
554 proc.kill()
555 proc.communicate()
556 logger.error("{} timed out".format(script))
557
558 def handle(self):
559 out_state = "failed"
560
Kumar Gala8ca56912020-06-17 06:20:10 -0500561 if self.suite.west_flash is not None:
Anas Nashifce2b4182020-03-24 14:40:28 -0400562 command = ["west", "flash", "--skip-rebuild", "-d", self.build_dir]
Anas Nashifc11f8ac2020-03-31 08:37:55 -0400563 if self.suite.west_runner:
Anas Nashifce2b4182020-03-24 14:40:28 -0400564 command.append("--runner")
Anas Nashifc11f8ac2020-03-31 08:37:55 -0400565 command.append(self.suite.west_runner)
Anas Nashifce2b4182020-03-24 14:40:28 -0400566 # There are three ways this option is used.
567 # 1) bare: --west-flash
568 # This results in options.west_flash == []
569 # 2) with a value: --west-flash="--board-id=42"
570 # This results in options.west_flash == "--board-id=42"
571 # 3) Multiple values: --west-flash="--board-id=42,--erase"
572 # This results in options.west_flash == "--board-id=42 --erase"
Anas Nashifc11f8ac2020-03-31 08:37:55 -0400573 if self.suite.west_flash != []:
Anas Nashifce2b4182020-03-24 14:40:28 -0400574 command.append('--')
Anas Nashifc11f8ac2020-03-31 08:37:55 -0400575 command.extend(self.suite.west_flash.split(','))
Anas Nashifce2b4182020-03-24 14:40:28 -0400576 else:
577 command = [self.generator_cmd, "-C", self.build_dir, "flash"]
578
Anas Nashif3b86f132020-05-21 10:35:33 -0400579 while not self.device_is_available(self.instance):
Anas Nashifce2b4182020-03-24 14:40:28 -0400580 logger.debug("Waiting for device {} to become available".format(self.instance.platform.name))
581 time.sleep(1)
582
Anas Nashif3b86f132020-05-21 10:35:33 -0400583 hardware = self.get_available_device(self.instance)
Anas Nashifce2b4182020-03-24 14:40:28 -0400584
Anas Nashif3b86f132020-05-21 10:35:33 -0400585 if hardware:
586 runner = hardware.get('runner', None)
Anas Nashifce2b4182020-03-24 14:40:28 -0400587 if runner:
588 board_id = hardware.get("probe_id", hardware.get("id", None))
589 product = hardware.get("product", None)
590 command = ["west", "flash", "--skip-rebuild", "-d", self.build_dir]
591 command.append("--runner")
592 command.append(hardware.get('runner', None))
593 if runner == "pyocd":
594 command.append("--board-id")
595 command.append(board_id)
596 elif runner == "nrfjprog":
597 command.append('--')
598 command.append("--snr")
599 command.append(board_id)
600 elif runner == "openocd" and product == "STM32 STLink":
601 command.append('--')
602 command.append("--cmd-pre-init")
603 command.append("hla_serial %s" % (board_id))
Erwan Gouriou2339fa02020-07-07 17:15:22 +0200604 elif runner == "openocd" and product == "STLINK-V3":
605 command.append('--')
606 command.append("--cmd-pre-init")
607 command.append("hla_serial %s" % (board_id))
Anas Nashifce2b4182020-03-24 14:40:28 -0400608 elif runner == "openocd" and product == "EDBG CMSIS-DAP":
609 command.append('--')
610 command.append("--cmd-pre-init")
611 command.append("cmsis_dap_serial %s" % (board_id))
612 elif runner == "jlink":
613 command.append("--tool-opt=-SelectEmuBySN %s" % (board_id))
614
615 serial_device = hardware['serial']
616
617 try:
618 ser = serial.Serial(
619 serial_device,
620 baudrate=115200,
621 parity=serial.PARITY_NONE,
622 stopbits=serial.STOPBITS_ONE,
623 bytesize=serial.EIGHTBITS,
624 timeout=self.timeout
625 )
626 except serial.SerialException as e:
627 self.set_state("failed", 0)
628 self.instance.reason = "Failed"
629 logger.error("Serial device error: %s" % (str(e)))
630 self.make_device_available(serial_device)
631 return
632
633 ser.flush()
634
635 harness_name = self.instance.testcase.harness.capitalize()
636 harness_import = HarnessImporter(harness_name)
637 harness = harness_import.instance
638 harness.configure(self.instance)
639 read_pipe, write_pipe = os.pipe()
640 start_time = time.time()
641
642 pre_script = hardware.get('pre_script')
643 post_flash_script = hardware.get('post_flash_script')
644 post_script = hardware.get('post_script')
645
646 if pre_script:
647 self.run_custom_script(pre_script, 30)
648
649 t = threading.Thread(target=self.monitor_serial, daemon=True,
650 args=(ser, read_pipe, harness))
651 t.start()
652
653 d_log = "{}/device.log".format(self.instance.build_dir)
654 logger.debug('Flash command: %s', command)
655 try:
656 stdout = stderr = None
657 with subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
658 try:
659 (stdout, stderr) = proc.communicate(timeout=30)
660 logger.debug(stdout.decode())
661
662 if proc.returncode != 0:
663 self.instance.reason = "Device issue (Flash?)"
664 with open(d_log, "w") as dlog_fp:
665 dlog_fp.write(stderr.decode())
666 except subprocess.TimeoutExpired:
667 proc.kill()
668 (stdout, stderr) = proc.communicate()
669 self.instance.reason = "Device issue (Timeout)"
670
671 with open(d_log, "w") as dlog_fp:
672 dlog_fp.write(stderr.decode())
673
674 except subprocess.CalledProcessError:
675 os.write(write_pipe, b'x') # halt the thread
676
677 if post_flash_script:
678 self.run_custom_script(post_flash_script, 30)
679
680
681 t.join(self.timeout)
682 if t.is_alive():
683 logger.debug("Timed out while monitoring serial output on {}".format(self.instance.platform.name))
684 out_state = "timeout"
685
686 if ser.isOpen():
687 ser.close()
688
689 os.close(write_pipe)
690 os.close(read_pipe)
691
692 handler_time = time.time() - start_time
693
694 if out_state == "timeout":
695 for c in self.instance.testcase.cases:
696 if c not in harness.tests:
697 harness.tests[c] = "BLOCK"
698
699 self.instance.reason = "Timeout"
700
701 self.instance.results = harness.tests
702
703 if harness.state:
704 self.set_state(harness.state, handler_time)
705 if harness.state == "failed":
706 self.instance.reason = "Failed"
707 else:
708 self.set_state(out_state, handler_time)
709
710 if post_script:
711 self.run_custom_script(post_script, 30)
712
713 self.make_device_available(serial_device)
714
715 self.record(harness)
716
717
718class QEMUHandler(Handler):
719 """Spawns a thread to monitor QEMU output from pipes
720
721 We pass QEMU_PIPE to 'make run' and monitor the pipes for output.
722 We need to do this as once qemu starts, it runs forever until killed.
723 Test cases emit special messages to the console as they run, we check
724 for these to collect whether the test passed or failed.
725 """
726
727 def __init__(self, instance, type_str):
728 """Constructor
729
730 @param instance Test instance
731 """
732
733 super().__init__(instance, type_str)
734 self.fifo_fn = os.path.join(instance.build_dir, "qemu-fifo")
735
736 self.pid_fn = os.path.join(instance.build_dir, "qemu.pid")
737
738 @staticmethod
Wentong Wu0d619ae2020-05-05 19:46:49 -0400739 def _get_cpu_time(pid):
740 """get process CPU time.
741
742 The guest virtual time in QEMU icount mode isn't host time and
743 it's maintained by counting guest instructions, so we use QEMU
744 process exection time to mostly simulate the time of guest OS.
745 """
746 proc = psutil.Process(pid)
747 cpu_time = proc.cpu_times()
748 return cpu_time.user + cpu_time.system
749
750 @staticmethod
Anas Nashifce2b4182020-03-24 14:40:28 -0400751 def _thread(handler, timeout, outdir, logfile, fifo_fn, pid_fn, results, harness):
752 fifo_in = fifo_fn + ".in"
753 fifo_out = fifo_fn + ".out"
754
755 # These in/out nodes are named from QEMU's perspective, not ours
756 if os.path.exists(fifo_in):
757 os.unlink(fifo_in)
758 os.mkfifo(fifo_in)
759 if os.path.exists(fifo_out):
760 os.unlink(fifo_out)
761 os.mkfifo(fifo_out)
762
763 # We don't do anything with out_fp but we need to open it for
764 # writing so that QEMU doesn't block, due to the way pipes work
765 out_fp = open(fifo_in, "wb")
766 # Disable internal buffering, we don't
767 # want read() or poll() to ever block if there is data in there
768 in_fp = open(fifo_out, "rb", buffering=0)
769 log_out_fp = open(logfile, "wt")
770
771 start_time = time.time()
772 timeout_time = start_time + timeout
773 p = select.poll()
774 p.register(in_fp, select.POLLIN)
775 out_state = None
776
777 line = ""
778 timeout_extended = False
Wentong Wu0d619ae2020-05-05 19:46:49 -0400779
780 pid = 0
781 if os.path.exists(pid_fn):
782 pid = int(open(pid_fn).read())
783
Anas Nashifce2b4182020-03-24 14:40:28 -0400784 while True:
785 this_timeout = int((timeout_time - time.time()) * 1000)
786 if this_timeout < 0 or not p.poll(this_timeout):
Wentong Wu0d619ae2020-05-05 19:46:49 -0400787 if pid and this_timeout > 0:
788 #there is possibility we polled nothing because
789 #of host not scheduled QEMU process enough CPU
790 #time during p.poll(this_timeout)
791 cpu_time = QEMUHandler._get_cpu_time(pid)
792 if cpu_time < timeout and not out_state:
793 timeout_time = time.time() + (timeout - cpu_time)
794 continue
795
Anas Nashifce2b4182020-03-24 14:40:28 -0400796 if not out_state:
797 out_state = "timeout"
798 break
799
Wentong Wu0d619ae2020-05-05 19:46:49 -0400800 if pid == 0 and os.path.exists(pid_fn):
801 pid = int(open(pid_fn).read())
802
Anas Nashifce2b4182020-03-24 14:40:28 -0400803 try:
804 c = in_fp.read(1).decode("utf-8")
805 except UnicodeDecodeError:
806 # Test is writing something weird, fail
807 out_state = "unexpected byte"
808 break
809
810 if c == "":
811 # EOF, this shouldn't happen unless QEMU crashes
812 out_state = "unexpected eof"
813 break
814 line = line + c
815 if c != "\n":
816 continue
817
818 # line contains a full line of data output from QEMU
819 log_out_fp.write(line)
820 log_out_fp.flush()
821 line = line.strip()
822 logger.debug("QEMU: %s" % line)
823
824 harness.handle(line)
825 if harness.state:
826 # if we have registered a fail make sure the state is not
827 # overridden by a false success message coming from the
828 # testsuite
Anas Nashif869ca052020-07-07 14:29:07 -0400829 if out_state not in ['failed', 'unexpected eof', 'unexpected byte']:
Anas Nashifce2b4182020-03-24 14:40:28 -0400830 out_state = harness.state
831
832 # if we get some state, that means test is doing well, we reset
833 # the timeout and wait for 2 more seconds to catch anything
834 # printed late. We wait much longer if code
835 # coverage is enabled since dumping this information can
836 # take some time.
837 if not timeout_extended or harness.capture_coverage:
838 timeout_extended = True
839 if harness.capture_coverage:
840 timeout_time = time.time() + 30
841 else:
842 timeout_time = time.time() + 2
Anas Nashifce2b4182020-03-24 14:40:28 -0400843 line = ""
844
845 handler.record(harness)
846
847 handler_time = time.time() - start_time
848 logger.debug("QEMU complete (%s) after %f seconds" %
849 (out_state, handler_time))
Anas Nashif869ca052020-07-07 14:29:07 -0400850
Anas Nashifce2b4182020-03-24 14:40:28 -0400851 if out_state == "timeout":
852 handler.instance.reason = "Timeout"
Anas Nashif06052922020-07-15 22:44:24 -0400853 handler.set_state("failed", handler_time)
Anas Nashifce2b4182020-03-24 14:40:28 -0400854 elif out_state == "failed":
855 handler.instance.reason = "Failed"
Anas Nashif869ca052020-07-07 14:29:07 -0400856 handler.set_state("failed", handler_time)
Anas Nashif06052922020-07-15 22:44:24 -0400857 elif out_state in ['unexpected eof', 'unexpected byte']:
Anas Nashif869ca052020-07-07 14:29:07 -0400858 handler.instance.reason = out_state
Anas Nashif06052922020-07-15 22:44:24 -0400859 handler.set_state("failed", handler_time)
860 else:
861 handler.set_state(out_state, handler_time)
Anas Nashifce2b4182020-03-24 14:40:28 -0400862
863 log_out_fp.close()
864 out_fp.close()
865 in_fp.close()
Wentong Wu0d619ae2020-05-05 19:46:49 -0400866 if pid:
Anas Nashifce2b4182020-03-24 14:40:28 -0400867 try:
868 if pid:
869 os.kill(pid, signal.SIGTERM)
870 except ProcessLookupError:
871 # Oh well, as long as it's dead! User probably sent Ctrl-C
872 pass
873
874 os.unlink(fifo_in)
875 os.unlink(fifo_out)
876
877 def handle(self):
878 self.results = {}
879 self.run = True
880
881 # We pass this to QEMU which looks for fifos with .in and .out
882 # suffixes.
883 self.fifo_fn = os.path.join(self.instance.build_dir, "qemu-fifo")
884
885 self.pid_fn = os.path.join(self.instance.build_dir, "qemu.pid")
886 if os.path.exists(self.pid_fn):
887 os.unlink(self.pid_fn)
888
889 self.log_fn = self.log
890
891 harness_import = HarnessImporter(self.instance.testcase.harness.capitalize())
892 harness = harness_import.instance
893 harness.configure(self.instance)
894 self.thread = threading.Thread(name=self.name, target=QEMUHandler._thread,
895 args=(self, self.timeout, self.build_dir,
896 self.log_fn, self.fifo_fn,
897 self.pid_fn, self.results, harness))
898
899 self.instance.results = harness.tests
900 self.thread.daemon = True
901 logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
902 self.thread.start()
903 subprocess.call(["stty", "sane"])
904
905 logger.debug("Running %s (%s)" % (self.name, self.type_str))
906 command = [self.generator_cmd]
907 command += ["-C", self.build_dir, "run"]
908
909 with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.build_dir) as proc:
910 logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
Wentong Wu7ec57b42020-05-05 19:19:18 -0400911 try:
912 proc.wait(self.timeout)
913 except subprocess.TimeoutExpired:
914 #sometimes QEMU can't handle SIGTERM signal correctly
915 #in that case kill -9 QEMU process directly and leave
916 #sanitycheck judge testing result by console output
917 if os.path.exists(self.pid_fn):
918 qemu_pid = int(open(self.pid_fn).read())
919 try:
920 os.kill(qemu_pid, signal.SIGKILL)
921 except ProcessLookupError:
922 pass
923 proc.wait()
Wentong Wu6fae53c2020-06-24 22:55:59 +0800924 if harness.state == "passed":
925 self.returncode = 0
926 else:
927 self.returncode = proc.returncode
Wentong Wu7ec57b42020-05-05 19:19:18 -0400928 else:
929 proc.terminate()
930 proc.kill()
931 self.returncode = proc.returncode
932 else:
Anas Nashif869ca052020-07-07 14:29:07 -0400933 logger.debug(f"No timeout, return code from qemu: {self.returncode}")
Wentong Wu7ec57b42020-05-05 19:19:18 -0400934 self.returncode = proc.returncode
935
936 if os.path.exists(self.pid_fn):
937 os.unlink(self.pid_fn)
Anas Nashifce2b4182020-03-24 14:40:28 -0400938
Anas Nashif869ca052020-07-07 14:29:07 -0400939 logger.debug(f"return code from qemu: {self.returncode}")
940
Anas Nashif06052922020-07-15 22:44:24 -0400941 if self.returncode != 0 or not harness.state:
Anas Nashifce2b4182020-03-24 14:40:28 -0400942 self.set_state("failed", 0)
943 self.instance.reason = "Exited with {}".format(self.returncode)
944
945 def get_fifo(self):
946 return self.fifo_fn
947
948
949class SizeCalculator:
950 alloc_sections = [
951 "bss",
952 "noinit",
953 "app_bss",
954 "app_noinit",
955 "ccm_bss",
956 "ccm_noinit"
957 ]
958
959 rw_sections = [
960 "datas",
961 "initlevel",
962 "exceptions",
963 "initshell",
Andrew Boie45979da2020-05-23 14:38:39 -0700964 "_static_thread_data_area",
965 "k_timer_area",
966 "k_mem_slab_area",
967 "k_mem_pool_area",
Anas Nashifce2b4182020-03-24 14:40:28 -0400968 "sw_isr_table",
Andrew Boie45979da2020-05-23 14:38:39 -0700969 "k_sem_area",
970 "k_mutex_area",
Anas Nashifce2b4182020-03-24 14:40:28 -0400971 "app_shmem_regions",
972 "_k_fifo_area",
973 "_k_lifo_area",
Andrew Boie45979da2020-05-23 14:38:39 -0700974 "k_stack_area",
975 "k_msgq_area",
976 "k_mbox_area",
977 "k_pipe_area",
Anas Nashifce2b4182020-03-24 14:40:28 -0400978 "net_if",
979 "net_if_dev",
Anas Nashifce2b4182020-03-24 14:40:28 -0400980 "net_l2_data",
Andrew Boie45979da2020-05-23 14:38:39 -0700981 "k_queue_area",
Anas Nashifce2b4182020-03-24 14:40:28 -0400982 "_net_buf_pool_area",
983 "app_datas",
984 "kobject_data",
985 "mmu_tables",
986 "app_pad",
987 "priv_stacks",
988 "ccm_data",
989 "usb_descriptor",
990 "usb_data", "usb_bos_desc",
Jukka Rissanen420b1952020-04-01 12:47:53 +0300991 "uart_mux",
Anas Nashifce2b4182020-03-24 14:40:28 -0400992 'log_backends_sections',
993 'log_dynamic_sections',
994 'log_const_sections',
995 "app_smem",
996 'shell_root_cmds_sections',
997 'log_const_sections',
998 "font_entry_sections",
999 "priv_stacks_noinit",
1000 "_GCOV_BSS_SECTION_NAME",
1001 "gcov",
1002 "nocache"
1003 ]
1004
1005 # These get copied into RAM only on non-XIP
1006 ro_sections = [
1007 "rom_start",
1008 "text",
1009 "ctors",
1010 "init_array",
1011 "reset",
Andrew Boie45979da2020-05-23 14:38:39 -07001012 "z_object_assignment_area",
Anas Nashifce2b4182020-03-24 14:40:28 -04001013 "rodata",
1014 "devconfig",
1015 "net_l2",
1016 "vector",
1017 "sw_isr_table",
Andrew Boie45979da2020-05-23 14:38:39 -07001018 "settings_handler_static_area",
1019 "bt_l2cap_fixed_chan",
1020 "bt_l2cap_br_fixec_chan",
1021 "bt_gatt_service_static",
Anas Nashifce2b4182020-03-24 14:40:28 -04001022 "vectors",
Andrew Boie45979da2020-05-23 14:38:39 -07001023 "net_socket_register_area",
1024 "net_ppp_proto",
1025 "shell_area",
1026 "tracing_backend_area",
Anas Nashifce2b4182020-03-24 14:40:28 -04001027 ]
1028
1029 def __init__(self, filename, extra_sections):
1030 """Constructor
1031
1032 @param filename Path to the output binary
1033 The <filename> is parsed by objdump to determine section sizes
1034 """
1035 # Make sure this is an ELF binary
1036 with open(filename, "rb") as f:
1037 magic = f.read(4)
1038
1039 try:
1040 if magic != b'\x7fELF':
1041 raise SanityRuntimeError("%s is not an ELF binary" % filename)
1042 except Exception as e:
1043 print(str(e))
1044 sys.exit(2)
1045
1046 # Search for CONFIG_XIP in the ELF's list of symbols using NM and AWK.
1047 # GREP can not be used as it returns an error if the symbol is not
1048 # found.
1049 is_xip_command = "nm " + filename + \
1050 " | awk '/CONFIG_XIP/ { print $3 }'"
1051 is_xip_output = subprocess.check_output(
1052 is_xip_command, shell=True, stderr=subprocess.STDOUT).decode(
1053 "utf-8").strip()
1054 try:
1055 if is_xip_output.endswith("no symbols"):
1056 raise SanityRuntimeError("%s has no symbol information" % filename)
1057 except Exception as e:
1058 print(str(e))
1059 sys.exit(2)
1060
1061 self.is_xip = (len(is_xip_output) != 0)
1062
1063 self.filename = filename
1064 self.sections = []
1065 self.rom_size = 0
1066 self.ram_size = 0
1067 self.extra_sections = extra_sections
1068
1069 self._calculate_sizes()
1070
1071 def get_ram_size(self):
1072 """Get the amount of RAM the application will use up on the device
1073
1074 @return amount of RAM, in bytes
1075 """
1076 return self.ram_size
1077
1078 def get_rom_size(self):
1079 """Get the size of the data that this application uses on device's flash
1080
1081 @return amount of ROM, in bytes
1082 """
1083 return self.rom_size
1084
1085 def unrecognized_sections(self):
1086 """Get a list of sections inside the binary that weren't recognized
1087
1088 @return list of unrecognized section names
1089 """
1090 slist = []
1091 for v in self.sections:
1092 if not v["recognized"]:
1093 slist.append(v["name"])
1094 return slist
1095
1096 def _calculate_sizes(self):
1097 """ Calculate RAM and ROM usage by section """
1098 objdump_command = "objdump -h " + self.filename
1099 objdump_output = subprocess.check_output(
1100 objdump_command, shell=True).decode("utf-8").splitlines()
1101
1102 for line in objdump_output:
1103 words = line.split()
1104
1105 if not words: # Skip lines that are too short
1106 continue
1107
1108 index = words[0]
1109 if not index[0].isdigit(): # Skip lines that do not start
1110 continue # with a digit
1111
1112 name = words[1] # Skip lines with section names
1113 if name[0] == '.': # starting with '.'
1114 continue
1115
1116 # TODO this doesn't actually reflect the size in flash or RAM as
1117 # it doesn't include linker-imposed padding between sections.
1118 # It is close though.
1119 size = int(words[2], 16)
1120 if size == 0:
1121 continue
1122
1123 load_addr = int(words[4], 16)
1124 virt_addr = int(words[3], 16)
1125
1126 # Add section to memory use totals (for both non-XIP and XIP scenarios)
1127 # Unrecognized section names are not included in the calculations.
1128 recognized = True
1129 if name in SizeCalculator.alloc_sections:
1130 self.ram_size += size
1131 stype = "alloc"
1132 elif name in SizeCalculator.rw_sections:
1133 self.ram_size += size
1134 self.rom_size += size
1135 stype = "rw"
1136 elif name in SizeCalculator.ro_sections:
1137 self.rom_size += size
1138 if not self.is_xip:
1139 self.ram_size += size
1140 stype = "ro"
1141 else:
1142 stype = "unknown"
1143 if name not in self.extra_sections:
1144 recognized = False
1145
1146 self.sections.append({"name": name, "load_addr": load_addr,
1147 "size": size, "virt_addr": virt_addr,
1148 "type": stype, "recognized": recognized})
1149
1150
1151
1152class SanityConfigParser:
1153 """Class to read test case files with semantic checking
1154 """
1155
1156 def __init__(self, filename, schema):
1157 """Instantiate a new SanityConfigParser object
1158
1159 @param filename Source .yaml file to read
1160 """
1161 self.data = {}
1162 self.schema = schema
1163 self.filename = filename
1164 self.tests = {}
1165 self.common = {}
1166
1167 def load(self):
1168 self.data = scl.yaml_load_verify(self.filename, self.schema)
1169
1170 if 'tests' in self.data:
1171 self.tests = self.data['tests']
1172 if 'common' in self.data:
1173 self.common = self.data['common']
1174
1175 def _cast_value(self, value, typestr):
1176 if isinstance(value, str):
1177 v = value.strip()
1178 if typestr == "str":
1179 return v
1180
1181 elif typestr == "float":
1182 return float(value)
1183
1184 elif typestr == "int":
1185 return int(value)
1186
1187 elif typestr == "bool":
1188 return value
1189
1190 elif typestr.startswith("list") and isinstance(value, list):
1191 return value
1192 elif typestr.startswith("list") and isinstance(value, str):
1193 vs = v.split()
1194 if len(typestr) > 4 and typestr[4] == ":":
1195 return [self._cast_value(vsi, typestr[5:]) for vsi in vs]
1196 else:
1197 return vs
1198
1199 elif typestr.startswith("set"):
1200 vs = v.split()
1201 if len(typestr) > 3 and typestr[3] == ":":
1202 return {self._cast_value(vsi, typestr[4:]) for vsi in vs}
1203 else:
1204 return set(vs)
1205
1206 elif typestr.startswith("map"):
1207 return value
1208 else:
1209 raise ConfigurationError(
1210 self.filename, "unknown type '%s'" % value)
1211
1212 def get_test(self, name, valid_keys):
1213 """Get a dictionary representing the keys/values within a test
1214
1215 @param name The test in the .yaml file to retrieve data from
1216 @param valid_keys A dictionary representing the intended semantics
1217 for this test. Each key in this dictionary is a key that could
1218 be specified, if a key is given in the .yaml file which isn't in
1219 here, it will generate an error. Each value in this dictionary
1220 is another dictionary containing metadata:
1221
1222 "default" - Default value if not given
1223 "type" - Data type to convert the text value to. Simple types
1224 supported are "str", "float", "int", "bool" which will get
1225 converted to respective Python data types. "set" and "list"
1226 may also be specified which will split the value by
1227 whitespace (but keep the elements as strings). finally,
1228 "list:<type>" and "set:<type>" may be given which will
1229 perform a type conversion after splitting the value up.
1230 "required" - If true, raise an error if not defined. If false
1231 and "default" isn't specified, a type conversion will be
1232 done on an empty string
1233 @return A dictionary containing the test key-value pairs with
1234 type conversion and default values filled in per valid_keys
1235 """
1236
1237 d = {}
1238 for k, v in self.common.items():
1239 d[k] = v
1240
1241 for k, v in self.tests[name].items():
1242 if k not in valid_keys:
1243 raise ConfigurationError(
1244 self.filename,
1245 "Unknown config key '%s' in definition for '%s'" %
1246 (k, name))
1247
1248 if k in d:
1249 if isinstance(d[k], str):
1250 # By default, we just concatenate string values of keys
1251 # which appear both in "common" and per-test sections,
1252 # but some keys are handled in adhoc way based on their
1253 # semantics.
1254 if k == "filter":
1255 d[k] = "(%s) and (%s)" % (d[k], v)
1256 else:
1257 d[k] += " " + v
1258 else:
1259 d[k] = v
1260
1261 for k, kinfo in valid_keys.items():
1262 if k not in d:
1263 if "required" in kinfo:
1264 required = kinfo["required"]
1265 else:
1266 required = False
1267
1268 if required:
1269 raise ConfigurationError(
1270 self.filename,
1271 "missing required value for '%s' in test '%s'" %
1272 (k, name))
1273 else:
1274 if "default" in kinfo:
1275 default = kinfo["default"]
1276 else:
1277 default = self._cast_value("", kinfo["type"])
1278 d[k] = default
1279 else:
1280 try:
1281 d[k] = self._cast_value(d[k], kinfo["type"])
1282 except ValueError:
1283 raise ConfigurationError(
1284 self.filename, "bad %s value '%s' for key '%s' in name '%s'" %
1285 (kinfo["type"], d[k], k, name))
1286
1287 return d
1288
1289
1290class Platform:
1291 """Class representing metadata for a particular platform
1292
1293 Maps directly to BOARD when building"""
1294
1295 platform_schema = scl.yaml_load(os.path.join(ZEPHYR_BASE,
1296 "scripts", "sanity_chk", "platform-schema.yaml"))
1297
1298 def __init__(self):
1299 """Constructor.
1300
1301 """
1302
1303 self.name = ""
1304 self.sanitycheck = True
1305 # if no RAM size is specified by the board, take a default of 128K
1306 self.ram = 128
1307
1308 self.ignore_tags = []
1309 self.default = False
1310 # if no flash size is specified by the board, take a default of 512K
1311 self.flash = 512
1312 self.supported = set()
1313
1314 self.arch = ""
1315 self.type = "na"
1316 self.simulation = "na"
1317 self.supported_toolchains = []
1318 self.env = []
1319 self.env_satisfied = True
1320 self.filter_data = dict()
1321
1322 def load(self, platform_file):
1323 scp = SanityConfigParser(platform_file, self.platform_schema)
1324 scp.load()
1325 data = scp.data
1326
1327 self.name = data['identifier']
1328 self.sanitycheck = data.get("sanitycheck", True)
1329 # if no RAM size is specified by the board, take a default of 128K
1330 self.ram = data.get("ram", 128)
1331 testing = data.get("testing", {})
1332 self.ignore_tags = testing.get("ignore_tags", [])
1333 self.default = testing.get("default", False)
1334 # if no flash size is specified by the board, take a default of 512K
1335 self.flash = data.get("flash", 512)
1336 self.supported = set()
1337 for supp_feature in data.get("supported", []):
1338 for item in supp_feature.split(":"):
1339 self.supported.add(item)
1340
1341 self.arch = data['arch']
1342 self.type = data.get('type', "na")
1343 self.simulation = data.get('simulation', "na")
1344 self.supported_toolchains = data.get("toolchain", [])
1345 self.env = data.get("env", [])
1346 self.env_satisfied = True
1347 for env in self.env:
1348 if not os.environ.get(env, None):
1349 self.env_satisfied = False
1350
1351 def __repr__(self):
1352 return "<%s on %s>" % (self.name, self.arch)
1353
1354
Anas Nashifaff616d2020-04-17 21:24:57 -04001355class DisablePyTestCollectionMixin(object):
1356 __test__ = False
1357
1358
1359class TestCase(DisablePyTestCollectionMixin):
Anas Nashifce2b4182020-03-24 14:40:28 -04001360 """Class representing a test application
1361 """
1362
Anas Nashifaff616d2020-04-17 21:24:57 -04001363 def __init__(self, testcase_root, workdir, name):
Anas Nashifce2b4182020-03-24 14:40:28 -04001364 """TestCase constructor.
1365
1366 This gets called by TestSuite as it finds and reads test yaml files.
1367 Multiple TestCase instances may be generated from a single testcase.yaml,
1368 each one corresponds to an entry within that file.
1369
1370 We need to have a unique name for every single test case. Since
1371 a testcase.yaml can define multiple tests, the canonical name for
1372 the test case is <workdir>/<name>.
1373
1374 @param testcase_root os.path.abspath() of one of the --testcase-root
1375 @param workdir Sub-directory of testcase_root where the
1376 .yaml test configuration file was found
1377 @param name Name of this test case, corresponding to the entry name
1378 in the test case configuration file. For many test cases that just
1379 define one test, can be anything and is usually "test". This is
1380 really only used to distinguish between different cases when
1381 the testcase.yaml defines multiple tests
Anas Nashifce2b4182020-03-24 14:40:28 -04001382 """
1383
Anas Nashifaff616d2020-04-17 21:24:57 -04001384
Anas Nashifce2b4182020-03-24 14:40:28 -04001385 self.source_dir = ""
1386 self.yamlfile = ""
1387 self.cases = []
Anas Nashifaff616d2020-04-17 21:24:57 -04001388 self.name = self.get_unique(testcase_root, workdir, name)
1389 self.id = name
Anas Nashifce2b4182020-03-24 14:40:28 -04001390
1391 self.type = None
Anas Nashifaff616d2020-04-17 21:24:57 -04001392 self.tags = set()
Anas Nashifce2b4182020-03-24 14:40:28 -04001393 self.extra_args = None
1394 self.extra_configs = None
1395 self.arch_whitelist = None
1396 self.arch_exclude = None
Anas Nashifaff616d2020-04-17 21:24:57 -04001397 self.skip = False
Anas Nashifce2b4182020-03-24 14:40:28 -04001398 self.platform_exclude = None
1399 self.platform_whitelist = None
1400 self.toolchain_exclude = None
1401 self.toolchain_whitelist = None
1402 self.tc_filter = None
1403 self.timeout = 60
1404 self.harness = ""
1405 self.harness_config = {}
1406 self.build_only = True
1407 self.build_on_all = False
1408 self.slow = False
Anas Nashifaff616d2020-04-17 21:24:57 -04001409 self.min_ram = -1
Anas Nashifce2b4182020-03-24 14:40:28 -04001410 self.depends_on = None
Anas Nashifaff616d2020-04-17 21:24:57 -04001411 self.min_flash = -1
Anas Nashifce2b4182020-03-24 14:40:28 -04001412 self.extra_sections = None
1413
1414 @staticmethod
1415 def get_unique(testcase_root, workdir, name):
1416
1417 canonical_testcase_root = os.path.realpath(testcase_root)
1418 if Path(canonical_zephyr_base) in Path(canonical_testcase_root).parents:
1419 # This is in ZEPHYR_BASE, so include path in name for uniqueness
1420 # FIXME: We should not depend on path of test for unique names.
1421 relative_tc_root = os.path.relpath(canonical_testcase_root,
1422 start=canonical_zephyr_base)
1423 else:
1424 relative_tc_root = ""
1425
1426 # workdir can be "."
1427 unique = os.path.normpath(os.path.join(relative_tc_root, workdir, name))
Anas Nashif7a691252020-05-07 07:47:51 -04001428 check = name.split(".")
1429 if len(check) < 2:
1430 raise SanityCheckException(f"""bad test name '{name}' in {testcase_root}/{workdir}. \
1431Tests should reference the category and subsystem with a dot as a separator.
1432 """
1433 )
Anas Nashifce2b4182020-03-24 14:40:28 -04001434 return unique
1435
1436 @staticmethod
1437 def scan_file(inf_name):
1438 suite_regex = re.compile(
1439 # do not match until end-of-line, otherwise we won't allow
1440 # stc_regex below to catch the ones that are declared in the same
1441 # line--as we only search starting the end of this match
1442 br"^\s*ztest_test_suite\(\s*(?P<suite_name>[a-zA-Z0-9_]+)\s*,",
1443 re.MULTILINE)
1444 stc_regex = re.compile(
1445 br"^\s*" # empy space at the beginning is ok
1446 # catch the case where it is declared in the same sentence, e.g:
1447 #
1448 # ztest_test_suite(mutex_complex, ztest_user_unit_test(TESTNAME));
1449 br"(?:ztest_test_suite\([a-zA-Z0-9_]+,\s*)?"
1450 # Catch ztest[_user]_unit_test-[_setup_teardown](TESTNAME)
1451 br"ztest_(?:1cpu_)?(?:user_)?unit_test(?:_setup_teardown)?"
1452 # Consume the argument that becomes the extra testcse
1453 br"\(\s*"
1454 br"(?P<stc_name>[a-zA-Z0-9_]+)"
1455 # _setup_teardown() variant has two extra arguments that we ignore
1456 br"(?:\s*,\s*[a-zA-Z0-9_]+\s*,\s*[a-zA-Z0-9_]+)?"
1457 br"\s*\)",
1458 # We don't check how it finishes; we don't care
1459 re.MULTILINE)
1460 suite_run_regex = re.compile(
1461 br"^\s*ztest_run_test_suite\((?P<suite_name>[a-zA-Z0-9_]+)\)",
1462 re.MULTILINE)
1463 achtung_regex = re.compile(
1464 br"(#ifdef|#endif)",
1465 re.MULTILINE)
1466 warnings = None
1467
1468 with open(inf_name) as inf:
1469 if os.name == 'nt':
1470 mmap_args = {'fileno': inf.fileno(), 'length': 0, 'access': mmap.ACCESS_READ}
1471 else:
1472 mmap_args = {'fileno': inf.fileno(), 'length': 0, 'flags': mmap.MAP_PRIVATE, 'prot': mmap.PROT_READ,
1473 'offset': 0}
1474
1475 with contextlib.closing(mmap.mmap(**mmap_args)) as main_c:
Anas Nashifce2b4182020-03-24 14:40:28 -04001476 suite_regex_match = suite_regex.search(main_c)
1477 if not suite_regex_match:
1478 # can't find ztest_test_suite, maybe a client, because
1479 # it includes ztest.h
1480 return None, None
1481
1482 suite_run_match = suite_run_regex.search(main_c)
1483 if not suite_run_match:
1484 raise ValueError("can't find ztest_run_test_suite")
1485
1486 achtung_matches = re.findall(
1487 achtung_regex,
1488 main_c[suite_regex_match.end():suite_run_match.start()])
1489 if achtung_matches:
1490 warnings = "found invalid %s in ztest_test_suite()" \
Spoorthy Priya Yeraboluad4d4fc2020-06-25 02:57:05 -07001491 % ", ".join(sorted({match.decode() for match in achtung_matches},reverse = True))
Anas Nashifce2b4182020-03-24 14:40:28 -04001492 _matches = re.findall(
1493 stc_regex,
1494 main_c[suite_regex_match.end():suite_run_match.start()])
Anas Nashif44f7ba02020-05-12 12:26:41 -04001495 for match in _matches:
1496 if not match.decode().startswith("test_"):
1497 warnings = "Found a test that does not start with test_"
Anas Nashifce2b4182020-03-24 14:40:28 -04001498 matches = [match.decode().replace("test_", "") for match in _matches]
1499 return matches, warnings
1500
1501 def scan_path(self, path):
1502 subcases = []
Anas Nashif91fd68d2020-05-08 07:22:58 -04001503 for filename in glob.glob(os.path.join(path, "src", "*.c*")):
Anas Nashifce2b4182020-03-24 14:40:28 -04001504 try:
1505 _subcases, warnings = self.scan_file(filename)
1506 if warnings:
1507 logger.error("%s: %s" % (filename, warnings))
Anas Nashif61c6e2b2020-05-07 07:03:30 -04001508 raise SanityRuntimeError("%s: %s" % (filename, warnings))
Anas Nashifce2b4182020-03-24 14:40:28 -04001509 if _subcases:
1510 subcases += _subcases
1511 except ValueError as e:
1512 logger.error("%s: can't find: %s" % (filename, e))
Anas Nashif61c6e2b2020-05-07 07:03:30 -04001513
Anas Nashifce2b4182020-03-24 14:40:28 -04001514 for filename in glob.glob(os.path.join(path, "*.c")):
1515 try:
1516 _subcases, warnings = self.scan_file(filename)
1517 if warnings:
1518 logger.error("%s: %s" % (filename, warnings))
1519 if _subcases:
1520 subcases += _subcases
1521 except ValueError as e:
1522 logger.error("%s: can't find: %s" % (filename, e))
1523 return subcases
1524
1525 def parse_subcases(self, test_path):
1526 results = self.scan_path(test_path)
1527 for sub in results:
1528 name = "{}.{}".format(self.id, sub)
1529 self.cases.append(name)
1530
1531 if not results:
1532 self.cases.append(self.id)
1533
1534 def __str__(self):
1535 return self.name
1536
1537
Anas Nashifaff616d2020-04-17 21:24:57 -04001538class TestInstance(DisablePyTestCollectionMixin):
Anas Nashifce2b4182020-03-24 14:40:28 -04001539 """Class representing the execution of a particular TestCase on a platform
1540
1541 @param test The TestCase object we want to build/execute
1542 @param platform Platform object that we want to build and run against
1543 @param base_outdir Base directory for all test results. The actual
1544 out directory used is <outdir>/<platform>/<test case name>
1545 """
1546
1547 def __init__(self, testcase, platform, outdir):
1548
1549 self.testcase = testcase
1550 self.platform = platform
1551
1552 self.status = None
1553 self.reason = "Unknown"
1554 self.metrics = dict()
1555 self.handler = None
1556 self.outdir = outdir
1557
1558 self.name = os.path.join(platform.name, testcase.name)
1559 self.build_dir = os.path.join(outdir, platform.name, testcase.name)
1560
1561 self.build_only = True
1562 self.run = False
1563
1564 self.results = {}
1565
1566 def __lt__(self, other):
1567 return self.name < other.name
1568
Anas Nashifaff616d2020-04-17 21:24:57 -04001569 # Global testsuite parameters
Anas Nashifce8c12e2020-05-21 09:11:40 -04001570 def check_build_or_run(self, build_only=False, enable_slow=False, device_testing=False, fixtures=[]):
Anas Nashifce2b4182020-03-24 14:40:28 -04001571
1572 # right now we only support building on windows. running is still work
1573 # in progress.
1574 if os.name == 'nt':
1575 self.build_only = True
1576 self.run = False
1577 return
1578
1579 _build_only = True
1580
1581 # we asked for build-only on the command line
1582 if build_only or self.testcase.build_only:
1583 self.build_only = True
1584 self.run = False
1585 return
1586
1587 # Do not run slow tests:
1588 skip_slow = self.testcase.slow and not enable_slow
1589 if skip_slow:
1590 self.build_only = True
1591 self.run = False
1592 return
1593
1594 runnable = bool(self.testcase.type == "unit" or \
1595 self.platform.type == "native" or \
1596 self.platform.simulation in ["nsim", "renode", "qemu"] or \
1597 device_testing)
1598
1599 if self.platform.simulation == "nsim":
1600 if not find_executable("nsimdrv"):
1601 runnable = False
1602
1603 if self.platform.simulation == "renode":
1604 if not find_executable("renode"):
1605 runnable = False
1606
1607 # console harness allows us to run the test and capture data.
Anas Nashifce8c12e2020-05-21 09:11:40 -04001608 if self.testcase.harness in [ 'console', 'ztest']:
Anas Nashifce2b4182020-03-24 14:40:28 -04001609
1610 # if we have a fixture that is also being supplied on the
1611 # command-line, then we need to run the test, not just build it.
Anas Nashifce8c12e2020-05-21 09:11:40 -04001612 fixture = self.testcase.harness_config.get('fixture')
1613 if fixture:
1614 if fixture in fixtures:
Anas Nashifce2b4182020-03-24 14:40:28 -04001615 _build_only = False
1616 else:
1617 _build_only = True
1618 else:
1619 _build_only = False
Anas Nashif3b86f132020-05-21 10:35:33 -04001620
Anas Nashifce2b4182020-03-24 14:40:28 -04001621 elif self.testcase.harness:
1622 _build_only = True
1623 else:
1624 _build_only = False
1625
1626 self.build_only = not (not _build_only and runnable)
1627 self.run = not self.build_only
1628 return
1629
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02001630 def create_overlay(self, platform, enable_asan=False, enable_ubsan=False, enable_coverage=False, coverage_platform=[]):
Anas Nashifce2b4182020-03-24 14:40:28 -04001631 # Create this in a "sanitycheck/" subdirectory otherwise this
1632 # will pass this overlay to kconfig.py *twice* and kconfig.cmake
1633 # will silently give that second time precedence over any
1634 # --extra-args=CONFIG_*
1635 subdir = os.path.join(self.build_dir, "sanitycheck")
1636 os.makedirs(subdir, exist_ok=True)
1637 file = os.path.join(subdir, "testcase_extra.conf")
1638
1639 with open(file, "w") as f:
1640 content = ""
1641
1642 if self.testcase.extra_configs:
1643 content = "\n".join(self.testcase.extra_configs)
1644
1645 if enable_coverage:
1646 if platform.name in coverage_platform:
1647 content = content + "\nCONFIG_COVERAGE=y"
1648 content = content + "\nCONFIG_COVERAGE_DUMP=y"
1649
1650 if enable_asan:
1651 if platform.type == "native":
1652 content = content + "\nCONFIG_ASAN=y"
1653
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02001654 if enable_ubsan:
1655 if platform.type == "native":
1656 content = content + "\nCONFIG_UBSAN=y"
1657
Anas Nashifce2b4182020-03-24 14:40:28 -04001658 f.write(content)
Spoorthy Priya Yerabolud434dfc2020-05-30 03:38:35 -07001659 return content
Anas Nashifce2b4182020-03-24 14:40:28 -04001660
1661 def calculate_sizes(self):
1662 """Get the RAM/ROM sizes of a test case.
1663
1664 This can only be run after the instance has been executed by
1665 MakeGenerator, otherwise there won't be any binaries to measure.
1666
1667 @return A SizeCalculator object
1668 """
1669 fns = glob.glob(os.path.join(self.build_dir, "zephyr", "*.elf"))
1670 fns.extend(glob.glob(os.path.join(self.build_dir, "zephyr", "*.exe")))
1671 fns = [x for x in fns if not x.endswith('_prebuilt.elf')]
1672 if len(fns) != 1:
1673 raise BuildError("Missing/multiple output ELF binary")
1674
1675 return SizeCalculator(fns[0], self.testcase.extra_sections)
1676
1677 def __repr__(self):
1678 return "<TestCase %s on %s>" % (self.testcase.name, self.platform.name)
1679
1680
1681class CMake():
1682 config_re = re.compile('(CONFIG_[A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
1683 dt_re = re.compile('([A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
1684
1685 def __init__(self, testcase, platform, source_dir, build_dir):
1686
1687 self.cwd = None
1688 self.capture_output = True
1689
1690 self.defconfig = {}
1691 self.cmake_cache = {}
1692
1693 self.instance = None
1694 self.testcase = testcase
1695 self.platform = platform
1696 self.source_dir = source_dir
1697 self.build_dir = build_dir
1698 self.log = "build.log"
1699 self.generator = None
1700 self.generator_cmd = None
1701
1702 def parse_generated(self):
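        # Base implementation is a stub; FilterBuilder overrides this to parse
        # the generated .config and CMakeCache.txt once cmake has run.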
1703 self.defconfig = {}
1704 return {}
1705
1706 def run_build(self, args=[]):
1707
1708 logger.debug("Building %s for %s" % (self.source_dir, self.platform.name))
1709
1710 cmake_args = []
1711 cmake_args.extend(args)
1712 cmake = shutil.which('cmake')
1713 cmd = [cmake] + cmake_args
1714 kwargs = dict()
1715
1716 if self.capture_output:
1717 kwargs['stdout'] = subprocess.PIPE
1718 # CMake sends the output of message() to stderr unless it's STATUS
1719 kwargs['stderr'] = subprocess.STDOUT
1720
1721 if self.cwd:
1722 kwargs['cwd'] = self.cwd
1723
1724 p = subprocess.Popen(cmd, **kwargs)
1725 out, _ = p.communicate()
1726
1727 results = {}
1728 if p.returncode == 0:
1729 msg = "Finished building %s for %s" % (self.source_dir, self.platform.name)
1730
1731 self.instance.status = "passed"
1732 results = {'msg': msg, "returncode": p.returncode, "instance": self.instance}
1733
1734 if out:
1735 log_msg = out.decode(sys.getdefaultencoding())
1736 with open(os.path.join(self.build_dir, self.log), "a") as log:
1737 log.write(log_msg)
1738
1739 else:
1740 return None
1741 else:
1742 # A real error occurred, raise an exception
1743 if out:
1744 log_msg = out.decode(sys.getdefaultencoding())
1745 with open(os.path.join(self.build_dir, self.log), "a") as log:
1746 log.write(log_msg)
1747
1748 if log_msg:
1749 res = re.findall("region `(FLASH|RAM|SRAM)' overflowed by", log_msg)
1750 if res:
1751 logger.debug("Test skipped due to {} Overflow".format(res[0]))
1752 self.instance.status = "skipped"
1753 self.instance.reason = "{} overflow".format(res[0])
1754 else:
Anas Nashiff04461e2020-06-29 10:07:02 -04001755 self.instance.status = "error"
Anas Nashifce2b4182020-03-24 14:40:28 -04001756 self.instance.reason = "Build failure"
1757
1758 results = {
1759 "returncode": p.returncode,
1760 "instance": self.instance,
1761 }
1762
1763 return results
1764
1765 def run_cmake(self, args=[]):
1766
1767 ldflags = "-Wl,--fatal-warnings"
1768 logger.debug("Running cmake on %s for %s" % (self.source_dir, self.platform.name))
1769
1770 # fixme: add additional cflags based on options
1771 cmake_args = [
1772 '-B{}'.format(self.build_dir),
1773 '-S{}'.format(self.source_dir),
1774 '-DEXTRA_CFLAGS="-Werror ',
1775 '-DEXTRA_AFLAGS=-Wa,--fatal-warnings',
1776 '-DEXTRA_LDFLAGS="{}'.format(ldflags),
1777 '-G{}'.format(self.generator)
1778 ]
1779
1780 if self.cmake_only:
1781 cmake_args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=1")
1782
1783 args = ["-D{}".format(a.replace('"', '')) for a in args]
1784 cmake_args.extend(args)
1785
1786 cmake_opts = ['-DBOARD={}'.format(self.platform.name)]
1787 cmake_args.extend(cmake_opts)
1788
1789
1790 logger.debug("Calling cmake with arguments: {}".format(cmake_args))
1791 cmake = shutil.which('cmake')
1792 cmd = [cmake] + cmake_args
1793 kwargs = dict()
1794
1795 if self.capture_output:
1796 kwargs['stdout'] = subprocess.PIPE
1797 # CMake sends the output of message() to stderr unless it's STATUS
1798 kwargs['stderr'] = subprocess.STDOUT
1799
1800 if self.cwd:
1801 kwargs['cwd'] = self.cwd
1802
1803 p = subprocess.Popen(cmd, **kwargs)
1804 out, _ = p.communicate()
1805
1806 if p.returncode == 0:
1807 filter_results = self.parse_generated()
1808 msg = "Finished building %s for %s" % (self.source_dir, self.platform.name)
1809 logger.debug(msg)
1810 results = {'msg': msg, 'filter': filter_results}
1811
1812 else:
Anas Nashiff04461e2020-06-29 10:07:02 -04001813 self.instance.status = "error"
Anas Nashifce2b4182020-03-24 14:40:28 -04001814 self.instance.reason = "Cmake build failure"
1815 logger.error("Cmake build failure: %s for %s" % (self.source_dir, self.platform.name))
1816 results = {"returncode": p.returncode}
1817
1818 if out:
1819 with open(os.path.join(self.build_dir, self.log), "a") as log:
1820 log_msg = out.decode(sys.getdefaultencoding())
1821 log.write(log_msg)
1822
1823 return results
1824
1825
1826class FilterBuilder(CMake):
1827
1828 def __init__(self, testcase, platform, source_dir, build_dir):
1829 super().__init__(testcase, platform, source_dir, build_dir)
1830
1831 self.log = "config-sanitycheck.log"
1832
1833 def parse_generated(self):
1834
1835 if self.platform.name == "unit_testing":
1836 return {}
1837
1838 cmake_cache_path = os.path.join(self.build_dir, "CMakeCache.txt")
1839 defconfig_path = os.path.join(self.build_dir, "zephyr", ".config")
1840
1841 with open(defconfig_path, "r") as fp:
1842 defconfig = {}
1843 for line in fp.readlines():
1844 m = self.config_re.match(line)
1845 if not m:
1846 if line.strip() and not line.startswith("#"):
1847 sys.stderr.write("Unrecognized line %s\n" % line)
1848 continue
1849 defconfig[m.group(1)] = m.group(2).strip()
1850
1851 self.defconfig = defconfig
1852
1853 cmake_conf = {}
1854 try:
1855 cache = CMakeCache.from_file(cmake_cache_path)
1856 except FileNotFoundError:
1857 cache = {}
1858
1859 for k in iter(cache):
1860 cmake_conf[k.name] = k.value
1861
1862 self.cmake_cache = cmake_conf
1863
1864 filter_data = {
1865 "ARCH": self.platform.arch,
1866 "PLATFORM": self.platform.name
1867 }
1868 filter_data.update(os.environ)
1869 filter_data.update(self.defconfig)
1870 filter_data.update(self.cmake_cache)
1871
1872 dts_path = os.path.join(self.build_dir, "zephyr", self.platform.name + ".dts.pre.tmp")
1873 if self.testcase and self.testcase.tc_filter:
1874 try:
1875 if os.path.exists(dts_path):
Kumar Gala6a2cb942020-05-08 16:32:16 -05001876 edt = edtlib.EDT(dts_path, [os.path.join(ZEPHYR_BASE, "dts", "bindings")],
1877 warn_reg_unit_address_mismatch=False)
Anas Nashifce2b4182020-03-24 14:40:28 -04001878 else:
1879 edt = None
1880 res = expr_parser.parse(self.testcase.tc_filter, filter_data, edt)
1881
1882 except (ValueError, SyntaxError) as se:
1883 sys.stderr.write(
1884 "Failed processing %s\n" % self.testcase.yamlfile)
1885 raise se
1886
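                    # A value of True in the returned mapping marks this
                    # instance as filtered out (the testcase filter expression
                    # was not satisfied).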
1887 if not res:
1888 return {os.path.join(self.platform.name, self.testcase.name): True}
1889 else:
1890 return {os.path.join(self.platform.name, self.testcase.name): False}
1891 else:
1892 self.platform.filter_data = filter_data
1893 return filter_data
1894
1895
1896class ProjectBuilder(FilterBuilder):
1897
1898 def __init__(self, suite, instance, **kwargs):
1899 super().__init__(instance.testcase, instance.platform, instance.testcase.source_dir, instance.build_dir)
1900
1901 self.log = "build.log"
1902 self.instance = instance
1903 self.suite = suite
1904
1905 self.lsan = kwargs.get('lsan', False)
1906 self.asan = kwargs.get('asan', False)
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02001907 self.ubsan = kwargs.get('ubsan', False)
Anas Nashifce2b4182020-03-24 14:40:28 -04001908 self.valgrind = kwargs.get('valgrind', False)
1909 self.extra_args = kwargs.get('extra_args', [])
1910 self.device_testing = kwargs.get('device_testing', False)
1911 self.cmake_only = kwargs.get('cmake_only', False)
1912 self.cleanup = kwargs.get('cleanup', False)
1913 self.coverage = kwargs.get('coverage', False)
1914 self.inline_logs = kwargs.get('inline_logs', False)
Anas Nashifce2b4182020-03-24 14:40:28 -04001915 self.generator = kwargs.get('generator', None)
1916 self.generator_cmd = kwargs.get('generator_cmd', None)
Anas Nashiff6462a32020-03-29 19:02:51 -04001917 self.verbose = kwargs.get('verbose', None)
Anas Nashifce2b4182020-03-24 14:40:28 -04001918
1919 @staticmethod
1920 def log_info(filename, inline_logs):
1921 filename = os.path.abspath(os.path.realpath(filename))
1922 if inline_logs:
1923 logger.info("{:-^100}".format(filename))
1924
1925 try:
1926 with open(filename) as fp:
1927 data = fp.read()
1928 except Exception as e:
1929 data = "Unable to read log data (%s)\n" % (str(e))
1930
1931 logger.error(data)
1932
1933 logger.info("{:-^100}".format(filename))
1934 else:
1935 logger.error("see: " + Fore.YELLOW + filename + Fore.RESET)
1936
1937 def log_info_file(self, inline_logs):
1938 build_dir = self.instance.build_dir
1939 h_log = "{}/handler.log".format(build_dir)
1940 b_log = "{}/build.log".format(build_dir)
1941 v_log = "{}/valgrind.log".format(build_dir)
1942 d_log = "{}/device.log".format(build_dir)
1943
1944 if os.path.exists(v_log) and "Valgrind" in self.instance.reason:
1945 self.log_info("{}".format(v_log), inline_logs)
1946 elif os.path.exists(h_log) and os.path.getsize(h_log) > 0:
1947 self.log_info("{}".format(h_log), inline_logs)
1948 elif os.path.exists(d_log) and os.path.getsize(d_log) > 0:
1949 self.log_info("{}".format(d_log), inline_logs)
1950 else:
1951 self.log_info("{}".format(b_log), inline_logs)
1952
1953 def setup_handler(self):
1954
1955 instance = self.instance
1956 args = []
1957
1958 # FIXME: Needs simplification
1959 if instance.platform.simulation == "qemu":
1960 instance.handler = QEMUHandler(instance, "qemu")
1961 args.append("QEMU_PIPE=%s" % instance.handler.get_fifo())
1962 instance.handler.call_make_run = True
1963 elif instance.testcase.type == "unit":
1964 instance.handler = BinaryHandler(instance, "unit")
1965 instance.handler.binary = os.path.join(instance.build_dir, "testbinary")
Anas Nashif051602f2020-04-28 14:27:46 -04001966 if self.coverage:
1967 args.append("COVERAGE=1")
Anas Nashifce2b4182020-03-24 14:40:28 -04001968 elif instance.platform.type == "native":
1969 handler = BinaryHandler(instance, "native")
1970
1971 handler.asan = self.asan
1972 handler.valgrind = self.valgrind
1973 handler.lsan = self.lsan
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02001974 handler.ubsan = self.ubsan
Anas Nashifce2b4182020-03-24 14:40:28 -04001975 handler.coverage = self.coverage
1976
1977 handler.binary = os.path.join(instance.build_dir, "zephyr", "zephyr.exe")
1978 instance.handler = handler
1979 elif instance.platform.simulation == "nsim":
1980 if find_executable("nsimdrv"):
1981 instance.handler = BinaryHandler(instance, "nsim")
1982 instance.handler.call_make_run = True
1983 elif instance.platform.simulation == "renode":
1984 if find_executable("renode"):
1985 instance.handler = BinaryHandler(instance, "renode")
1986 instance.handler.pid_fn = os.path.join(instance.build_dir, "renode.pid")
1987 instance.handler.call_make_run = True
1988 elif self.device_testing:
1989 instance.handler = DeviceHandler(instance, "device")
1990
1991 if instance.handler:
1992 instance.handler.args = args
Anas Nashifb3669492020-03-24 22:33:50 -04001993 instance.handler.generator_cmd = self.generator_cmd
1994 instance.handler.generator = self.generator
Anas Nashifce2b4182020-03-24 14:40:28 -04001995
1996 def process(self, message):
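        """Handle one work item from the pipeline.

        The "op" field drives a simple state machine:
        cmake -> build -> run -> report -> (optional) cleanup.
        Each step posts the follow-up operation back to the pipeline.
        """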
1997 op = message.get('op')
1998
1999 if not self.instance.handler:
2000 self.setup_handler()
2001
2002 # The build process, call cmake and build with configured generator
2003 if op == "cmake":
2004 results = self.cmake()
Anas Nashiff04461e2020-06-29 10:07:02 -04002005 if self.instance.status in ["failed", "error"]:
Anas Nashifce2b4182020-03-24 14:40:28 -04002006 pipeline.put({"op": "report", "test": self.instance})
2007 elif self.cmake_only:
2008 pipeline.put({"op": "report", "test": self.instance})
2009 else:
2010 if self.instance.name in results['filter'] and results['filter'][self.instance.name]:
2011 logger.debug("filtering %s" % self.instance.name)
2012 self.instance.status = "skipped"
2013 self.instance.reason = "filter"
Maciej Perkowskib2fa99c2020-05-21 14:45:29 +02002014 for case in self.instance.testcase.cases:
2015 self.instance.results.update({case: 'SKIP'})
Anas Nashifce2b4182020-03-24 14:40:28 -04002016 pipeline.put({"op": "report", "test": self.instance})
2017 else:
2018 pipeline.put({"op": "build", "test": self.instance})
2019
2020 elif op == "build":
2021 logger.debug("build test: %s" % self.instance.name)
2022 results = self.build()
2023
2024 if not results:
Anas Nashiff04461e2020-06-29 10:07:02 -04002025 self.instance.status = "error"
Anas Nashifce2b4182020-03-24 14:40:28 -04002026 self.instance.reason = "Build Failure"
2027 pipeline.put({"op": "report", "test": self.instance})
2028 else:
2029 if results.get('returncode', 1) > 0:
2030 pipeline.put({"op": "report", "test": self.instance})
2031 else:
2032 if self.instance.run:
2033 pipeline.put({"op": "run", "test": self.instance})
2034 else:
2035 pipeline.put({"op": "report", "test": self.instance})
2036 # Run the generated binary using one of the supported handlers
2037 elif op == "run":
2038 logger.debug("run test: %s" % self.instance.name)
2039 self.run()
2040 self.instance.status, _ = self.instance.handler.get_state()
Anas Nashif869ca052020-07-07 14:29:07 -04002041 logger.debug(f"run status: {self.instance.status}")
Anas Nashifce2b4182020-03-24 14:40:28 -04002042 pipeline.put({
2043 "op": "report",
2044 "test": self.instance,
2045 "state": "executed",
2046 "status": self.instance.status,
2047 "reason": self.instance.reason}
2048 )
2049
2050 # Report results and output progress to screen
2051 elif op == "report":
2052 with report_lock:
2053 self.report_out()
2054
2055 if self.cleanup and not self.coverage and self.instance.status == "passed":
2056 pipeline.put({
2057 "op": "cleanup",
2058 "test": self.instance
2059 })
2060
2061 elif op == "cleanup":
2062 self.cleanup_artifacts()
2063
2064 def cleanup_artifacts(self):
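        """Remove build artifacts, keeping only the whitelisted logs and the
        generated .config, then prune empty directories and directory
        symlinks."""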
2065 logger.debug("Cleaning up {}".format(self.instance.build_dir))
2066 whitelist = [
2067 'zephyr/.config',
2068 'handler.log',
2069 'build.log',
2070 'device.log',
Anas Nashif9ace63e2020-04-28 07:14:43 -04002071 'recording.csv',
Anas Nashifce2b4182020-03-24 14:40:28 -04002072 ]
2073 whitelist = [os.path.join(self.instance.build_dir, file) for file in whitelist]
2074
2075 for dirpath, dirnames, filenames in os.walk(self.instance.build_dir, topdown=False):
2076 for name in filenames:
2077 path = os.path.join(dirpath, name)
2078 if path not in whitelist:
2079 os.remove(path)
2080 # Remove empty directories and symbolic links to directories
2081 for dir in dirnames:
2082 path = os.path.join(dirpath, dir)
2083 if os.path.islink(path):
2084 os.remove(path)
2085 elif not os.listdir(path):
2086 os.rmdir(path)
2087
2088 def report_out(self):
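        """Update the suite counters for this instance and print either a
        detailed per-test line (verbose mode) or a one-line progress
        summary."""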
2089 total_tests_width = len(str(self.suite.total_tests))
2090 self.suite.total_done += 1
2091 instance = self.instance
2092
Anas Nashiff04461e2020-06-29 10:07:02 -04002093 if instance.status in ["error", "failed", "timeout"]:
Anas Nashifdc43c292020-07-09 09:46:45 -04002094 if instance.status == "error":
2095 self.suite.total_errors += 1
Anas Nashifce2b4182020-03-24 14:40:28 -04002096 self.suite.total_failed += 1
Anas Nashiff6462a32020-03-29 19:02:51 -04002097 if self.verbose:
Anas Nashifce2b4182020-03-24 14:40:28 -04002098 status = Fore.RED + "FAILED " + Fore.RESET + instance.reason
2099 else:
2100 print("")
2101 logger.error(
2102 "{:<25} {:<50} {}FAILED{}: {}".format(
2103 instance.platform.name,
2104 instance.testcase.name,
2105 Fore.RED,
2106 Fore.RESET,
2107 instance.reason))
Anas Nashiff6462a32020-03-29 19:02:51 -04002108 if not self.verbose:
Anas Nashifce2b4182020-03-24 14:40:28 -04002109 self.log_info_file(self.inline_logs)
2110 elif instance.status == "skipped":
2111 self.suite.total_skipped += 1
2112 status = Fore.YELLOW + "SKIPPED" + Fore.RESET
Anas Nashif869ca052020-07-07 14:29:07 -04002113 elif instance.status == "passed":
Anas Nashifdc43c292020-07-09 09:46:45 -04002114 self.suite.total_passed += 1
Anas Nashifce2b4182020-03-24 14:40:28 -04002115 status = Fore.GREEN + "PASSED" + Fore.RESET
Anas Nashif869ca052020-07-07 14:29:07 -04002116 else:
2117 logger.debug(f"Unknown status = {instance.status}")
2118 status = Fore.YELLOW + "UNKNOWN" + Fore.RESET
Anas Nashifce2b4182020-03-24 14:40:28 -04002119
Anas Nashiff6462a32020-03-29 19:02:51 -04002120 if self.verbose:
Anas Nashifce2b4182020-03-24 14:40:28 -04002121 if self.cmake_only:
2122 more_info = "cmake"
2123 elif instance.status == "skipped":
2124 more_info = instance.reason
2125 else:
2126 if instance.handler and instance.run:
2127 more_info = instance.handler.type_str
2128 htime = instance.handler.duration
2129 if htime:
2130 more_info += " {:.3f}s".format(htime)
2131 else:
2132 more_info = "build"
2133
2134 logger.info("{:>{}}/{} {:<25} {:<50} {} ({})".format(
2135 self.suite.total_done, total_tests_width, self.suite.total_tests, instance.platform.name,
2136 instance.testcase.name, status, more_info))
2137
Anas Nashiff04461e2020-06-29 10:07:02 -04002138 if instance.status in ["error", "failed", "timeout"]:
Anas Nashifce2b4182020-03-24 14:40:28 -04002139 self.log_info_file(self.inline_logs)
2140 else:
2141 sys.stdout.write("\rINFO - Total complete: %s%4d/%4d%s %2d%% skipped: %s%4d%s, failed: %s%4d%s" % (
2142 Fore.GREEN,
2143 self.suite.total_done,
2144 self.suite.total_tests,
2145 Fore.RESET,
2146 int((float(self.suite.total_done) / self.suite.total_tests) * 100),
2147 Fore.YELLOW if self.suite.total_skipped > 0 else Fore.RESET,
2148 self.suite.total_skipped,
2149 Fore.RESET,
2150 Fore.RED if self.suite.total_failed > 0 else Fore.RESET,
2151 self.suite.total_failed,
2152 Fore.RESET
2153 )
2154 )
2155 sys.stdout.flush()
2156
2157 def cmake(self):
2158
2159 instance = self.instance
2160 args = self.testcase.extra_args[:]
2161 args += self.extra_args
2162
2163 if instance.handler:
2164 args += instance.handler.args
2165
2166 # merge overlay files into one variable
2167 def extract_overlays(args):
2168 re_overlay = re.compile('OVERLAY_CONFIG=(.*)')
2169 other_args = []
2170 overlays = []
2171 for arg in args:
2172 match = re_overlay.search(arg)
2173 if match:
2174 overlays.append(match.group(1).strip('\'"'))
2175 else:
2176 other_args.append(arg)
2177
2178 args[:] = other_args
2179 return overlays
2180
2181 overlays = extract_overlays(args)
2182
2183 if (self.testcase.extra_configs or self.coverage or
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02002184 self.asan or self.ubsan):
Anas Nashifce2b4182020-03-24 14:40:28 -04002185 overlays.append(os.path.join(instance.build_dir,
2186 "sanitycheck", "testcase_extra.conf"))
2187
2188 if overlays:
2189 args.append("OVERLAY_CONFIG=\"%s\"" % (" ".join(overlays)))
2190
2191 results = self.run_cmake(args)
2192 return results
2193
2194 def build(self):
2195 results = self.run_build(['--build', self.build_dir])
2196 return results
2197
2198 def run(self):
2199
2200 instance = self.instance
2201
2202 if instance.handler.type_str == "device":
2203 instance.handler.suite = self.suite
2204
2205 instance.handler.handle()
2206
2207 sys.stdout.flush()
2208
2209
2210class BoundedExecutor(concurrent.futures.ThreadPoolExecutor):
2211 """BoundedExecutor behaves like a ThreadPoolExecutor that blocks on
2212 calls to submit() once the number of queued work items reaches the
2213 limit given as "bound".
2214 :param bound: Integer - the maximum number of items in the work queue
2215 :param max_workers: Integer - the size of the thread pool
2216 """
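    # Usage sketch (values are illustrative, not taken from this script):
    #   executor = BoundedExecutor(bound=60, max_workers=8)
    #   future = executor.submit(some_callable, arg)
    # submit() blocks once bound + max_workers futures are outstanding,
    # since the internal semaphore is initialized to bound + max_workers.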
2217
2218 def __init__(self, bound, max_workers, **kwargs):
2219 super().__init__(max_workers)
2220 # self.executor = ThreadPoolExecutor(max_workers=max_workers)
2221 self.semaphore = BoundedSemaphore(bound + max_workers)
2222
2223 def submit(self, fn, *args, **kwargs):
2224 self.semaphore.acquire()
2225 try:
2226 future = super().submit(fn, *args, **kwargs)
2227 except Exception:
2228 self.semaphore.release()
2229 raise
2230 else:
2231 future.add_done_callback(lambda x: self.semaphore.release())
2232 return future
2233
2234
Anas Nashifaff616d2020-04-17 21:24:57 -04002235class TestSuite(DisablePyTestCollectionMixin):
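    """Collect testcases and platform definitions, filter them into test
    instances, drive the build/run pipeline and produce the reports."""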
Anas Nashifce2b4182020-03-24 14:40:28 -04002236 config_re = re.compile('(CONFIG_[A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
2237 dt_re = re.compile('([A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
2238
2239 tc_schema = scl.yaml_load(
2240 os.path.join(ZEPHYR_BASE,
2241 "scripts", "sanity_chk", "testcase-schema.yaml"))
2242
2243 testcase_valid_keys = {"tags": {"type": "set", "required": False},
2244 "type": {"type": "str", "default": "integration"},
2245 "extra_args": {"type": "list"},
2246 "extra_configs": {"type": "list"},
2247 "build_only": {"type": "bool", "default": False},
2248 "build_on_all": {"type": "bool", "default": False},
2249 "skip": {"type": "bool", "default": False},
2250 "slow": {"type": "bool", "default": False},
2251 "timeout": {"type": "int", "default": 60},
2252 "min_ram": {"type": "int", "default": 8},
2253 "depends_on": {"type": "set"},
2254 "min_flash": {"type": "int", "default": 32},
2255 "arch_whitelist": {"type": "set"},
2256 "arch_exclude": {"type": "set"},
2257 "extra_sections": {"type": "list", "default": []},
2258 "platform_exclude": {"type": "set"},
2259 "platform_whitelist": {"type": "set"},
2260 "toolchain_exclude": {"type": "set"},
2261 "toolchain_whitelist": {"type": "set"},
2262 "filter": {"type": "str"},
2263 "harness": {"type": "str"},
2264 "harness_config": {"type": "map", "default": {}}
2265 }
2266
2267 RELEASE_DATA = os.path.join(ZEPHYR_BASE, "scripts", "sanity_chk",
2268 "sanity_last_release.csv")
2269
Aastha Grovera0ae5342020-05-13 13:34:00 -07002270 SAMPLE_FILENAME = 'sample.yaml'
2271 TESTCASE_FILENAME = 'testcase.yaml'
2272
Anas Nashifaff616d2020-04-17 21:24:57 -04002273 def __init__(self, board_root_list=[], testcase_roots=[], outdir=None):
Anas Nashifce2b4182020-03-24 14:40:28 -04002274
2275 self.roots = testcase_roots
2276 if not isinstance(board_root_list, list):
2277 self.board_roots = [board_root_list]
2278 else:
2279 self.board_roots = board_root_list
2280
2281 # Testsuite Options
2282 self.coverage_platform = []
2283 self.build_only = False
2284 self.cmake_only = False
2285 self.cleanup = False
2286 self.enable_slow = False
2287 self.device_testing = False
Anas Nashifce8c12e2020-05-21 09:11:40 -04002288 self.fixtures = []
Anas Nashifce2b4182020-03-24 14:40:28 -04002289 self.enable_coverage = False
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02002290 self.enable_ubsan = False
Anas Nashifce2b4182020-03-24 14:40:28 -04002291 self.enable_lsan = False
2292 self.enable_asan = False
2293 self.enable_valgrind = False
2294 self.extra_args = []
2295 self.inline_logs = False
2296 self.enable_size_report = False
2297 self.west_flash = None
2298 self.west_runner = None
2299 self.generator = None
2300 self.generator_cmd = None
2301
2302 # Keep track of which test cases we've filtered out and why
2303 self.testcases = {}
2304 self.platforms = []
2305 self.selected_platforms = []
2306 self.default_platforms = []
2307 self.outdir = os.path.abspath(outdir)
Anas Nashifaff616d2020-04-17 21:24:57 -04002308 self.discards = {}
Anas Nashifce2b4182020-03-24 14:40:28 -04002309 self.load_errors = 0
2310 self.instances = dict()
2311
2312 self.total_tests = 0 # number of test instances
2313 self.total_cases = 0 # number of test cases
2314 self.total_done = 0 # tests completed
2315 self.total_failed = 0
2316 self.total_skipped = 0
Anas Nashifdc43c292020-07-09 09:46:45 -04002317 self.total_passed = 0
2318 self.total_errors = 0
Anas Nashifce2b4182020-03-24 14:40:28 -04002319
2320 self.total_platforms = 0
2321 self.start_time = 0
2322 self.duration = 0
2323 self.warnings = 0
2324 self.cv = threading.Condition()
2325
2326 # hardcoded for now
2327 self.connected_hardware = []
2328
Anas Nashifbb280352020-05-07 12:02:48 -04002329 def get_platform_instances(self, platform):
2330 filtered_dict = {k:v for k,v in self.instances.items() if k.startswith(platform + "/")}
2331 return filtered_dict
2332
Anas Nashifce2b4182020-03-24 14:40:28 -04002333 def config(self):
2334 logger.info("coverage platform: {}".format(self.coverage_platform))
2335
2336 # Debug Functions
2337 @staticmethod
2338 def info(what):
2339 sys.stdout.write(what + "\n")
2340 sys.stdout.flush()
2341
2342 def update(self):
2343 self.total_tests = len(self.instances)
2344 self.total_cases = len(self.testcases)
2345
2346 def compare_metrics(self, filename):
2347 # name, datatype, lower results better
2348 interesting_metrics = [("ram_size", int, True),
2349 ("rom_size", int, True)]
2350
2351 if not os.path.exists(filename):
2352 logger.info("Cannot compare metrics, %s not found" % filename)
2353 return []
2354
2355 results = []
2356 saved_metrics = {}
2357 with open(filename) as fp:
2358 cr = csv.DictReader(fp)
2359 for row in cr:
2360 d = {}
2361 for m, _, _ in interesting_metrics:
2362 d[m] = row[m]
2363 saved_metrics[(row["test"], row["platform"])] = d
2364
2365 for instance in self.instances.values():
2366 mkey = (instance.testcase.name, instance.platform.name)
2367 if mkey not in saved_metrics:
2368 continue
2369 sm = saved_metrics[mkey]
2370 for metric, mtype, lower_better in interesting_metrics:
2371 if metric not in instance.metrics:
2372 continue
2373 if sm[metric] == "":
2374 continue
2375 delta = instance.metrics.get(metric, 0) - mtype(sm[metric])
2376 if delta == 0:
2377 continue
2378 results.append((instance, metric, instance.metrics.get(metric, 0), delta,
2379 lower_better))
2380 return results
2381
2382 def misc_reports(self, report, show_footprint, all_deltas,
2383 footprint_threshold, last_metrics):
2384
2385 if not report:
2386 return
2387
2388 deltas = self.compare_metrics(report)
2389 warnings = 0
2390 if deltas and show_footprint:
2391 for i, metric, value, delta, lower_better in deltas:
2392 if not all_deltas and ((delta < 0 and lower_better) or
2393 (delta > 0 and not lower_better)):
2394 continue
2395
2396 percentage = (float(delta) / float(value - delta))
2397 if not all_deltas and (percentage <
2398 (footprint_threshold / 100.0)):
2399 continue
2400
2401 logger.info("{:<25} {:<60} {}{}{}: {} {:<+4}, is now {:6} {:+.2%}".format(
2402 i.platform.name, i.testcase.name, Fore.YELLOW,
2403 "INFO" if all_deltas else "WARNING", Fore.RESET,
2404 metric, delta, value, percentage))
2405 warnings += 1
2406
2407 if warnings:
2408 logger.warning("Deltas based on metrics from last %s" %
2409 ("release" if not last_metrics else "run"))
2410
2411 def summary(self, unrecognized_sections):
2412 failed = 0
Anas Nashif4258d8d2020-05-08 08:40:27 -04002413 run = 0
Anas Nashifce2b4182020-03-24 14:40:28 -04002414 for instance in self.instances.values():
2415 if instance.status == "failed":
2416 failed += 1
2417 elif instance.metrics.get("unrecognized") and not unrecognized_sections:
2418 logger.error("%sFAILED%s: %s has unrecognized binary sections: %s" %
2419 (Fore.RED, Fore.RESET, instance.name,
2420 str(instance.metrics.get("unrecognized", []))))
2421 failed += 1
2422
Anas Nashif4258d8d2020-05-08 08:40:27 -04002423 if instance.metrics['handler_time']:
2424 run += 1
2425
Anas Nashifce2b4182020-03-24 14:40:28 -04002426 if self.total_tests and self.total_tests != self.total_skipped:
Anas Nashifdc43c292020-07-09 09:46:45 -04002427 pass_rate = (float(self.total_passed) / float(
Anas Nashifce2b4182020-03-24 14:40:28 -04002428 self.total_tests - self.total_skipped))
2429 else:
2430 pass_rate = 0
2431
2432 logger.info(
2433 "{}{} of {}{} tests passed ({:.2%}), {}{}{} failed, {} skipped with {}{}{} warnings in {:.2f} seconds".format(
2434 Fore.RED if failed else Fore.GREEN,
Anas Nashifdc43c292020-07-09 09:46:45 -04002435 self.total_passed,
Anas Nashifce2b4182020-03-24 14:40:28 -04002436 self.total_tests - self.total_skipped,
2437 Fore.RESET,
2438 pass_rate,
2439 Fore.RED if self.total_failed else Fore.RESET,
2440 self.total_failed,
2441 Fore.RESET,
2442 self.total_skipped,
2443 Fore.YELLOW if self.warnings else Fore.RESET,
2444 self.warnings,
2445 Fore.RESET,
2446 self.duration))
2447
2448 self.total_platforms = len(self.platforms)
2449 if self.platforms:
2450 logger.info("In total {} test cases were executed on {} out of {} total platforms ({:02.2f}%)".format(
2451 self.total_cases,
2452 len(self.selected_platforms),
2453 self.total_platforms,
2454 (100 * len(self.selected_platforms) / len(self.platforms))
2455 ))
2456
Anas Nashif4258d8d2020-05-08 08:40:27 -04002457 logger.info(f"{Fore.GREEN}{run}{Fore.RESET} tests executed on platforms, \
2458{Fore.RED}{self.total_tests - run}{Fore.RESET} tests were only built.")
2459
Anas Nashif6915adf2020-04-22 09:39:42 -04002460 def save_reports(self, name, suffix, report_dir, no_update, release, only_failed):
Anas Nashifce2b4182020-03-24 14:40:28 -04002461 if not self.instances:
2462 return
2463
2464 if name:
2465 report_name = name
2466 else:
2467 report_name = "sanitycheck"
2468
2469 if report_dir:
2470 os.makedirs(report_dir, exist_ok=True)
2471 filename = os.path.join(report_dir, report_name)
2472 outdir = report_dir
2473 else:
2474 filename = os.path.join(self.outdir, report_name)
2475 outdir = self.outdir
2476
Anas Nashif6915adf2020-04-22 09:39:42 -04002477 if suffix:
2478 filename = "{}_{}".format(filename, suffix)
2479
Anas Nashifce2b4182020-03-24 14:40:28 -04002480 if not no_update:
Anas Nashif90415502020-04-11 22:15:04 -04002481 self.xunit_report(filename + ".xml", full_report=False, append=only_failed)
2482 self.xunit_report(filename + "_report.xml", full_report=True, append=only_failed)
Anas Nashifce2b4182020-03-24 14:40:28 -04002483 self.csv_report(filename + ".csv")
Anas Nashif90415502020-04-11 22:15:04 -04002484
Anas Nashif6915adf2020-04-22 09:39:42 -04002485 self.target_report(outdir, suffix, append=only_failed)
Anas Nashifce2b4182020-03-24 14:40:28 -04002486 if self.discards:
2487 self.discard_report(filename + "_discard.csv")
2488
2489 if release:
2490 self.csv_report(self.RELEASE_DATA)
2491
2492 def add_configurations(self):
2493
2494 for board_root in self.board_roots:
2495 board_root = os.path.abspath(board_root)
2496
2497 logger.debug("Reading platform configuration files under %s..." %
2498 board_root)
2499
2500 for file in glob.glob(os.path.join(board_root, "*", "*", "*.yaml")):
2501 logger.debug("Found platform configuration " + file)
2502 try:
2503 platform = Platform()
2504 platform.load(file)
2505 if platform.sanitycheck:
2506 self.platforms.append(platform)
2507 if platform.default:
2508 self.default_platforms.append(platform.name)
2509
2510 except RuntimeError as e:
2511 logger.error("E: %s: can't load: %s" % (file, e))
2512 self.load_errors += 1
2513
2514 def get_all_tests(self):
2515 tests = []
2516 for _, tc in self.testcases.items():
2517 for case in tc.cases:
2518 tests.append(case)
2519
2520 return tests
2521
2522 @staticmethod
2523 def get_toolchain():
2524 toolchain = os.environ.get("ZEPHYR_TOOLCHAIN_VARIANT", None) or \
2525 os.environ.get("ZEPHYR_GCC_VARIANT", None)
2526
2527 if toolchain == "gccarmemb":
2528 # Remove this translation when gccarmemb is no longer supported.
2529 toolchain = "gnuarmemb"
2530
2531 try:
2532 if not toolchain:
2533 raise SanityRuntimeError("E: Variable ZEPHYR_TOOLCHAIN_VARIANT is not defined")
2534 except Exception as e:
2535 print(str(e))
2536 sys.exit(2)
2537
2538 return toolchain
2539
2540 def add_testcases(self, testcase_filter=[]):
2541 for root in self.roots:
2542 root = os.path.abspath(root)
2543
2544 logger.debug("Reading test case configuration files under %s..." % root)
2545
2546 for dirpath, dirnames, filenames in os.walk(root, topdown=True):
2547 logger.debug("scanning %s" % dirpath)
Aastha Grovera0ae5342020-05-13 13:34:00 -07002548 if self.SAMPLE_FILENAME in filenames:
2549 filename = self.SAMPLE_FILENAME
2550 elif self.TESTCASE_FILENAME in filenames:
2551 filename = self.TESTCASE_FILENAME
Anas Nashifce2b4182020-03-24 14:40:28 -04002552 else:
2553 continue
2554
2555 logger.debug("Found possible test case in " + dirpath)
2556
2557 dirnames[:] = []
2558 tc_path = os.path.join(dirpath, filename)
2559
2560 try:
2561 parsed_data = SanityConfigParser(tc_path, self.tc_schema)
2562 parsed_data.load()
2563
2564 tc_path = os.path.dirname(tc_path)
2565 workdir = os.path.relpath(tc_path, root)
2566
2567 for name in parsed_data.tests.keys():
Anas Nashifaff616d2020-04-17 21:24:57 -04002568 tc = TestCase(root, workdir, name)
Anas Nashifce2b4182020-03-24 14:40:28 -04002569
2570 tc_dict = parsed_data.get_test(name, self.testcase_valid_keys)
2571
2572 tc.source_dir = tc_path
2573 tc.yamlfile = tc_path
2574
Anas Nashifce2b4182020-03-24 14:40:28 -04002575 tc.type = tc_dict["type"]
2576 tc.tags = tc_dict["tags"]
2577 tc.extra_args = tc_dict["extra_args"]
2578 tc.extra_configs = tc_dict["extra_configs"]
2579 tc.arch_whitelist = tc_dict["arch_whitelist"]
2580 tc.arch_exclude = tc_dict["arch_exclude"]
2581 tc.skip = tc_dict["skip"]
2582 tc.platform_exclude = tc_dict["platform_exclude"]
2583 tc.platform_whitelist = tc_dict["platform_whitelist"]
2584 tc.toolchain_exclude = tc_dict["toolchain_exclude"]
2585 tc.toolchain_whitelist = tc_dict["toolchain_whitelist"]
2586 tc.tc_filter = tc_dict["filter"]
2587 tc.timeout = tc_dict["timeout"]
2588 tc.harness = tc_dict["harness"]
2589 tc.harness_config = tc_dict["harness_config"]
Anas Nashif43275c82020-05-04 18:22:16 -04002590 if tc.harness == 'console' and not tc.harness_config:
2591 raise Exception('Harness config error: console harness defined without a configuration.')
Anas Nashifce2b4182020-03-24 14:40:28 -04002592 tc.build_only = tc_dict["build_only"]
2593 tc.build_on_all = tc_dict["build_on_all"]
2594 tc.slow = tc_dict["slow"]
2595 tc.min_ram = tc_dict["min_ram"]
2596 tc.depends_on = tc_dict["depends_on"]
2597 tc.min_flash = tc_dict["min_flash"]
2598 tc.extra_sections = tc_dict["extra_sections"]
2599
2600 tc.parse_subcases(tc_path)
2601
2602 if testcase_filter:
2603 if tc.name and tc.name in testcase_filter:
2604 self.testcases[tc.name] = tc
2605 else:
2606 self.testcases[tc.name] = tc
2607
2608 except Exception as e:
2609 logger.error("%s: can't load (skipping): %s" % (tc_path, e))
2610 self.load_errors += 1
2611
2612
2613 def get_platform(self, name):
2614 selected_platform = None
2615 for platform in self.platforms:
2616 if platform.name == name:
2617 selected_platform = platform
2618 break
2619 return selected_platform
2620
2621 def load_from_file(self, file, filter_status=[]):
2622 try:
2623 with open(file, "r") as fp:
2624 cr = csv.DictReader(fp)
2625 instance_list = []
2626 for row in cr:
2627 if row["status"] in filter_status:
2628 continue
2629 test = row["test"]
2630
2631 platform = self.get_platform(row["platform"])
2632 instance = TestInstance(self.testcases[test], platform, self.outdir)
2633 instance.check_build_or_run(
2634 self.build_only,
2635 self.enable_slow,
2636 self.device_testing,
Anas Nashifce8c12e2020-05-21 09:11:40 -04002637 self.fixtures
Anas Nashifce2b4182020-03-24 14:40:28 -04002638 )
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02002639 instance.create_overlay(platform, self.enable_asan, self.enable_ubsan, self.enable_coverage, self.coverage_platform)
Anas Nashifce2b4182020-03-24 14:40:28 -04002640 instance_list.append(instance)
2641 self.add_instances(instance_list)
2642
2643 except KeyError as e:
2644 logger.error("Key error while parsing tests file.({})".format(str(e)))
2645 sys.exit(2)
2646
2647 except FileNotFoundError as e:
2648 logger.error("Couldn't find input file with list of tests. ({})".format(e))
2649 sys.exit(2)
2650
2651 def apply_filters(self, **kwargs):
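        """Build the initial list of test instances and discard those that do
        not match the command-line options or the testcase/platform
        constraints (tags, arch, toolchain, RAM/flash, depends_on, ...).

        Returns the dictionary of discarded instances with the discard reason.
        """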
2652
2653 toolchain = self.get_toolchain()
2654
2655 discards = {}
2656 platform_filter = kwargs.get('platform')
Anas Nashifaff616d2020-04-17 21:24:57 -04002657 exclude_platform = kwargs.get('exclude_platform', [])
2658 testcase_filter = kwargs.get('run_individual_tests', [])
Anas Nashifce2b4182020-03-24 14:40:28 -04002659 arch_filter = kwargs.get('arch')
2660 tag_filter = kwargs.get('tag')
2661 exclude_tag = kwargs.get('exclude_tag')
2662 all_filter = kwargs.get('all')
2663 device_testing_filter = kwargs.get('device_testing')
2664 force_toolchain = kwargs.get('force_toolchain')
Anas Nashif1a5defa2020-05-01 14:57:00 -04002665 force_platform = kwargs.get('force_platform')
Anas Nashifce2b4182020-03-24 14:40:28 -04002666
2667 logger.debug("platform filter: " + str(platform_filter))
2668 logger.debug(" arch_filter: " + str(arch_filter))
2669 logger.debug(" tag_filter: " + str(tag_filter))
2670 logger.debug(" exclude_tag: " + str(exclude_tag))
2671
2672 default_platforms = False
2673
2674 if platform_filter:
2675 platforms = list(filter(lambda p: p.name in platform_filter, self.platforms))
2676 else:
2677 platforms = self.platforms
2678
2679 if all_filter:
2680 logger.info("Selecting all possible platforms per test case")
2681 # When --all used, any --platform arguments ignored
2682 platform_filter = []
2683 elif not platform_filter:
2684 logger.info("Selecting default platforms per test case")
2685 default_platforms = True
2686
2687 logger.info("Building initial testcase list...")
2688
2689 for tc_name, tc in self.testcases.items():
2690 # list of instances per testcase, aka configurations.
2691 instance_list = []
2692 for plat in platforms:
2693 instance = TestInstance(tc, plat, self.outdir)
2694 instance.check_build_or_run(
2695 self.build_only,
2696 self.enable_slow,
2697 self.device_testing,
Anas Nashifce8c12e2020-05-21 09:11:40 -04002698 self.fixtures
Anas Nashifce2b4182020-03-24 14:40:28 -04002699 )
Anas Nashiff04461e2020-06-29 10:07:02 -04002700 for t in tc.cases:
2701 instance.results[t] = None
Anas Nashif3b86f132020-05-21 10:35:33 -04002702
2703 if device_testing_filter:
2704 for h in self.connected_hardware:
2705 if h['platform'] == plat.name:
2706 if tc.harness_config.get('fixture') in h.get('fixtures', []):
2707 instance.build_only = False
2708 instance.run = True
2709
Anas Nashif1a5defa2020-05-01 14:57:00 -04002710 if not force_platform and plat.name in exclude_platform:
Anas Nashifce2b4182020-03-24 14:40:28 -04002711 discards[instance] = "Platform is excluded on command line."
2712 continue
2713
2714 if (plat.arch == "unit") != (tc.type == "unit"):
2715 # Discard silently
2716 continue
2717
2718 if device_testing_filter and instance.build_only:
2719 discards[instance] = "Not runnable on device"
2720 continue
2721
2722 if tc.skip:
2723 discards[instance] = "Skip filter"
2724 continue
2725
2726 if tc.build_on_all and not platform_filter:
2727 platform_filter = []
2728
2729 if tag_filter and not tc.tags.intersection(tag_filter):
2730 discards[instance] = "Command line testcase tag filter"
2731 continue
2732
2733 if exclude_tag and tc.tags.intersection(exclude_tag):
2734 discards[instance] = "Command line testcase exclude filter"
2735 continue
2736
2737 if testcase_filter and tc_name not in testcase_filter:
2738 discards[instance] = "Testcase name filter"
2739 continue
2740
2741 if arch_filter and plat.arch not in arch_filter:
2742 discards[instance] = "Command line testcase arch filter"
2743 continue
2744
Anas Nashif1a5defa2020-05-01 14:57:00 -04002745 if not force_platform:
Anas Nashifce2b4182020-03-24 14:40:28 -04002746
Anas Nashif1a5defa2020-05-01 14:57:00 -04002747 if tc.arch_whitelist and plat.arch not in tc.arch_whitelist:
2748 discards[instance] = "Not in test case arch whitelist"
2749 continue
Anas Nashifce2b4182020-03-24 14:40:28 -04002750
Anas Nashif1a5defa2020-05-01 14:57:00 -04002751 if tc.arch_exclude and plat.arch in tc.arch_exclude:
2752 discards[instance] = "In test case arch exclude"
2753 continue
2754
2755 if tc.platform_exclude and plat.name in tc.platform_exclude:
2756 discards[instance] = "In test case platform exclude"
2757 continue
Anas Nashifce2b4182020-03-24 14:40:28 -04002758
2759 if tc.toolchain_exclude and toolchain in tc.toolchain_exclude:
2760 discards[instance] = "In test case toolchain exclude"
2761 continue
2762
2763 if platform_filter and plat.name not in platform_filter:
2764 discards[instance] = "Command line platform filter"
2765 continue
2766
2767 if tc.platform_whitelist and plat.name not in tc.platform_whitelist:
2768 discards[instance] = "Not in testcase platform whitelist"
2769 continue
2770
2771 if tc.toolchain_whitelist and toolchain not in tc.toolchain_whitelist:
2772 discards[instance] = "Not in testcase toolchain whitelist"
2773 continue
2774
2775 if not plat.env_satisfied:
2776 discards[instance] = "Environment ({}) not satisfied".format(", ".join(plat.env))
2777 continue
2778
2779 if not force_toolchain \
2780 and toolchain and (toolchain not in plat.supported_toolchains) \
2781 and tc.type != 'unit':
2782 discards[instance] = "Not supported by the toolchain"
2783 continue
2784
2785 if plat.ram < tc.min_ram:
2786 discards[instance] = "Not enough RAM"
2787 continue
2788
2789 if tc.depends_on:
2790 dep_intersection = tc.depends_on.intersection(set(plat.supported))
2791 if dep_intersection != set(tc.depends_on):
2792 discards[instance] = "No hardware support"
2793 continue
2794
2795 if plat.flash < tc.min_flash:
2796 discards[instance] = "Not enough FLASH"
2797 continue
2798
2799 if set(plat.ignore_tags) & tc.tags:
2800 discards[instance] = "Excluded tags per platform"
2801 continue
2802
2803 # if nothing stopped us until now, it means this configuration
2804 # needs to be added.
2805 instance_list.append(instance)
2806
2807 # no configurations, so jump to next testcase
2808 if not instance_list:
2809 continue
2810
2811 # if sanitycheck was launched with no platform options at all, we
2812 # take all default platforms
2813 if default_platforms and not tc.build_on_all:
2814 if tc.platform_whitelist:
2815 a = set(self.default_platforms)
2816 b = set(tc.platform_whitelist)
2817 c = a.intersection(b)
2818 if c:
2819 aa = list(filter(lambda tc: tc.platform.name in c, instance_list))
2820 self.add_instances(aa)
2821 else:
2822 self.add_instances(instance_list[:1])
2823 else:
2824 instances = list(filter(lambda tc: tc.platform.default, instance_list))
2825 self.add_instances(instances)
2826
Anas Nashifaff616d2020-04-17 21:24:57 -04002827 for instance in list(filter(lambda inst: not inst.platform.default, instance_list)):
Anas Nashifce2b4182020-03-24 14:40:28 -04002828 discards[instance] = "Not a default test platform"
2829
2830 else:
2831 self.add_instances(instance_list)
2832
2833 for _, case in self.instances.items():
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02002834 case.create_overlay(case.platform, self.enable_asan, self.enable_ubsan, self.enable_coverage, self.coverage_platform)
Anas Nashifce2b4182020-03-24 14:40:28 -04002835
2836 self.discards = discards
2837 self.selected_platforms = set(p.platform.name for p in self.instances.values())
2838
2839 return discards
2840
2841 def add_instances(self, instance_list):
2842 for instance in instance_list:
2843 self.instances[instance.name] = instance
2844
2845 def add_tasks_to_queue(self, test_only=False):
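        """Seed the pipeline: "run" for already-built instances in test-only
        mode, otherwise "cmake" for every instance not already resolved as
        passed, skipped or error."""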
2846 for instance in self.instances.values():
2847 if test_only:
2848 if instance.run:
2849 pipeline.put({"op": "run", "test": instance, "status": "built"})
2850 else:
Anas Nashifdc43c292020-07-09 09:46:45 -04002851 if instance.status not in ['passed', 'skipped', 'error']:
Anas Nashifce2b4182020-03-24 14:40:28 -04002852 instance.status = None
2853 pipeline.put({"op": "cmake", "test": instance})
2854
2855 return "DONE FEEDING"
2856
2857 def execute(self):
Anas Nashifdc43c292020-07-09 09:46:45 -04002858
Anas Nashifce2b4182020-03-24 14:40:28 -04002859 def calc_one_elf_size(instance):
Anas Nashiff04461e2020-06-29 10:07:02 -04002860 if instance.status not in ["error", "failed", "skipped"]:
Anas Nashifce2b4182020-03-24 14:40:28 -04002861 if instance.platform.type != "native":
2862 size_calc = instance.calculate_sizes()
2863 instance.metrics["ram_size"] = size_calc.get_ram_size()
2864 instance.metrics["rom_size"] = size_calc.get_rom_size()
2865 instance.metrics["unrecognized"] = size_calc.unrecognized_sections()
2866 else:
2867 instance.metrics["ram_size"] = 0
2868 instance.metrics["rom_size"] = 0
2869 instance.metrics["unrecognized"] = []
2870
2871 instance.metrics["handler_time"] = instance.handler.duration if instance.handler else 0
2872
2873 logger.info("Adding tasks to the queue...")
2874 # We can use a with statement to ensure threads are cleaned up promptly
2875 with BoundedExecutor(bound=self.jobs, max_workers=self.jobs) as executor:
2876
2877 # start a future for a thread which sends work in through the queue
2878 future_to_test = {
2879 executor.submit(self.add_tasks_to_queue, self.test_only): 'FEEDER DONE'}
2880
2881 while future_to_test:
2882 # check for status of the futures which are currently working
2883 done, pending = concurrent.futures.wait(future_to_test, timeout=1,
2884 return_when=concurrent.futures.FIRST_COMPLETED)
2885
2886 # if there is incoming work, start a new future
2887 while not pipeline.empty():
2888 # fetch a work item from the pipeline queue
2889 message = pipeline.get()
2890 test = message['test']
2891
2892 pb = ProjectBuilder(self,
2893 test,
2894 lsan=self.enable_lsan,
2895 asan=self.enable_asan,
Christian Taedcke3dbe9f22020-07-06 16:00:57 +02002896 ubsan=self.enable_ubsan,
Anas Nashifce2b4182020-03-24 14:40:28 -04002897 coverage=self.enable_coverage,
2898 extra_args=self.extra_args,
2899 device_testing=self.device_testing,
2900 cmake_only=self.cmake_only,
2901 cleanup=self.cleanup,
2902 valgrind=self.enable_valgrind,
2903 inline_logs=self.inline_logs,
Anas Nashifce2b4182020-03-24 14:40:28 -04002904 generator=self.generator,
Anas Nashiff6462a32020-03-29 19:02:51 -04002905 generator_cmd=self.generator_cmd,
2906 verbose=self.verbose
Anas Nashifce2b4182020-03-24 14:40:28 -04002907 )
2908 future_to_test[executor.submit(pb.process, message)] = test.name
2909
2910 # process any completed futures
2911 for future in done:
2912 test = future_to_test[future]
2913 try:
2914 data = future.result()
2915 except Exception as exc:
2916 logger.error('%r generated an exception: %s' % (test, exc))
2917 sys.exit('%r generated an exception: %s' % (test, exc))
2918
2919 else:
2920 if data:
2921 logger.debug(data)
2922
2923 # remove the now completed future
2924 del future_to_test[future]
2925
2926 for future in pending:
2927 test = future_to_test[future]
2928
2929 try:
2930 future.result(timeout=180)
2931 except concurrent.futures.TimeoutError:
2932 logger.warning("{} stuck?".format(test))
2933
2934 if self.enable_size_report and not self.cmake_only:
2935 # Parallelize size calculation
2936 executor = concurrent.futures.ThreadPoolExecutor(self.jobs)
2937 futures = [executor.submit(calc_one_elf_size, instance)
2938 for instance in self.instances.values()]
2939 concurrent.futures.wait(futures)
2940 else:
2941 for instance in self.instances.values():
2942 instance.metrics["ram_size"] = 0
2943 instance.metrics["rom_size"] = 0
2944 instance.metrics["handler_time"] = instance.handler.duration if instance.handler else 0
2945 instance.metrics["unrecognized"] = []
2946
2947 def discard_report(self, filename):
2948
2949 try:
Aastha Groverdcbd9152020-06-16 10:19:51 -07002950 if not self.discards:
Anas Nashifce2b4182020-03-24 14:40:28 -04002951 raise SanityRuntimeError("apply_filters() hasn't been run!")
2952 except Exception as e:
2953 logger.error(str(e))
2954 sys.exit(2)
2955
2956 with open(filename, "wt") as csvfile:
2957 fieldnames = ["test", "arch", "platform", "reason"]
2958 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
2959 cw.writeheader()
2960 for instance, reason in sorted(self.discards.items()):
2961 rowdict = {"test": instance.testcase.name,
2962 "arch": instance.platform.arch,
2963 "platform": instance.platform.name,
2964 "reason": reason}
2965 cw.writerow(rowdict)
2966
Anas Nashif6915adf2020-04-22 09:39:42 -04002967 def target_report(self, outdir, suffix, append=False):
Anas Nashifce2b4182020-03-24 14:40:28 -04002968 platforms = {inst.platform.name for _, inst in self.instances.items()}
2969 for platform in platforms:
Anas Nashif6915adf2020-04-22 09:39:42 -04002970 if suffix:
2971 filename = os.path.join(outdir,"{}_{}.xml".format(platform, suffix))
2972 else:
2973 filename = os.path.join(outdir,"{}.xml".format(platform))
Anas Nashif90415502020-04-11 22:15:04 -04002974 self.xunit_report(filename, platform, full_report=True, append=append)
Anas Nashifce2b4182020-03-24 14:40:28 -04002975
Anas Nashif90415502020-04-11 22:15:04 -04002976
2977 @staticmethod
2978 def process_log(log_file):
2979 filtered_string = ""
2980 if os.path.exists(log_file):
2981 with open(log_file, "rb") as f:
2982 log = f.read().decode("utf-8")
2983 filtered_string = ''.join(filter(lambda x: x in string.printable, log))
2984
2985 return filtered_string
2986
Anas Nashifa53c8132020-05-05 09:32:46 -04002987
Anas Nashif90415502020-04-11 22:15:04 -04002988 def xunit_report(self, filename, platform=None, full_report=False, append=False):
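        """Write an xUnit-style XML report for the selected platform(s); with
        append=True, existing per-platform testsuite entries are updated in
        place instead of being recreated."""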
Anas Nashifa53c8132020-05-05 09:32:46 -04002989 total = 0
2990 if platform:
2991 selected = [platform]
2992 else:
2993 selected = self.selected_platforms
Anas Nashif90415502020-04-11 22:15:04 -04002994
Anas Nashif90415502020-04-11 22:15:04 -04002995 if os.path.exists(filename) and append:
2996 tree = ET.parse(filename)
2997 eleTestsuites = tree.getroot()
Anas Nashif90415502020-04-11 22:15:04 -04002998 else:
Anas Nashifce2b4182020-03-24 14:40:28 -04002999 eleTestsuites = ET.Element('testsuites')
Anas Nashifce2b4182020-03-24 14:40:28 -04003000
Anas Nashifa53c8132020-05-05 09:32:46 -04003001 for p in selected:
3002 inst = self.get_platform_instances(p)
3003 fails = 0
3004 passes = 0
3005 errors = 0
3006 skips = 0
3007 duration = 0
3008
3009 for _, instance in inst.items():
3010 handler_time = instance.metrics.get('handler_time', 0)
3011 duration += handler_time
3012 if full_report:
3013 for k in instance.results.keys():
3014 if instance.results[k] == 'PASS':
3015 passes += 1
3016 elif instance.results[k] == 'BLOCK':
3017 errors += 1
3018 elif instance.results[k] == 'SKIP':
3019 skips += 1
3020 else:
3021 fails += 1
3022 else:
Anas Nashiff04461e2020-06-29 10:07:02 -04003023 if instance.status in ["error", "failed", "timeout"]:
Anas Nashifa53c8132020-05-05 09:32:46 -04003024 if instance.reason in ['build_error', 'handler_crash']:
3025 errors += 1
3026 else:
3027 fails += 1
3028 elif instance.status == 'skipped':
3029 skips += 1
3030 else:
3031 passes += 1
3032
3033 total = (errors + passes + fails + skips)
3034 # do not produce a report if no tests were actually run (only built)
3035 if total == 0:
Anas Nashif90415502020-04-11 22:15:04 -04003036 continue
Anas Nashifce2b4182020-03-24 14:40:28 -04003037
Anas Nashifa53c8132020-05-05 09:32:46 -04003038 run = p
3039 eleTestsuite = None
3040
3041 # When we re-run the tests, we re-use the results and update only with
3042 # the newly run tests.
3043 if os.path.exists(filename) and append:
Anas Nashiff04461e2020-06-29 10:07:02 -04003044 ts = eleTestsuites.findall(f'testsuite/[@name="{p}"]')
3045 if ts:
3046 eleTestsuite = ts[0]
3047 eleTestsuite.attrib['failures'] = "%d" % fails
3048 eleTestsuite.attrib['errors'] = "%d" % errors
3049 eleTestsuite.attrib['skip'] = "%d" % skips
3050 else:
3051 logger.info(f"Did not find any existing results for {p}")
3052 eleTestsuite = ET.SubElement(eleTestsuites, 'testsuite',
3053 name=run, time="%f" % duration,
3054 tests="%d" % (total),
3055 failures="%d" % fails,
3056 errors="%d" % (errors), skip="%s" % (skips))
3057
Anas Nashif90415502020-04-11 22:15:04 -04003058 else:
Anas Nashifa53c8132020-05-05 09:32:46 -04003059 eleTestsuite = ET.SubElement(eleTestsuites, 'testsuite',
3060 name=run, time="%f" % duration,
3061 tests="%d" % (total),
3062 failures="%d" % fails,
3063 errors="%d" % (errors), skip="%s" % (skips))
Anas Nashif90415502020-04-11 22:15:04 -04003064
Anas Nashifa53c8132020-05-05 09:32:46 -04003065 for _, instance in inst.items():
3066 if full_report:
3067 tname = os.path.basename(instance.testcase.name)
3068 else:
3069 tname = instance.testcase.id
Anas Nashif90415502020-04-11 22:15:04 -04003070
Anas Nashifa53c8132020-05-05 09:32:46 -04003071
3072 handler_time = instance.metrics.get('handler_time', 0)
3073
3074 if full_report:
3075 for k in instance.results.keys():
3076
3077 # remove testcases that are being re-run from existing reports
3078 for tc in eleTestsuite.findall(f'testcase/[@name="{k}"]'):
3079 eleTestsuite.remove(tc)
3080
3081 classname = ".".join(tname.split(".")[:2])
3082 eleTestcase = ET.SubElement(
3083 eleTestsuite, 'testcase',
3084 classname=classname,
3085 name="%s" % (k), time="%f" % handler_time)
3086 if instance.results[k] in ['FAIL', 'BLOCK']:
3087 if instance.results[k] == 'FAIL':
3088 el = ET.SubElement(
3089 eleTestcase,
3090 'failure',
3091 type="failure",
3092 message="failed")
3093 else:
Anas Nashiff04461e2020-06-29 10:07:02 -04003094
Anas Nashifa53c8132020-05-05 09:32:46 -04003095 el = ET.SubElement(
3096 eleTestcase,
3097 'error',
3098 type="failure",
3099 message="failed")
3100 p = os.path.join(self.outdir, instance.platform.name, instance.testcase.name)
3101 log_file = os.path.join(p, "handler.log")
3102 el.text = self.process_log(log_file)
3103
Anas Nashiff04461e2020-06-29 10:07:02 -04003104 elif instance.results[k] == 'PASS':
3105 pass
Anas Nashifa53c8132020-05-05 09:32:46 -04003106 elif instance.results[k] == 'SKIP':
Anas Nashiff04461e2020-06-29 10:07:02 -04003107 el = ET.SubElement(eleTestcase, 'skipped', type="skipped", message="Skipped")
3108 else:
Anas Nashifce2b4182020-03-24 14:40:28 -04003109 el = ET.SubElement(
3110 eleTestcase,
Anas Nashiff04461e2020-06-29 10:07:02 -04003111 'error',
3112 type="error",
3113 message=f"{instance.reason}")
Anas Nashifa53c8132020-05-05 09:32:46 -04003114 else:
3115 if platform:
3116 classname = ".".join(instance.testcase.name.split(".")[:2])
3117 else:
3118 classname = p + ":" + ".".join(instance.testcase.name.split(".")[:2])
Anas Nashifce2b4182020-03-24 14:40:28 -04003119
Anas Nashiff04461e2020-06-29 10:07:02 -04003120 # remove testcases that are being re-run from existing reports
3121 for tc in eleTestsuite.findall(f'testcase/[@classname="{classname}"]'):
3122 eleTestsuite.remove(tc)
3123
Anas Nashifa53c8132020-05-05 09:32:46 -04003124 eleTestcase = ET.SubElement(eleTestsuite, 'testcase',
3125 classname=classname,
3126 name="%s" % (instance.testcase.name),
3127 time="%f" % handler_time)
Anas Nashiff04461e2020-06-29 10:07:02 -04003128 if instance.status in ["error", "failed", "timeout"]:
Anas Nashifa53c8132020-05-05 09:32:46 -04003129 failure = ET.SubElement(
Anas Nashifce2b4182020-03-24 14:40:28 -04003130 eleTestcase,
Anas Nashifa53c8132020-05-05 09:32:46 -04003131 'failure',
3132 type="failure",
Maciej Perkowskib2fa99c2020-05-21 14:45:29 +02003133 message=instance.reason)
Anas Nashiff04461e2020-06-29 10:07:02 -04003134
Anas Nashifa53c8132020-05-05 09:32:46 -04003135 p = ("%s/%s/%s" % (self.outdir, instance.platform.name, instance.testcase.name))
3136 bl = os.path.join(p, "build.log")
3137 hl = os.path.join(p, "handler.log")
3138 log_file = bl
3139 if instance.reason != 'Build error':
3140 if os.path.exists(hl):
3141 log_file = hl
3142 else:
3143 log_file = bl
Anas Nashifce2b4182020-03-24 14:40:28 -04003144
Anas Nashifa53c8132020-05-05 09:32:46 -04003145 failure.text = self.process_log(log_file)
Anas Nashifce2b4182020-03-24 14:40:28 -04003146
Anas Nashifa53c8132020-05-05 09:32:46 -04003147 elif instance.status == "skipped":
3148 ET.SubElement(eleTestcase, 'skipped', type="skipped", message="Skipped")
Anas Nashifce2b4182020-03-24 14:40:28 -04003149
3150 result = ET.tostring(eleTestsuites)
3151 with open(filename, 'wb') as report:
3152 report.write(result)
3153
Anas Nashif90415502020-04-11 22:15:04 -04003154
Anas Nashifce2b4182020-03-24 14:40:28 -04003155 def csv_report(self, filename):
3156 with open(filename, "wt") as csvfile:
3157 fieldnames = ["test", "arch", "platform", "status",
3158 "extra_args", "handler", "handler_time", "ram_size",
3159 "rom_size"]
3160 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
3161 cw.writeheader()
3162 for instance in self.instances.values():
3163 rowdict = {"test": instance.testcase.name,
3164 "arch": instance.platform.arch,
3165 "platform": instance.platform.name,
3166 "extra_args": " ".join(instance.testcase.extra_args),
3167 "handler": instance.platform.simulation}
3168
3169 rowdict["status"] = instance.status
Anas Nashiff04461e2020-06-29 10:07:02 -04003170 if instance.status not in ["error", "failed", "timeout"]:
Anas Nashifce2b4182020-03-24 14:40:28 -04003171 if instance.handler:
3172 rowdict["handler_time"] = instance.metrics.get("handler_time", 0)
3173 ram_size = instance.metrics.get("ram_size", 0)
3174 rom_size = instance.metrics.get("rom_size", 0)
3175 rowdict["ram_size"] = ram_size
3176 rowdict["rom_size"] = rom_size
3177 cw.writerow(rowdict)
3178
3179 def get_testcase(self, identifier):
3180 results = []
3181 for _, tc in self.testcases.items():
3182 for case in tc.cases:
3183 if case == identifier:
3184 results.append(tc)
3185 return results
3186
3187
3188class CoverageTool:
3189 """ Base class for every supported coverage tool
3190 """
3191
3192 def __init__(self):
Anas Nashiff6462a32020-03-29 19:02:51 -04003193 self.gcov_tool = None
3194 self.base_dir = None
Anas Nashifce2b4182020-03-24 14:40:28 -04003195
3196 @staticmethod
3197 def factory(tool):
3198 if tool == 'lcov':
Anas Nashiff6462a32020-03-29 19:02:51 -04003199 t = Lcov()
3200 elif tool == 'gcovr':
3201            t = Gcovr()
3202 else:
3203 logger.error("Unsupported coverage tool specified: {}".format(tool))
3204 return None
3205
Anas Nashiff6462a32020-03-29 19:02:51 -04003206 return t
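
    # Illustrative usage sketch (the real call site lives elsewhere in this
    # script): the caller is expected to fill in the attributes declared in
    # __init__() before generating a report, roughly:
    #
    #     coverage_tool = CoverageTool.factory("lcov")
    #     coverage_tool.gcov_tool = "gcov"        # assumed gcov binary name
    #     coverage_tool.base_dir = ZEPHYR_BASE
    #     coverage_tool.add_ignore_file('generated')
    #     coverage_tool.generate(outdir)
    #
    # gcov_tool and base_dir must be set, since _generate() passes them
    # straight to lcov/gcovr.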
Anas Nashifce2b4182020-03-24 14:40:28 -04003207
3208 @staticmethod
3209    def retrieve_gcov_data(input_file):
Anas Nashiff6462a32020-03-29 19:02:51 -04003210        logger.debug("Working on %s" % input_file)
Anas Nashifce2b4182020-03-24 14:40:28 -04003211 extracted_coverage_info = {}
3212 capture_data = False
3213 capture_complete = False
3214        with open(input_file, 'r') as fp:
3215 for line in fp.readlines():
3216 if re.search("GCOV_COVERAGE_DUMP_START", line):
3217 capture_data = True
3218 continue
3219 if re.search("GCOV_COVERAGE_DUMP_END", line):
3220 capture_complete = True
3221 break
3222 # Loop until the coverage data is found.
3223 if not capture_data:
3224 continue
3225 if line.startswith("*"):
3226 sp = line.split("<")
3227 if len(sp) > 1:
3228 # Remove the leading delimiter "*"
3229 file_name = sp[0][1:]
3230 # Remove the trailing new line char
3231 hex_dump = sp[1][:-1]
3232 else:
3233 continue
3234 else:
3235 continue
3236 extracted_coverage_info.update({file_name: hex_dump})
3237 if not capture_data:
3238 capture_complete = True
3239 return {'complete': capture_complete, 'data': extracted_coverage_info}
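
    # The parser above expects a console log shaped roughly like this
    # (illustrative; the file name and hex payload are made up):
    #
    #     GCOV_COVERAGE_DUMP_START
    #     *path/to/some_file.gcda<a1b2c3...
    #     GCOV_COVERAGE_DUMP_END
    #
    # i.e. each data line starts with '*', the file name and the hex dump
    # are separated by '<', and the trailing newline is stripped from the
    # hex dump.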
3240
3241 @staticmethod
3242 def create_gcda_files(extracted_coverage_info):
Anas Nashiff6462a32020-03-29 19:02:51 -04003243 logger.debug("Generating gcda files")
Anas Nashifce2b4182020-03-24 14:40:28 -04003244 for filename, hexdump_val in extracted_coverage_info.items():
3245            # gcovr fails if kobject_hash is included in the coverage data,
3246            # so skip it. This problem only occurs with gcovr v4.1.
3247 if "kobject_hash" in filename:
3248 filename = (filename[:-4]) + "gcno"
3249 try:
3250 os.remove(filename)
3251 except Exception:
3252 pass
3253 continue
3254
3255 with open(filename, 'wb') as fp:
3256 fp.write(bytes.fromhex(hexdump_val))
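
    # Note: the .gcda files written here are the binary gcov counter files;
    # they sit next to the .gcno files emitted at build time, and lcov/gcovr
    # later combine the two to compute coverage.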
3257
3258 def generate(self, outdir):
3259 for filename in glob.glob("%s/**/handler.log" % outdir, recursive=True):
3260 gcov_data = self.__class__.retrieve_gcov_data(filename)
3261 capture_complete = gcov_data['complete']
3262 extracted_coverage_info = gcov_data['data']
3263 if capture_complete:
3264 self.__class__.create_gcda_files(extracted_coverage_info)
3265 logger.debug("Gcov data captured: {}".format(filename))
3266 else:
3267 logger.error("Gcov data capture incomplete: {}".format(filename))
3268
3269 with open(os.path.join(outdir, "coverage.log"), "a") as coveragelog:
3270 ret = self._generate(outdir, coveragelog)
3271 if ret == 0:
3272 logger.info("HTML report generated: {}".format(
3273 os.path.join(outdir, "coverage", "index.html")))
3274
3275
3276class Lcov(CoverageTool):
3277
3278 def __init__(self):
3279 super().__init__()
3280 self.ignores = []
3281
3282 def add_ignore_file(self, pattern):
3283 self.ignores.append('*' + pattern + '*')
3284
3285 def add_ignore_directory(self, pattern):
3286 self.ignores.append(pattern + '/*')
3287
3288 def _generate(self, outdir, coveragelog):
3289 coveragefile = os.path.join(outdir, "coverage.info")
3290 ztestfile = os.path.join(outdir, "ztest.info")
3291 subprocess.call(["lcov", "--gcov-tool", self.gcov_tool,
3292 "--capture", "--directory", outdir,
3293 "--rc", "lcov_branch_coverage=1",
3294 "--output-file", coveragefile], stdout=coveragelog)
3295 # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
3296 subprocess.call(["lcov", "--gcov-tool", self.gcov_tool, "--extract",
3297 coveragefile,
Anas Nashiff6462a32020-03-29 19:02:51 -04003298 os.path.join(self.base_dir, "tests", "ztest", "*"),
Anas Nashifce2b4182020-03-24 14:40:28 -04003299 "--output-file", ztestfile,
3300 "--rc", "lcov_branch_coverage=1"], stdout=coveragelog)
3301
3302 if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
3303 subprocess.call(["lcov", "--gcov-tool", self.gcov_tool, "--remove",
3304 ztestfile,
Anas Nashiff6462a32020-03-29 19:02:51 -04003305 os.path.join(self.base_dir, "tests/ztest/test/*"),
Anas Nashifce2b4182020-03-24 14:40:28 -04003306 "--output-file", ztestfile,
3307 "--rc", "lcov_branch_coverage=1"],
3308 stdout=coveragelog)
3309 files = [coveragefile, ztestfile]
3310 else:
3311 files = [coveragefile]
3312
3313 for i in self.ignores:
3314 subprocess.call(
3315 ["lcov", "--gcov-tool", self.gcov_tool, "--remove",
3316 coveragefile, i, "--output-file",
3317 coveragefile, "--rc", "lcov_branch_coverage=1"],
3318 stdout=coveragelog)
3319
3320        # The --ignore-errors source option is added to avoid genhtml exiting due to
3321        # samples/application_development/external_lib/
3322 return subprocess.call(["genhtml", "--legend", "--branch-coverage",
3323 "--ignore-errors", "source",
3324 "-output-directory",
3325 os.path.join(outdir, "coverage")] + files,
3326 stdout=coveragelog)
3327
3328
3329class Gcovr(CoverageTool):
3330
3331 def __init__(self):
3332 super().__init__()
3333 self.ignores = []
3334
3335 def add_ignore_file(self, pattern):
3336 self.ignores.append('.*' + pattern + '.*')
3337
3338 def add_ignore_directory(self, pattern):
3339 self.ignores.append(pattern + '/.*')
3340
3341 @staticmethod
3342    def _interleave_list(prefix, values):
3343        tuple_list = [(prefix, item) for item in values]
3344 return [item for sublist in tuple_list for item in sublist]
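
    # For example (illustrative):
    #     _interleave_list("-e", ["foo/*", "bar/*"])
    #         -> ["-e", "foo/*", "-e", "bar/*"]
    # which is the repeated "-e PATTERN" form that gcovr expects on its
    # command line.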
3345
3346 def _generate(self, outdir, coveragelog):
3347 coveragefile = os.path.join(outdir, "coverage.json")
3348 ztestfile = os.path.join(outdir, "ztest.json")
3349
3350 excludes = Gcovr._interleave_list("-e", self.ignores)
3351
3352 # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
Anas Nashiff6462a32020-03-29 19:02:51 -04003353 subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
Anas Nashifce2b4182020-03-24 14:40:28 -04003354 self.gcov_tool, "-e", "tests/*"] + excludes +
3355 ["--json", "-o", coveragefile, outdir],
3356 stdout=coveragelog)
3357
Anas Nashiff6462a32020-03-29 19:02:51 -04003358 subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
Anas Nashifce2b4182020-03-24 14:40:28 -04003359 self.gcov_tool, "-f", "tests/ztest", "-e",
3360 "tests/ztest/test/*", "--json", "-o", ztestfile,
3361 outdir], stdout=coveragelog)
3362
3363 if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
3364 files = [coveragefile, ztestfile]
3365 else:
3366 files = [coveragefile]
3367
3368 subdir = os.path.join(outdir, "coverage")
3369 os.makedirs(subdir, exist_ok=True)
3370
3371 tracefiles = self._interleave_list("--add-tracefile", files)
3372
Anas Nashiff6462a32020-03-29 19:02:51 -04003373 return subprocess.call(["gcovr", "-r", self.base_dir, "--html",
Anas Nashifce2b4182020-03-24 14:40:28 -04003374 "--html-details"] + tracefiles +
3375 ["-o", os.path.join(subdir, "index.html")],
3376 stdout=coveragelog)


3377class HardwareMap:
3378
3379 schema_path = os.path.join(ZEPHYR_BASE, "scripts", "sanity_chk", "hwmap-schema.yaml")
3380
3381 manufacturer = [
3382 'ARM',
3383 'SEGGER',
3384 'MBED',
3385 'STMicroelectronics',
3386 'Atmel Corp.',
3387 'Texas Instruments',
3388 'Silicon Labs',
3389 'NXP Semiconductors',
3390 'Microchip Technology Inc.',
3391 'FTDI',
3392 'Digilent'
3393 ]
3394
3395 runner_mapping = {
3396 'pyocd': [
3397 'DAPLink CMSIS-DAP',
3398 'MBED CMSIS-DAP'
3399 ],
3400 'jlink': [
3401 'J-Link',
3402 'J-Link OB'
3403 ],
3404 'openocd': [
Erwan Gouriou2339fa02020-07-07 17:15:22 +02003405 'STM32 STLink', '^XDS110.*', 'STLINK-V3'
Anas Nashifce2b4182020-03-24 14:40:28 -04003406 ],
3407 'dediprog': [
3408 'TTL232R-3V3',
3409 'MCP2200 USB Serial Port Emulator'
3410 ]
3411 }
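
    # Illustrative example of a single hardware map entry as stored in the
    # YAML map file (all values are made up):
    #
    #   - connected: true
    #     id: "000683759358"
    #     platform: frdm_k64f
    #     product: DAPLink CMSIS-DAP
    #     runner: pyocd
    #     serial: /dev/ttyACM0
    #
    # These are the keys scan_hw() fills in below and write_map() persists.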
3412
3413 def __init__(self):
3414 self.detected = []
3415 self.connected_hardware = []
3416
3417 def load_device_from_cmdline(self, serial, platform):
3418 device = {
3419 "serial": serial,
3420 "platform": platform,
3421 "counter": 0,
3422 "available": True,
3423 "connected": True
3424 }
3425 self.connected_hardware.append(device)
3426
3427 def load_hardware_map(self, map_file):
3428 hwm_schema = scl.yaml_load(self.schema_path)
3429 self.connected_hardware = scl.yaml_load_verify(map_file, hwm_schema)
3430 for i in self.connected_hardware:
3431 i['counter'] = 0
3432
Martí Bolívar07dce822020-04-13 16:50:51 -07003433 def scan_hw(self, persistent=False):
Anas Nashifce2b4182020-03-24 14:40:28 -04003434 from serial.tools import list_ports
3435
Martí Bolívar07dce822020-04-13 16:50:51 -07003436 if persistent and platform.system() == 'Linux':
3437 # On Linux, /dev/serial/by-id provides symlinks to
3438 # '/dev/ttyACMx' nodes using names which are unique as
3439 # long as manufacturers fill out USB metadata nicely.
3440 #
3441 # This creates a map from '/dev/ttyACMx' device nodes
3442 # to '/dev/serial/by-id/usb-...' symlinks. The symlinks
3443 # go into the hardware map because they stay the same
3444 # even when the user unplugs / replugs the device.
3445 #
3446 # Some inexpensive USB/serial adapters don't result
3447 # in unique names here, though, so use of this feature
3448 # requires explicitly setting persistent=True.
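            #
            # Illustrative example of the resulting map (device names and
            # serial numbers are made up):
            #
            #   {'/dev/ttyACM0':
            #    '/dev/serial/by-id/usb-SEGGER_J-Link_000683759358-if00'}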
3449 by_id = Path('/dev/serial/by-id')
3450 def readlink(link):
3451 return str((by_id / link).resolve())
3452
3453 persistent_map = {readlink(link): str(link)
3454 for link in by_id.iterdir()}
3455 else:
3456 persistent_map = {}
3457
Anas Nashifce2b4182020-03-24 14:40:28 -04003458 serial_devices = list_ports.comports()
3459 logger.info("Scanning connected hardware...")
3460 for d in serial_devices:
3461 if d.manufacturer in self.manufacturer:
3462
3463 # TI XDS110 can have multiple serial devices for a single board
3464 # assume endpoint 0 is the serial, skip all others
3465 if d.manufacturer == 'Texas Instruments' and not d.location.endswith('0'):
3466 continue
3467 s_dev = {}
3468 s_dev['platform'] = "unknown"
3469 s_dev['id'] = d.serial_number
Martí Bolívar07dce822020-04-13 16:50:51 -07003470 s_dev['serial'] = persistent_map.get(d.device, d.device)
Anas Nashifce2b4182020-03-24 14:40:28 -04003471 s_dev['product'] = d.product
3472 s_dev['runner'] = 'unknown'
3473                for runner, products in self.runner_mapping.items():
3475 if d.product in products:
3476 s_dev['runner'] = runner
3477 continue
3478 # Try regex matching
3479 for p in products:
3480 if re.match(p, d.product):
3481 s_dev['runner'] = runner
3482
3483 s_dev['available'] = True
3484 s_dev['connected'] = True
3485 self.detected.append(s_dev)
3486 else:
3487 logger.warning("Unsupported device (%s): %s" % (d.manufacturer, d))
3488
3489 def write_map(self, hwm_file):
3490 # use existing map
3491 if os.path.exists(hwm_file):
3492 with open(hwm_file, 'r') as yaml_file:
Anas Nashifae61b7e2020-07-06 11:30:55 -04003493 hwm = yaml.load(yaml_file, Loader=SafeLoader)
Øyvind Rønningstad4813f462020-07-01 16:49:38 +02003494 hwm.sort(key=lambda x: x['serial'] or '')
3495
Anas Nashifce2b4182020-03-24 14:40:28 -04003496 # disconnect everything
3497 for h in hwm:
3498 h['connected'] = False
3499 h['serial'] = None
3500
Øyvind Rønningstad4813f462020-07-01 16:49:38 +02003501 self.detected.sort(key=lambda x: x['serial'] or '')
Anas Nashifce2b4182020-03-24 14:40:28 -04003502 for d in self.detected:
3503 for h in hwm:
Øyvind Rønningstad4813f462020-07-01 16:49:38 +02003504 if d['id'] == h['id'] and d['product'] == h['product'] and not h['connected'] and not d.get('match', False):
Anas Nashifce2b4182020-03-24 14:40:28 -04003505 h['connected'] = True
3506 h['serial'] = d['serial']
3507 d['match'] = True
3508
3509 new = list(filter(lambda n: not n.get('match', False), self.detected))
3510 hwm = hwm + new
3511
3512 logger.info("Registered devices:")
3513 self.dump(hwm)
3514
3515 with open(hwm_file, 'w') as yaml_file:
Anas Nashifae61b7e2020-07-06 11:30:55 -04003516 yaml.dump(hwm, yaml_file, Dumper=Dumper, default_flow_style=False)
Anas Nashifce2b4182020-03-24 14:40:28 -04003517
3518 else:
3519 # create new file
3520 with open(hwm_file, 'w') as yaml_file:
Anas Nashifae61b7e2020-07-06 11:30:55 -04003521 yaml.dump(self.detected, yaml_file, Dumper=Dumper, default_flow_style=False)
Anas Nashifce2b4182020-03-24 14:40:28 -04003522 logger.info("Detected devices:")
3523 self.dump(self.detected)
3524
3525 @staticmethod
3526 def dump(hwmap=[], filtered=[], header=[], connected_only=False):
3527 print("")
3528 table = []
3529 if not header:
3530 header = ["Platform", "ID", "Serial device"]
3531 for p in sorted(hwmap, key=lambda i: i['platform']):
3532 platform = p.get('platform')
3533 connected = p.get('connected', False)
3534 if filtered and platform not in filtered:
3535 continue
3536
3537 if not connected_only or connected:
3538 table.append([platform, p.get('id', None), p.get('serial')])
3539
3540 print(tabulate(table, headers=header, tablefmt="github"))
3541
3542
3543def size_report(sc):
3544 logger.info(sc.filename)
3545 logger.info("SECTION NAME VMA LMA SIZE HEX SZ TYPE")
3546 for i in range(len(sc.sections)):
3547 v = sc.sections[i]
3548
3549 logger.info("%-17s 0x%08x 0x%08x %8d 0x%05x %-7s" %
3550 (v["name"], v["virt_addr"], v["load_addr"], v["size"], v["size"],
3551 v["type"]))
3552
3553 logger.info("Totals: %d bytes (ROM), %d bytes (RAM)" %
3554 (sc.rom_size, sc.ram_size))
3555 logger.info("")
3556
3557
3558
3559def export_tests(filename, tests):
3560 with open(filename, "wt") as csvfile:
3561 fieldnames = ['section', 'subsection', 'title', 'reference']
3562 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
3563 for test in tests:
3564 data = test.split(".")
3565 if len(data) > 1:
3566 subsec = " ".join(data[1].split("_")).title()
3567 rowdict = {
3568 "section": data[0].capitalize(),
3569 "subsection": subsec,
3570 "title": test,
3571 "reference": test
3572 }
3573 cw.writerow(rowdict)
3574 else:
3575 logger.info("{} can't be exported".format(test))