blob: de71132d46d0981cb93179da35fcb2a82e09d057 [file] [log] [blame]
Anas Nashifce2b4182020-03-24 14:40:28 -04001#!/usr/bin/env python3
2# vim: set syntax=python ts=4 :
3#
4# Copyright (c) 2018 Intel Corporation
5# SPDX-License-Identifier: Apache-2.0
6
7import os
8import contextlib
9import string
10import mmap
11import sys
12import re
13import subprocess
14import select
15import shutil
16import shlex
17import signal
18import threading
19import concurrent.futures
20from collections import OrderedDict
21from threading import BoundedSemaphore
22import queue
23import time
24import csv
25import glob
26import concurrent
27import xml.etree.ElementTree as ET
28import logging
29from pathlib import Path
30from distutils.spawn import find_executable
31from colorama import Fore
32import yaml
Martí Bolívar07dce822020-04-13 16:50:51 -070033import platform
Anas Nashifce2b4182020-03-24 14:40:28 -040034
35try:
36 import serial
37except ImportError:
38 print("Install pyserial python module with pip to use --device-testing option.")
39
40try:
41 from tabulate import tabulate
42except ImportError:
43 print("Install tabulate python module with pip to use --device-testing option.")
44
Wentong Wu0d619ae2020-05-05 19:46:49 -040045try:
46 import psutil
47except ImportError:
48 print("Install psutil python module with pip to use --qemu-testing option.")
49
# Zephyr tree root; every path below is resolved relative to it.
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
if not ZEPHYR_BASE:
    sys.exit("$ZEPHYR_BASE environment variable undefined")

# Make the devicetree library importable before pulling it in.
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts", "dts"))
import edtlib

# Serializes mutation of the connected-hardware map across DeviceHandler
# worker threads (see DeviceHandler.make_device_available).
hw_map_local = threading.Lock()
# NOTE(review): not used within this chunk — presumably guards report
# generation elsewhere in the file; confirm before removing.
report_lock = threading.Lock()

# Use this for internal comparisons; that's what canonicalization is
# for. Don't use it when invoking other components of the build system
# to avoid confusing and hard to trace inconsistencies in error messages
# and logs, generated Makefiles, etc. compared to when users invoke these
# components directly.
# Note "normalization" is different from canonicalization, see os.path.
canonical_zephyr_base = os.path.realpath(ZEPHYR_BASE)

sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/"))

from sanity_chk import scl
from sanity_chk import expr_parser

# Module-wide logger; handlers/formatters are expected to be attached by
# the entry point elsewhere in this file.
logger = logging.getLogger('sanitycheck')
logger.setLevel(logging.DEBUG)

# Work queue shared by the build/run pipeline (LIFO ordering).
pipeline = queue.LifoQueue()
77
class CMakeCacheEntry:
    '''Represents a CMake cache entry.

    This class understands the type system in a CMakeCache.txt, and
    converts the following cache types to Python types:

       Cache Type    Python type
       ----------    -------------------------------------------
       FILEPATH      str
       PATH          str
       STRING        str OR list of str (if ';' is in the value)
       BOOL          bool
       INTERNAL      str OR list of str (if ';' is in the value)
       ----------    -------------------------------------------
    '''

    # Regular expression for a cache entry.
    #
    # CMake variable names can include escape characters, allowing a
    # wider set of names than is easy to match with a regular
    # expression. To be permissive here, use a non-greedy match up to
    # the first colon (':'). This breaks if the variable name has a
    # colon inside, but it's good enough.
    CACHE_ENTRY = re.compile(
        r'''(?P<name>.*?)                               # name
         :(?P<type>FILEPATH|PATH|STRING|BOOL|INTERNAL)  # type
         =(?P<value>.*)                                 # value
        ''', re.X)

    @classmethod
    def _to_bool(cls, val):
        '''Convert a CMake BOOL string into a Python bool.

        "True if the constant is 1, ON, YES, TRUE, Y, or a
        non-zero number. False if the constant is 0, OFF, NO,
        FALSE, N, IGNORE, NOTFOUND, the empty string, or ends in
        the suffix -NOTFOUND. Named boolean constants are
        case-insensitive. If the argument is not one of these
        constants, it is treated as a variable."

        https://cmake.org/cmake/help/v3.0/command/if.html

        Raises ValueError if *val* is none of the above and not a number.
        '''
        val = val.upper()
        if val in ('ON', 'YES', 'TRUE', 'Y'):
            # Return real booleans; the class contract documents BOOL -> bool
            # (the previous implementation returned the ints 1/0).
            return True
        elif val in ('OFF', 'NO', 'FALSE', 'N', 'IGNORE', 'NOTFOUND', ''):
            return False
        elif val.endswith('-NOTFOUND'):
            return False
        else:
            try:
                return int(val) != 0
            except ValueError as exc:
                raise ValueError('invalid bool {}'.format(val)) from exc

    @classmethod
    def from_line(cls, line, line_no):
        '''Parse one CMakeCache.txt line.

        Returns a CMakeCacheEntry, or None for comments, blank lines and
        lines that do not match the cache-entry grammar. Raises ValueError
        for a BOOL entry whose value is not a valid CMake boolean.
        '''
        # Comments can only occur at the beginning of a line.
        # (The value of an entry could contain a comment character).
        if line.startswith('//') or line.startswith('#'):
            return None

        # Whitespace-only lines do not contain cache entries.
        if not line.strip():
            return None

        m = cls.CACHE_ENTRY.match(line)
        if not m:
            return None

        name, type_, value = (m.group(g) for g in ('name', 'type', 'value'))
        if type_ == 'BOOL':
            try:
                value = cls._to_bool(value)
            except ValueError as exc:
                args = exc.args + ('on line {}: {}'.format(line_no, line),)
                # Unpack args so the ValueError carries flat arguments
                # (the old code passed the whole tuple as one argument,
                # producing a nested-tuple message).
                raise ValueError(*args) from exc
        elif type_ in ['STRING', 'INTERNAL']:
            # If the value is a CMake list (i.e. is a string which
            # contains a ';'), convert to a Python list.
            if ';' in value:
                value = value.split(';')

        return CMakeCacheEntry(name, value)

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __str__(self):
        fmt = 'CMakeCacheEntry(name={}, value={})'
        return fmt.format(self.name, self.value)
170
171
class CMakeCache:
    '''Parses and represents a CMake cache file.'''

    @staticmethod
    def from_file(cache_file):
        '''Alternate constructor; equivalent to CMakeCache(cache_file).'''
        return CMakeCache(cache_file)

    def __init__(self, cache_file):
        self.cache_file = cache_file
        self.load(cache_file)

    def load(self, cache_file):
        '''(Re)populate the entry map from *cache_file*.'''
        with open(cache_file, 'r') as fp:
            parsed = [CMakeCacheEntry.from_line(text, lineno)
                      for lineno, text in enumerate(fp)]
        # from_line() yields None for comments/blanks; keep real entries,
        # preserving file order, keyed by variable name.
        self._entries = OrderedDict((e.name, e) for e in parsed if e)

    def get(self, name, default=None):
        '''Return the value of entry *name*, or *default* if absent.'''
        entry = self._entries.get(name)
        return default if entry is None else entry.value

    def get_list(self, name, default=None):
        '''Return entry *name* coerced to a list of strings.

        A scalar string becomes a one-element list (empty string -> []);
        a missing entry yields *default* (default: empty list).
        '''
        if default is None:
            default = []
        entry = self._entries.get(name)
        if entry is None:
            return default
        value = entry.value
        if isinstance(value, list):
            return value
        if isinstance(value, str):
            return [value] if value else []
        msg = 'invalid value {} type {}'
        raise RuntimeError(msg.format(value, type(value)))

    def __contains__(self, name):
        return name in self._entries

    def __getitem__(self, name):
        return self._entries[name].value

    def __setitem__(self, name, entry):
        if not isinstance(entry, CMakeCacheEntry):
            msg = 'improper type {} for value {}, expecting CMakeCacheEntry'
            raise TypeError(msg.format(type(entry), entry))
        self._entries[name] = entry

    def __delitem__(self, name):
        del self._entries[name]

    def __iter__(self):
        # Iterate over the CMakeCacheEntry objects, in file order.
        return iter(self._entries.values())
232
233
class SanityCheckException(Exception):
    """Base class for all sanitycheck-specific exceptions."""
    pass
236
237
class SanityRuntimeError(SanityCheckException):
    """Generic runtime failure (e.g. raised by SizeCalculator for a
    non-ELF binary or missing symbol information)."""
    pass
240
241
class ConfigurationError(SanityCheckException):
    """Error found in a configuration file.

    The exception message is prefixed with the offending file's path.
    """

    def __init__(self, cfile, message):
        super().__init__(cfile + ": " + message)
245
246
class BuildError(SanityCheckException):
    """Failure during the build stage (raised by code outside this chunk)."""
    pass
249
250
class ExecutionError(SanityCheckException):
    """Failure during test execution (raised by code outside this chunk)."""
    pass
253
254
class HarnessImporter:
    """Dynamically loads a harness class from scripts/sanity_chk/harness.py.

    After construction, self.instance holds an instantiated harness
    object: the class named *name* if one was given, otherwise the
    default "Test" class.
    """

    def __init__(self, name):
        # Make the harness module importable, then instantiate the
        # requested class (falling back to the generic "Test" harness).
        sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
        module = __import__("harness")
        harness_class = getattr(module, name) if name else getattr(module, "Test")
        self.instance = harness_class()
266
267
class Handler:
    def __init__(self, instance, type_str="build"):
        """Constructor

        @param instance Test Instance
        @param type_str handler flavor label (e.g. "build", "device")
        """
        # Guards state/duration, which are read and written across threads.
        self.lock = threading.Lock()

        self.state = "waiting"
        self.run = False
        self.duration = 0
        self.type_str = type_str

        self.binary = None
        self.pid_fn = None
        self.call_make_run = False

        # Cache the bits of the test instance this handler needs.
        self.name = instance.name
        self.instance = instance
        self.timeout = instance.testcase.timeout
        self.sourcedir = instance.testcase.source_dir
        self.build_dir = instance.build_dir
        self.log = os.path.join(self.build_dir, "handler.log")
        self.returncode = 0
        self.set_state("running", self.duration)
        self.generator = None
        self.generator_cmd = None

        self.args = []

    def set_state(self, state, duration):
        """Atomically record the handler state and elapsed duration."""
        with self.lock:
            self.state = state
            self.duration = duration

    def get_state(self):
        """Return the (state, duration) pair under the lock."""
        with self.lock:
            return (self.state, self.duration)

    def record(self, harness):
        """Append the harness's recorded rows to recording.csv, if any."""
        if not harness.recording:
            return
        filename = os.path.join(self.build_dir, "recording.csv")
        with open(filename, "at") as csvfile:
            cw = csv.writer(csvfile, harness.fieldnames, lineterminator=os.linesep)
            cw.writerow(harness.fieldnames)
            for row in harness.recording:
                cw.writerow(row)
317
318
class BinaryHandler(Handler):
    def __init__(self, instance, type_str):
        """Constructor

        @param instance Test Instance
        """
        super().__init__(instance, type_str)

        self.terminated = False

        # Tool options
        self.valgrind = False
        self.lsan = False
        self.asan = False
        self.coverage = False

    def try_kill_process_by_pid(self):
        """Kill the process whose pid was recorded in self.pid_fn, if any."""
        if self.pid_fn:
            pid = int(open(self.pid_fn).read())
            os.unlink(self.pid_fn)
            self.pid_fn = None  # clear so we don't try to kill the binary twice
            try:
                os.kill(pid, signal.SIGTERM)
            except ProcessLookupError:
                pass

    def terminate(self, proc):
        """Terminate *proc* and any pid-file child, then mark us terminated."""
        # encapsulate terminate functionality so we do it consistently where ever
        # we might want to terminate the proc. We need try_kill_process_by_pid
        # because of both how newer ninja (1.6.0 or greater) and .NET / renode
        # work. Newer ninja's don't seem to pass SIGTERM down to the children
        # so we need to use try_kill_process_by_pid.
        self.try_kill_process_by_pid()
        proc.terminate()
        # sleep for a while before attempting to kill
        time.sleep(0.5)
        proc.kill()
        self.terminated = True

    def _output_reader(self, proc, harness):
        """Stream proc's stdout into the log file and the harness.

        Runs in a worker thread; exits when the harness reports a state
        or the process's stdout reaches EOF.
        """
        log_out_fp = open(self.log, "wt")
        for line in iter(proc.stdout.readline, b''):
            logger.debug("OUTPUT: {0}".format(line.decode('utf-8').rstrip()))
            log_out_fp.write(line.decode('utf-8'))
            log_out_fp.flush()
            harness.handle(line.decode('utf-8').rstrip())
            if harness.state:
                try:
                    # POSIX arch based ztests end on their own,
                    # so let's give it up to 100ms to do so
                    proc.wait(0.1)
                except subprocess.TimeoutExpired:
                    self.terminate(proc)
                break

        log_out_fp.close()

    def handle(self):
        """Run the test binary (or 'make run'), collect results and state."""

        harness_name = self.instance.testcase.harness.capitalize()
        harness_import = HarnessImporter(harness_name)
        harness = harness_import.instance
        harness.configure(self.instance)

        if self.call_make_run:
            command = [self.generator_cmd, "run"]
        else:
            command = [self.binary]

        run_valgrind = False
        if self.valgrind and shutil.which("valgrind"):
            command = ["valgrind", "--error-exitcode=2",
                       "--leak-check=full",
                       "--suppressions=" + ZEPHYR_BASE + "/scripts/valgrind.supp",
                       "--log-file=" + self.build_dir + "/valgrind.log"
                       ] + command
            run_valgrind = True

        logger.debug("Spawning process: " +
                     " ".join(shlex.quote(word) for word in command) + os.linesep +
                     "in directory: " + self.build_dir)

        start_time = time.time()

        env = os.environ.copy()
        if self.asan:
            env["ASAN_OPTIONS"] = "log_path=stdout:" + \
                                  env.get("ASAN_OPTIONS", "")
            if not self.lsan:
                # Prepend with an explicit ':' separator: the previous code
                # appended "detect_leaks=0" directly, which fused it with any
                # pre-existing ASAN_OPTIONS value into one invalid token.
                env["ASAN_OPTIONS"] = "detect_leaks=0:" + env["ASAN_OPTIONS"]

        with subprocess.Popen(command, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, cwd=self.build_dir, env=env) as proc:
            logger.debug("Spawning BinaryHandler Thread for %s" % self.name)
            t = threading.Thread(target=self._output_reader, args=(proc, harness,), daemon=True)
            t.start()
            t.join(self.timeout)
            if t.is_alive():
                self.terminate(proc)
                t.join()
            proc.wait()
            self.returncode = proc.returncode

        handler_time = time.time() - start_time

        if self.coverage:
            # Run gcov with GCOV_PREFIX in its environment. The previous
            # code put "GCOV_PREFIX=..." as the first element of an argv
            # list combined with shell=True, which made the assignment the
            # shell *command* — gcov itself never ran.
            gcov_env = os.environ.copy()
            gcov_env["GCOV_PREFIX"] = self.build_dir
            subprocess.call(["gcov", self.sourcedir, "-b", "-s", self.build_dir],
                            env=gcov_env)

        self.try_kill_process_by_pid()

        # FIXME: This is needed when killing the simulator, the console is
        # garbled and needs to be reset. Did not find a better way to do that.

        subprocess.call(["stty", "sane"])
        self.instance.results = harness.tests

        if not self.terminated and self.returncode != 0:
            # When a process is killed, the default handler returns 128 + SIGTERM
            # so in that case the return code itself is not meaningful
            self.set_state("failed", handler_time)
            self.instance.reason = "Failed"
        elif run_valgrind and self.returncode == 2:
            self.set_state("failed", handler_time)
            self.instance.reason = "Valgrind error"
        elif harness.state:
            self.set_state(harness.state, handler_time)
            if harness.state == "failed":
                self.instance.reason = "Failed"
        else:
            self.set_state("timeout", handler_time)
            self.instance.reason = "Timeout"

        self.record(harness)
453
454
class DeviceHandler(Handler):
    """Flashes a test image to real hardware and judges the run by the
    output captured from the board's serial port."""

    def __init__(self, instance, type_str):
        """Constructor

        @param instance Test Instance
        """
        super().__init__(instance, type_str)

        # TestSuite that owns the connected-hardware map; set by the caller
        # before handle() is invoked.
        self.suite = None

    def monitor_serial(self, ser, halt_fileno, harness):
        """Read the serial port line-by-line, feeding the harness.

        Runs in a worker thread. Stops when *halt_fileno* becomes readable
        (the main thread's halt signal), on serial error, or once the
        harness reports a state.
        """
        log_out_fp = open(self.log, "wt")

        ser_fileno = ser.fileno()
        readlist = [halt_fileno, ser_fileno]

        while ser.isOpen():
            readable, _, _ = select.select(readlist, [], [], self.timeout)

            if halt_fileno in readable:
                logger.debug('halted')
                ser.close()
                break
            if ser_fileno not in readable:
                continue  # Timeout.

            serial_line = None
            try:
                serial_line = ser.readline()
            except TypeError:
                pass
            except serial.SerialException:
                ser.close()
                break

            # Just because ser_fileno has data doesn't mean an entire line
            # is available yet.
            if serial_line:
                sl = serial_line.decode('utf-8', 'ignore').lstrip()
                logger.debug("DEVICE: {0}".format(sl.rstrip()))

                log_out_fp.write(sl)
                log_out_fp.flush()
                harness.handle(sl.rstrip())

            if harness.state:
                ser.close()
                break

        log_out_fp.close()

    def device_is_available(self, device):
        """Return True if some connected board for platform *device* is free."""
        for i in self.suite.connected_hardware:
            if i['platform'] == device and i['available'] and i['serial']:
                return True

        return False

    def get_available_device(self, device):
        """Claim and return a free board entry for platform *device*.

        Marks the entry unavailable and bumps its usage counter; returns
        None if nothing is free.
        """
        for i in self.suite.connected_hardware:
            if i['platform'] == device and i['available'] and i['serial']:
                i['available'] = False
                i['counter'] += 1
                return i

        return None

    def make_device_available(self, serial):
        """Release the board identified by its *serial* device back to the pool."""
        with hw_map_local:
            for i in self.suite.connected_hardware:
                if i['serial'] == serial:
                    i['available'] = True

    @staticmethod
    def run_custom_script(script, timeout):
        """Run a user-supplied pre/post script, killing it after *timeout* s."""
        with subprocess.Popen(script, stderr=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
            try:
                stdout, _ = proc.communicate(timeout=timeout)
                logger.debug(stdout.decode())

            except subprocess.TimeoutExpired:
                proc.kill()
                proc.communicate()
                logger.error("{} timed out".format(script))

    def handle(self):
        """Flash the image and monitor serial output until done or timeout."""
        out_state = "failed"

        # Build the flash command: west (optionally with a runner and
        # extra args) or the build generator's "flash" target.
        if self.suite.west_flash:
            command = ["west", "flash", "--skip-rebuild", "-d", self.build_dir]
            if self.suite.west_runner:
                command.append("--runner")
                command.append(self.suite.west_runner)
            # There are three ways this option is used.
            # 1) bare: --west-flash
            #    This results in options.west_flash == []
            # 2) with a value: --west-flash="--board-id=42"
            #    This results in options.west_flash == "--board-id=42"
            # 3) Multiple values: --west-flash="--board-id=42,--erase"
            #    This results in options.west_flash == "--board-id=42 --erase"
            if self.suite.west_flash != []:
                command.append('--')
                command.extend(self.suite.west_flash.split(','))
        else:
            command = [self.generator_cmd, "-C", self.build_dir, "flash"]

        # Block until a board of the right platform is free, then claim it.
        while not self.device_is_available(self.instance.platform.name):
            logger.debug("Waiting for device {} to become available".format(self.instance.platform.name))
            time.sleep(1)

        hardware = self.get_available_device(self.instance.platform.name)

        # A per-board runner entry in the hardware map overrides the
        # command built above, with runner-specific board-id plumbing.
        runner = hardware.get('runner', None)
        if runner:
            board_id = hardware.get("probe_id", hardware.get("id", None))
            product = hardware.get("product", None)
            command = ["west", "flash", "--skip-rebuild", "-d", self.build_dir]
            command.append("--runner")
            command.append(hardware.get('runner', None))
            if runner == "pyocd":
                command.append("--board-id")
                command.append(board_id)
            elif runner == "nrfjprog":
                command.append('--')
                command.append("--snr")
                command.append(board_id)
            elif runner == "openocd" and product == "STM32 STLink":
                command.append('--')
                command.append("--cmd-pre-init")
                command.append("hla_serial %s" % (board_id))
            elif runner == "openocd" and product == "EDBG CMSIS-DAP":
                command.append('--')
                command.append("--cmd-pre-init")
                command.append("cmsis_dap_serial %s" % (board_id))
            elif runner == "jlink":
                command.append("--tool-opt=-SelectEmuBySN %s" % (board_id))

        serial_device = hardware['serial']

        try:
            ser = serial.Serial(
                serial_device,
                baudrate=115200,
                parity=serial.PARITY_NONE,
                stopbits=serial.STOPBITS_ONE,
                bytesize=serial.EIGHTBITS,
                timeout=self.timeout
            )
        except serial.SerialException as e:
            # Can't even open the port: fail fast and release the board.
            self.set_state("failed", 0)
            self.instance.reason = "Failed"
            logger.error("Serial device error: %s" % (str(e)))
            self.make_device_available(serial_device)
            return

        ser.flush()

        harness_name = self.instance.testcase.harness.capitalize()
        harness_import = HarnessImporter(harness_name)
        harness = harness_import.instance
        harness.configure(self.instance)
        # write_pipe lets us tell the monitor thread to halt (see
        # monitor_serial's halt_fileno).
        read_pipe, write_pipe = os.pipe()
        start_time = time.time()

        pre_script = hardware.get('pre_script')
        post_flash_script = hardware.get('post_flash_script')
        post_script = hardware.get('post_script')

        if pre_script:
            self.run_custom_script(pre_script, 30)

        t = threading.Thread(target=self.monitor_serial, daemon=True,
                             args=(ser, read_pipe, harness))
        t.start()

        d_log = "{}/device.log".format(self.instance.build_dir)
        logger.debug('Flash command: %s', command)
        try:
            stdout = stderr = None
            with subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
                try:
                    # Flashing gets a fixed 30 s budget, independent of the
                    # test's own timeout.
                    (stdout, stderr) = proc.communicate(timeout=30)
                    logger.debug(stdout.decode())

                    if proc.returncode != 0:
                        self.instance.reason = "Device issue (Flash?)"
                        with open(d_log, "w") as dlog_fp:
                            dlog_fp.write(stderr.decode())
                except subprocess.TimeoutExpired:
                    proc.kill()
                    (stdout, stderr) = proc.communicate()
                    self.instance.reason = "Device issue (Timeout)"

                    with open(d_log, "w") as dlog_fp:
                        dlog_fp.write(stderr.decode())

        except subprocess.CalledProcessError:
            os.write(write_pipe, b'x')  # halt the thread

        if post_flash_script:
            self.run_custom_script(post_flash_script, 30)

        # Wait for the serial monitor to finish (harness done or timeout).
        t.join(self.timeout)
        if t.is_alive():
            logger.debug("Timed out while monitoring serial output on {}".format(self.instance.platform.name))
            out_state = "timeout"

        if ser.isOpen():
            ser.close()

        os.close(write_pipe)
        os.close(read_pipe)

        handler_time = time.time() - start_time

        if out_state == "timeout":
            # Mark every test case the harness never reported as blocked.
            for c in self.instance.testcase.cases:
                if c not in harness.tests:
                    harness.tests[c] = "BLOCK"

            self.instance.reason = "Timeout"

        self.instance.results = harness.tests

        # Harness verdict wins over the local timeout/failed default.
        if harness.state:
            self.set_state(harness.state, handler_time)
            if harness.state == "failed":
                self.instance.reason = "Failed"
        else:
            self.set_state(out_state, handler_time)

        if post_script:
            self.run_custom_script(post_script, 30)

        self.make_device_available(serial_device)

        self.record(harness)
694
695
class QEMUHandler(Handler):
    """Spawns a thread to monitor QEMU output from pipes

    We pass QEMU_PIPE to 'make run' and monitor the pipes for output.
    We need to do this as once qemu starts, it runs forever until killed.
    Test cases emit special messages to the console as they run, we check
    for these to collect whether the test passed or failed.
    """

    def __init__(self, instance, type_str):
        """Constructor

        @param instance Test instance
        """

        super().__init__(instance, type_str)
        # Base name for the QEMU console FIFO pair (".in"/".out" suffixes).
        self.fifo_fn = os.path.join(instance.build_dir, "qemu-fifo")

        # File where the build system records QEMU's pid.
        self.pid_fn = os.path.join(instance.build_dir, "qemu.pid")

    @staticmethod
    def _get_cpu_time(pid):
        """get process CPU time.

        The guest virtual time in QEMU icount mode isn't host time and
        it's maintained by counting guest instructions, so we use QEMU
        process execution time to mostly simulate the time of guest OS.
        """
        proc = psutil.Process(pid)
        cpu_time = proc.cpu_times()
        return cpu_time.user + cpu_time.system

    @staticmethod
    def _thread(handler, timeout, outdir, logfile, fifo_fn, pid_fn, results, harness):
        """Worker thread: create the FIFO pair, read QEMU's console output
        character by character, feed complete lines to the harness, and
        finally record the verdict on *handler*."""
        fifo_in = fifo_fn + ".in"
        fifo_out = fifo_fn + ".out"

        # These in/out nodes are named from QEMU's perspective, not ours
        if os.path.exists(fifo_in):
            os.unlink(fifo_in)
        os.mkfifo(fifo_in)
        if os.path.exists(fifo_out):
            os.unlink(fifo_out)
        os.mkfifo(fifo_out)

        # We don't do anything with out_fp but we need to open it for
        # writing so that QEMU doesn't block, due to the way pipes work
        out_fp = open(fifo_in, "wb")
        # Disable internal buffering, we don't
        # want read() or poll() to ever block if there is data in there
        in_fp = open(fifo_out, "rb", buffering=0)
        log_out_fp = open(logfile, "wt")

        start_time = time.time()
        timeout_time = start_time + timeout
        p = select.poll()
        p.register(in_fp, select.POLLIN)
        out_state = None

        line = ""
        timeout_extended = False

        # QEMU's pid may not exist yet; re-checked inside the loop below.
        pid = 0
        if os.path.exists(pid_fn):
            pid = int(open(pid_fn).read())

        while True:
            this_timeout = int((timeout_time - time.time()) * 1000)
            if this_timeout < 0 or not p.poll(this_timeout):
                if pid and this_timeout > 0:
                    # there is possibility we polled nothing because
                    # of host not scheduled QEMU process enough CPU
                    # time during p.poll(this_timeout)
                    cpu_time = QEMUHandler._get_cpu_time(pid)
                    if cpu_time < timeout and not out_state:
                        # Extend the deadline by the CPU time QEMU has not
                        # yet consumed, then keep polling.
                        timeout_time = time.time() + (timeout - cpu_time)
                        continue

                if not out_state:
                    out_state = "timeout"
                break

            if pid == 0 and os.path.exists(pid_fn):
                pid = int(open(pid_fn).read())

            try:
                c = in_fp.read(1).decode("utf-8")
            except UnicodeDecodeError:
                # Test is writing something weird, fail
                out_state = "unexpected byte"
                break

            if c == "":
                # EOF, this shouldn't happen unless QEMU crashes
                out_state = "unexpected eof"
                break
            line = line + c
            if c != "\n":
                continue

            # line contains a full line of data output from QEMU
            log_out_fp.write(line)
            log_out_fp.flush()
            line = line.strip()
            logger.debug("QEMU: %s" % line)

            harness.handle(line)
            if harness.state:
                # if we have registered a fail make sure the state is not
                # overridden by a false success message coming from the
                # testsuite
                if out_state != 'failed':
                    out_state = harness.state

                # if we get some state, that means test is doing well, we reset
                # the timeout and wait for 2 more seconds to catch anything
                # printed late. We wait much longer if code
                # coverage is enabled since dumping this information can
                # take some time.
                if not timeout_extended or harness.capture_coverage:
                    timeout_extended = True
                    if harness.capture_coverage:
                        timeout_time = time.time() + 30
                    else:
                        timeout_time = time.time() + 2
            line = ""

        handler.record(harness)

        handler_time = time.time() - start_time
        logger.debug("QEMU complete (%s) after %f seconds" %
                     (out_state, handler_time))
        handler.set_state(out_state, handler_time)
        if out_state == "timeout":
            handler.instance.reason = "Timeout"
        elif out_state == "failed":
            handler.instance.reason = "Failed"

        log_out_fp.close()
        out_fp.close()
        in_fp.close()
        if pid:
            try:
                # NOTE(review): the inner "if pid" is redundant with the
                # guard just above; kept as-is (behavior unchanged).
                if pid:
                    os.kill(pid, signal.SIGTERM)
            except ProcessLookupError:
                # Oh well, as long as it's dead! User probably sent Ctrl-C
                pass

        os.unlink(fifo_in)
        os.unlink(fifo_out)

    def handle(self):
        """Start the monitor thread, run 'make run' for QEMU, and reconcile
        the process exit status with the monitor's verdict."""
        self.results = {}
        self.run = True

        # We pass this to QEMU which looks for fifos with .in and .out
        # suffixes.
        self.fifo_fn = os.path.join(self.instance.build_dir, "qemu-fifo")

        self.pid_fn = os.path.join(self.instance.build_dir, "qemu.pid")
        if os.path.exists(self.pid_fn):
            os.unlink(self.pid_fn)

        self.log_fn = self.log

        harness_import = HarnessImporter(self.instance.testcase.harness.capitalize())
        harness = harness_import.instance
        harness.configure(self.instance)
        self.thread = threading.Thread(name=self.name, target=QEMUHandler._thread,
                                       args=(self, self.timeout, self.build_dir,
                                             self.log_fn, self.fifo_fn,
                                             self.pid_fn, self.results, harness))

        self.instance.results = harness.tests
        self.thread.daemon = True
        logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
        self.thread.start()
        # Reset the console; QEMU can leave it garbled.
        subprocess.call(["stty", "sane"])

        logger.debug("Running %s (%s)" % (self.name, self.type_str))
        command = [self.generator_cmd]
        command += ["-C", self.build_dir, "run"]

        with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.build_dir) as proc:
            logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
            try:
                proc.wait(self.timeout)
            except subprocess.TimeoutExpired:
                # sometimes QEMU can't handle SIGTERM signal correctly
                # in that case kill -9 QEMU process directly and leave
                # sanitycheck judge testing result by console output
                if os.path.exists(self.pid_fn):
                    qemu_pid = int(open(self.pid_fn).read())
                    try:
                        os.kill(qemu_pid, signal.SIGKILL)
                    except ProcessLookupError:
                        pass
                    proc.wait()
                    # Treat a force-killed QEMU as success here; the monitor
                    # thread's harness verdict decides the real outcome.
                    self.returncode = 0
                else:
                    proc.terminate()
                    proc.kill()
                    self.returncode = proc.returncode
            else:
                self.returncode = proc.returncode

        if os.path.exists(self.pid_fn):
            os.unlink(self.pid_fn)

        if self.returncode != 0:
            self.set_state("failed", 0)
            self.instance.reason = "Exited with {}".format(self.returncode)

    def get_fifo(self):
        """Return the base path of the QEMU console FIFO pair."""
        return self.fifo_fn
912
913
class SizeCalculator:
    """Computes RAM/ROM footprint of a Zephyr ELF by classifying the
    sections reported by objdump."""

    # Sections that occupy RAM only at runtime (zero-init / noinit).
    alloc_sections = [
        "bss",
        "noinit",
        "app_bss",
        "app_noinit",
        "ccm_bss",
        "ccm_noinit"
    ]

    # Initialized data: stored in ROM and copied to RAM (counts for both).
    rw_sections = [
        "datas",
        "initlevel",
        "exceptions",
        "initshell",
        "_static_thread_area",
        "_k_timer_area",
        "_k_mem_slab_area",
        "_k_mem_pool_area",
        "sw_isr_table",
        "_k_sem_area",
        "_k_mutex_area",
        "app_shmem_regions",
        "_k_fifo_area",
        "_k_lifo_area",
        "_k_stack_area",
        "_k_msgq_area",
        "_k_mbox_area",
        "_k_pipe_area",
        "net_if",
        "net_if_dev",
        "net_l2_data",
        "_k_queue_area",
        "_net_buf_pool_area",
        "app_datas",
        "kobject_data",
        "mmu_tables",
        "app_pad",
        "priv_stacks",
        "ccm_data",
        "usb_descriptor",
        "usb_data", "usb_bos_desc",
        "uart_mux",
        'log_backends_sections',
        'log_dynamic_sections',
        'log_const_sections',
        "app_smem",
        'shell_root_cmds_sections',
        # NOTE(review): 'log_const_sections' appears twice in this list;
        # harmless for membership tests but could be deduplicated.
        'log_const_sections',
        "font_entry_sections",
        "priv_stacks_noinit",
        "_GCOV_BSS_SECTION_NAME",
        "gcov",
        "nocache"
    ]

    # These get copied into RAM only on non-XIP
    ro_sections = [
        "rom_start",
        "text",
        "ctors",
        "init_array",
        "reset",
        "object_access",
        "rodata",
        "devconfig",
        "net_l2",
        "vector",
        "sw_isr_table",
        "_settings_handlers_area",
        "_bt_channels_area",
        "_bt_br_channels_area",
        "_bt_services_area",
        "vectors",
        "net_socket_register",
        "net_ppp_proto"
    ]

    def __init__(self, filename, extra_sections):
        """Constructor

        @param filename Path to the output binary
            The <filename> is parsed by objdump to determine section sizes
        @param extra_sections Additional section names to treat as known
            (not flagged as unrecognized)
        """
        # Make sure this is an ELF binary
        with open(filename, "rb") as f:
            magic = f.read(4)

        # Deliberate pattern: raise, catch our own exception, print it and
        # exit(2) instead of propagating a traceback to the user.
        try:
            if magic != b'\x7fELF':
                raise SanityRuntimeError("%s is not an ELF binary" % filename)
        except Exception as e:
            print(str(e))
            sys.exit(2)

        # Search for CONFIG_XIP in the ELF's list of symbols using NM and AWK.
        # GREP can not be used as it returns an error if the symbol is not
        # found.
        is_xip_command = "nm " + filename + \
                         " | awk '/CONFIG_XIP/ { print $3 }'"
        is_xip_output = subprocess.check_output(
            is_xip_command, shell=True, stderr=subprocess.STDOUT).decode(
            "utf-8").strip()
        try:
            if is_xip_output.endswith("no symbols"):
                raise SanityRuntimeError("%s has no symbol information" % filename)
        except Exception as e:
            print(str(e))
            sys.exit(2)

        # XIP is enabled iff the CONFIG_XIP symbol was found.
        self.is_xip = (len(is_xip_output) != 0)

        self.filename = filename
        self.sections = []      # per-section dicts built by _calculate_sizes
        self.rom_size = 0
        self.ram_size = 0
        self.extra_sections = extra_sections

        self._calculate_sizes()

    def get_ram_size(self):
        """Get the amount of RAM the application will use up on the device

        @return amount of RAM, in bytes
        """
        return self.ram_size

    def get_rom_size(self):
        """Get the size of the data that this application uses on device's flash

        @return amount of ROM, in bytes
        """
        return self.rom_size

    def unrecognized_sections(self):
        """Get a list of sections inside the binary that weren't recognized

        @return list of unrecognized section names
        """
        slist = []
        for v in self.sections:
            if not v["recognized"]:
                slist.append(v["name"])
        return slist

    def _calculate_sizes(self):
        """ Calculate RAM and ROM usage by section """
        objdump_command = "objdump -h " + self.filename
        objdump_output = subprocess.check_output(
            objdump_command, shell=True).decode("utf-8").splitlines()

        for line in objdump_output:
            words = line.split()

            if not words:  # Skip lines that are too short
                continue

            index = words[0]
            if not index[0].isdigit():  # Skip lines that do not start
                continue  # with a digit

            name = words[1]  # Skip lines with section names
            if name[0] == '.':  # starting with '.'
                continue

            # TODO this doesn't actually reflect the size in flash or RAM as
            # it doesn't include linker-imposed padding between sections.
            # It is close though.
            size = int(words[2], 16)
            if size == 0:
                continue

            load_addr = int(words[4], 16)
            virt_addr = int(words[3], 16)

            # Add section to memory use totals (for both non-XIP and XIP scenarios)
            # Unrecognized section names are not included in the calculations.
            recognized = True
            if name in SizeCalculator.alloc_sections:
                self.ram_size += size
                stype = "alloc"
            elif name in SizeCalculator.rw_sections:
                self.ram_size += size
                self.rom_size += size
                stype = "rw"
            elif name in SizeCalculator.ro_sections:
                self.rom_size += size
                if not self.is_xip:
                    self.ram_size += size
                stype = "ro"
            else:
                stype = "unknown"
                if name not in self.extra_sections:
                    recognized = False

            self.sections.append({"name": name, "load_addr": load_addr,
                                  "size": size, "virt_addr": virt_addr,
                                  "type": stype, "recognized": recognized})
1112
1113
1114
class SanityConfigParser:
    """Class to read test case files with semantic checking
    """

    def __init__(self, filename, schema):
        """Instantiate a new SanityConfigParser object

        @param filename Source .yaml file to read
        @param schema Validation schema passed to scl.yaml_load_verify()
        """
        self.data = {}
        self.schema = schema
        self.filename = filename
        self.tests = {}
        self.common = {}

    def load(self):
        """Load and schema-validate the YAML file, splitting out the
        'tests' and 'common' sections when present."""
        self.data = scl.yaml_load_verify(self.filename, self.schema)

        if 'tests' in self.data:
            self.tests = self.data['tests']
        if 'common' in self.data:
            self.common = self.data['common']

    def _cast_value(self, value, typestr):
        """Convert a raw YAML value to the Python type named by typestr.

        Supported type strings: "str", "float", "int", "bool", "map",
        "list"/"set" (whitespace-split when the value is a string), and
        "list:<type>"/"set:<type>" which additionally cast each element.

        @raises ConfigurationError for an unknown type string
        """
        # Strings get stripped; any other value passes through unchanged so
        # the type-specific branches below never see an unbound name.
        if isinstance(value, str):
            v = value.strip()
        else:
            v = value

        if typestr == "str":
            return v

        elif typestr == "float":
            return float(value)

        elif typestr == "int":
            return int(value)

        elif typestr == "bool":
            # Schema validation already guarantees a real boolean here.
            return value

        elif typestr.startswith("list") and isinstance(value, list):
            return value
        elif typestr.startswith("list") and isinstance(value, str):
            vs = v.split()
            if len(typestr) > 4 and typestr[4] == ":":
                # "list:<type>": cast each whitespace-separated element
                return [self._cast_value(vsi, typestr[5:]) for vsi in vs]
            else:
                return vs

        elif typestr.startswith("set"):
            vs = v.split()
            if len(typestr) > 3 and typestr[3] == ":":
                # "set:<type>": cast each whitespace-separated element
                return {self._cast_value(vsi, typestr[4:]) for vsi in vs}
            else:
                return set(vs)

        elif typestr.startswith("map"):
            return value
        else:
            # BUGFIX: report the unknown *type string*, not the value.
            raise ConfigurationError(
                self.filename, "unknown type '%s'" % typestr)

    def get_test(self, name, valid_keys):
        """Get a dictionary representing the keys/values within a test

        @param name The test in the .yaml file to retrieve data from
        @param valid_keys A dictionary representing the intended semantics
            for this test. Each key in this dictionary is a key that could
            be specified, if a key is given in the .yaml file which isn't in
            here, it will generate an error. Each value in this dictionary
            is another dictionary containing metadata:

            "default" - Default value if not given
            "type" - Data type to convert the text value to. Simple types
                supported are "str", "float", "int", "bool" which will get
                converted to respective Python data types. "set" and "list"
                may also be specified which will split the value by
                whitespace (but keep the elements as strings). finally,
                "list:<type>" and "set:<type>" may be given which will
                perform a type conversion after splitting the value up.
            "required" - If true, raise an error if not defined. If false
                and "default" isn't specified, a type conversion will be
                done on an empty string
        @return A dictionary containing the test key-value pairs with
            type conversion and default values filled in per valid_keys
        """

        # Start from the "common" section; per-test values are merged on top.
        d = {}
        for k, v in self.common.items():
            d[k] = v

        for k, v in self.tests[name].items():
            if k not in valid_keys:
                raise ConfigurationError(
                    self.filename,
                    "Unknown config key '%s' in definition for '%s'" %
                    (k, name))

            if k in d:
                if isinstance(d[k], str):
                    # By default, we just concatenate string values of keys
                    # which appear both in "common" and per-test sections,
                    # but some keys are handled in adhoc way based on their
                    # semantics.
                    if k == "filter":
                        d[k] = "(%s) and (%s)" % (d[k], v)
                    else:
                        d[k] += " " + v
            else:
                d[k] = v

        for k, kinfo in valid_keys.items():
            if k not in d:
                if "required" in kinfo:
                    required = kinfo["required"]
                else:
                    required = False

                if required:
                    raise ConfigurationError(
                        self.filename,
                        "missing required value for '%s' in test '%s'" %
                        (k, name))
                else:
                    if "default" in kinfo:
                        default = kinfo["default"]
                    else:
                        # No default: cast an empty string to the key's type
                        default = self._cast_value("", kinfo["type"])
                    d[k] = default
            else:
                try:
                    d[k] = self._cast_value(d[k], kinfo["type"])
                except ValueError:
                    raise ConfigurationError(
                        self.filename, "bad %s value '%s' for key '%s' in name '%s'" %
                        (kinfo["type"], d[k], k, name))

        return d
1251
1252
class Platform:
    """Class representing metadata for a particular platform

    Maps directly to BOARD when building"""

    platform_schema = scl.yaml_load(os.path.join(ZEPHYR_BASE,
                                                 "scripts", "sanity_chk", "platform-schema.yaml"))

    def __init__(self):
        """Initialize every attribute to its documented default."""

        self.name = ""
        self.sanitycheck = True
        # if no RAM size is specified by the board, take a default of 128K
        self.ram = 128
        # if no flash size is specified by the board, take a default of 512K
        self.flash = 512

        self.ignore_tags = []
        self.default = False
        self.supported = set()

        self.arch = ""
        self.type = "na"
        self.simulation = "na"
        self.supported_toolchains = []
        self.env = []
        self.env_satisfied = True
        self.filter_data = dict()

    def load(self, platform_file):
        """Populate this Platform from a board .yaml description file."""
        parser = SanityConfigParser(platform_file, self.platform_schema)
        parser.load()
        data = parser.data

        self.name = data['identifier']
        self.sanitycheck = data.get("sanitycheck", True)
        # Boards that give no sizes fall back to 128K RAM / 512K flash.
        self.ram = data.get("ram", 128)
        self.flash = data.get("flash", 512)

        testing = data.get("testing", {})
        self.ignore_tags = testing.get("ignore_tags", [])
        self.default = testing.get("default", False)

        # "supported" entries may be colon-separated feature chains;
        # flatten them into a single set of individual features.
        self.supported = {feature
                          for raw in data.get("supported", [])
                          for feature in raw.split(":")}

        self.arch = data['arch']
        self.type = data.get('type', "na")
        self.simulation = data.get('simulation', "na")
        self.supported_toolchains = data.get("toolchain", [])

        # Satisfied only when every required environment variable is set.
        self.env = data.get("env", [])
        self.env_satisfied = all(os.environ.get(var) for var in self.env)

    def __repr__(self):
        return "<%s on %s>" % (self.name, self.arch)
1316
1317
class DisablePyTestCollectionMixin(object):
    """Mixin that stops pytest from collecting subclasses as test items.

    Several classes below (TestCase, TestInstance) start with "Test", which
    pytest would otherwise try to collect; __test__ = False opts them out.
    """
    __test__ = False
1320
1321
class TestCase(DisablePyTestCollectionMixin):
    """Class representing a test application
    """

    def __init__(self, testcase_root, workdir, name):
        """TestCase constructor.

        This gets called by TestSuite as it finds and reads test yaml files.
        Multiple TestCase instances may be generated from a single testcase.yaml,
        each one corresponds to an entry within that file.

        We need to have a unique name for every single test case. Since
        a testcase.yaml can define multiple tests, the canonical name for
        the test case is <workdir>/<name>.

        @param testcase_root os.path.abspath() of one of the --testcase-root
        @param workdir Sub-directory of testcase_root where the
            .yaml test configuration file was found
        @param name Name of this test case, corresponding to the entry name
            in the test case configuration file. For many test cases that just
            define one test, can be anything and is usually "test". This is
            really only used to distinguish between different cases when
            the testcase.yaml defines multiple tests
        """


        self.source_dir = ""
        self.yamlfile = ""
        # Sub-case names discovered by parse_subcases()
        self.cases = []
        # Canonical unique name: <relative tc root>/<workdir>/<name>
        self.name = self.get_unique(testcase_root, workdir, name)
        self.id = name

        # Attributes below are populated later from the testcase.yaml entry.
        self.type = None
        self.tags = set()
        self.extra_args = None
        self.extra_configs = None
        self.arch_whitelist = None
        self.arch_exclude = None
        self.skip = False
        self.platform_exclude = None
        self.platform_whitelist = None
        self.toolchain_exclude = None
        self.toolchain_whitelist = None
        self.tc_filter = None
        self.timeout = 60
        self.harness = ""
        self.harness_config = {}
        self.build_only = True
        self.build_on_all = False
        self.slow = False
        self.min_ram = -1
        self.depends_on = None
        self.min_flash = -1
        self.extra_sections = None

    @staticmethod
    def get_unique(testcase_root, workdir, name):
        """Build the canonical unique name for a test case.

        Raises SanityCheckException when the name has no '.' separator
        (tests must be named <category>.<subsystem>).
        """

        canonical_testcase_root = os.path.realpath(testcase_root)
        if Path(canonical_zephyr_base) in Path(canonical_testcase_root).parents:
            # This is in ZEPHYR_BASE, so include path in name for uniqueness
            # FIXME: We should not depend on path of test for unique names.
            relative_tc_root = os.path.relpath(canonical_testcase_root,
                                               start=canonical_zephyr_base)
        else:
            relative_tc_root = ""

        # workdir can be "."
        unique = os.path.normpath(os.path.join(relative_tc_root, workdir, name))
        check = name.split(".")
        if len(check) < 2:
            raise SanityCheckException(f"""bad test name '{name}' in {testcase_root}/{workdir}. \
Tests should reference the category and subsystem with a dot as a separator.
                    """
                    )
        return unique

    @staticmethod
    def scan_file(inf_name):
        """Scan a C source file for ztest test case names.

        @param inf_name Path to the C source file to scan
        @return (matches, warnings): list of sub-case names (with the
            leading "test_" stripped) or None when the file declares no
            ztest_test_suite, plus a warning string (or None)
        @raises ValueError when a suite is declared but never run
        """
        suite_regex = re.compile(
            # do not match until end-of-line, otherwise we won't allow
            # stc_regex below to catch the ones that are declared in the same
            # line--as we only search starting the end of this match
            br"^\s*ztest_test_suite\(\s*(?P<suite_name>[a-zA-Z0-9_]+)\s*,",
            re.MULTILINE)
        stc_regex = re.compile(
            br"^\s*"  # empty space at the beginning is ok
            # catch the case where it is declared in the same sentence, e.g:
            #
            # ztest_test_suite(mutex_complex, ztest_user_unit_test(TESTNAME));
            br"(?:ztest_test_suite\([a-zA-Z0-9_]+,\s*)?"
            # Catch ztest[_user]_unit_test-[_setup_teardown](TESTNAME)
            br"ztest_(?:1cpu_)?(?:user_)?unit_test(?:_setup_teardown)?"
            # Consume the argument that becomes the extra testcase
            br"\(\s*"
            br"(?P<stc_name>[a-zA-Z0-9_]+)"
            # _setup_teardown() variant has two extra arguments that we ignore
            br"(?:\s*,\s*[a-zA-Z0-9_]+\s*,\s*[a-zA-Z0-9_]+)?"
            br"\s*\)",
            # We don't check how it finishes; we don't care
            re.MULTILINE)
        suite_run_regex = re.compile(
            br"^\s*ztest_run_test_suite\((?P<suite_name>[a-zA-Z0-9_]+)\)",
            re.MULTILINE)
        achtung_regex = re.compile(
            br"(#ifdef|#endif)",
            re.MULTILINE)
        warnings = None

        with open(inf_name) as inf:
            # mmap flags differ between Windows and POSIX.
            if os.name == 'nt':
                mmap_args = {'fileno': inf.fileno(), 'length': 0, 'access': mmap.ACCESS_READ}
            else:
                mmap_args = {'fileno': inf.fileno(), 'length': 0, 'flags': mmap.MAP_PRIVATE, 'prot': mmap.PROT_READ,
                             'offset': 0}

            with contextlib.closing(mmap.mmap(**mmap_args)) as main_c:
                # contextlib makes pylint think main_c isn't subscriptable
                # pylint: disable=unsubscriptable-object

                suite_regex_match = suite_regex.search(main_c)
                if not suite_regex_match:
                    # can't find ztest_test_suite, maybe a client, because
                    # it includes ztest.h
                    return None, None

                suite_run_match = suite_run_regex.search(main_c)
                if not suite_run_match:
                    raise ValueError("can't find ztest_run_test_suite")

                # Preprocessor conditionals between the suite declaration and
                # its run call would make the static scan unreliable.
                achtung_matches = re.findall(
                    achtung_regex,
                    main_c[suite_regex_match.end():suite_run_match.start()])
                if achtung_matches:
                    warnings = "found invalid %s in ztest_test_suite()" \
                               % ", ".join({match.decode() for match in achtung_matches})
                _matches = re.findall(
                    stc_regex,
                    main_c[suite_regex_match.end():suite_run_match.start()])
                for match in _matches:
                    if not match.decode().startswith("test_"):
                        warnings = "Found a test that does not start with test_"
                matches = [match.decode().replace("test_", "") for match in _matches]
                return matches, warnings

    def scan_path(self, path):
        """Scan all C sources under a testcase directory for sub-cases.

        Files in src/ are treated strictly (a scan warning raises), while
        top-level .c files only log warnings.

        @param path Root directory of the test application
        @return list of discovered sub-case names
        """
        subcases = []
        for filename in glob.glob(os.path.join(path, "src", "*.c*")):
            try:
                _subcases, warnings = self.scan_file(filename)
                if warnings:
                    logger.error("%s: %s" % (filename, warnings))
                    raise SanityRuntimeError("%s: %s" % (filename, warnings))
                if _subcases:
                    subcases += _subcases
            except ValueError as e:
                logger.error("%s: can't find: %s" % (filename, e))

        for filename in glob.glob(os.path.join(path, "*.c")):
            try:
                _subcases, warnings = self.scan_file(filename)
                if warnings:
                    logger.error("%s: %s" % (filename, warnings))
                if _subcases:
                    subcases += _subcases
            except ValueError as e:
                logger.error("%s: can't find: %s" % (filename, e))
        return subcases

    def parse_subcases(self, test_path):
        """Populate self.cases with fully-qualified sub-case names.

        Falls back to the test's own id when no sub-cases are found.
        """
        results = self.scan_path(test_path)
        for sub in results:
            name = "{}.{}".format(self.id, sub)
            self.cases.append(name)

        if not results:
            self.cases.append(self.id)

    def __str__(self):
        return self.name
1502
1503
class TestInstance(DisablePyTestCollectionMixin):
    """Class representing the execution of a particular TestCase on a platform

    @param test The TestCase object we want to build/execute
    @param platform Platform object that we want to build and run against
    @param base_outdir Base directory for all test results. The actual
    out directory used is <outdir>/<platform>/<test case name>
    """

    def __init__(self, testcase, platform, outdir):

        self.testcase = testcase
        self.platform = platform

        # Execution state, updated as the instance moves through the pipeline.
        self.status = None
        self.reason = "Unknown"
        self.metrics = dict()
        self.handler = None
        self.outdir = outdir

        # Unique name and build directory: <platform>/<testcase name>
        self.name = os.path.join(platform.name, testcase.name)
        self.build_dir = os.path.join(outdir, platform.name, testcase.name)

        self.build_only = True
        self.run = False

        self.results = {}

    def __lt__(self, other):
        # Order instances by canonical name so result lists sort stably.
        return self.name < other.name

    # Global testsuite parameters
    def check_build_or_run(self, build_only=False, enable_slow=False, device_testing=False, fixture=None):
        """Decide whether this instance is only built or also run.

        Sets self.build_only and self.run (always mutually exclusive).

        @param build_only True when --build-only was requested
        @param enable_slow True when slow tests should also be executed
        @param device_testing True when running on real hardware
        @param fixture Fixtures supplied on the command line (list or None)
        """
        # BUGFIX: the default used to be a mutable list ([]); use a None
        # sentinel instead so state cannot leak between calls.
        if fixture is None:
            fixture = []

        # right now we only support building on windows. running is still work
        # in progress.
        if os.name == 'nt':
            self.build_only = True
            self.run = False
            return

        _build_only = True

        # we asked for build-only on the command line
        if build_only or self.testcase.build_only:
            self.build_only = True
            self.run = False
            return

        # Do not run slow tests:
        skip_slow = self.testcase.slow and not enable_slow
        if skip_slow:
            self.build_only = True
            self.run = False
            return

        # A test is runnable when it is a unit test, targets a native or
        # simulated platform, or real hardware was requested.
        runnable = bool(self.testcase.type == "unit" or \
                        self.platform.type == "native" or \
                        self.platform.simulation in ["nsim", "renode", "qemu"] or \
                        device_testing)

        # Simulators are only usable when their binaries are installed.
        if self.platform.simulation == "nsim":
            if not find_executable("nsimdrv"):
                runnable = False

        if self.platform.simulation == "renode":
            if not find_executable("renode"):
                runnable = False

        # console harness allows us to run the test and capture data.
        if self.testcase.harness == 'console':

            # if we have a fixture that is also being supplied on the
            # command-line, then we need to run the test, not just build it.
            if "fixture" in self.testcase.harness_config:
                fixture_cfg = self.testcase.harness_config['fixture']
                if fixture_cfg in fixture:
                    _build_only = False
                else:
                    _build_only = True
            else:
                _build_only = False
        elif self.testcase.harness:
            # Any other harness needs external tooling: build only.
            _build_only = True
        else:
            _build_only = False

        self.build_only = not (not _build_only and runnable)
        self.run = not self.build_only
        return

    def create_overlay(self, platform, enable_asan=False, enable_coverage=False, coverage_platform=None):
        """Write testcase_extra.conf holding per-test Kconfig overlays.

        @param platform Platform being built (decides coverage/ASAN applicability)
        @param enable_asan Add CONFIG_ASAN=y on native platforms
        @param enable_coverage Add coverage options on coverage platforms
        @param coverage_platform Platform names selected for coverage (list or None)
        """
        # BUGFIX: avoid a mutable default argument here as well.
        if coverage_platform is None:
            coverage_platform = []

        # Create this in a "sanitycheck/" subdirectory otherwise this
        # will pass this overlay to kconfig.py *twice* and kconfig.cmake
        # will silently give that second time precedence over any
        # --extra-args=CONFIG_*
        subdir = os.path.join(self.build_dir, "sanitycheck")
        os.makedirs(subdir, exist_ok=True)
        file = os.path.join(subdir, "testcase_extra.conf")

        with open(file, "w") as f:
            content = ""

            if self.testcase.extra_configs:
                content = "\n".join(self.testcase.extra_configs)

            if enable_coverage:
                if platform.name in coverage_platform:
                    content = content + "\nCONFIG_COVERAGE=y"
                    content = content + "\nCONFIG_COVERAGE_DUMP=y"

            if enable_asan:
                if platform.type == "native":
                    content = content + "\nCONFIG_ASAN=y"

            f.write(content)

    def calculate_sizes(self):
        """Get the RAM/ROM sizes of a test case.

        This can only be run after the instance has been executed by
        MakeGenerator, otherwise there won't be any binaries to measure.

        @return A SizeCalculator object
        @raises BuildError when zero or multiple output binaries are found
        """
        fns = glob.glob(os.path.join(self.build_dir, "zephyr", "*.elf"))
        fns.extend(glob.glob(os.path.join(self.build_dir, "zephyr", "*.exe")))
        # Ignore the intermediate pre-link binary.
        fns = [x for x in fns if not x.endswith('_prebuilt.elf')]
        if len(fns) != 1:
            raise BuildError("Missing/multiple output ELF binary")

        return SizeCalculator(fns[0], self.testcase.extra_sections)

    def __repr__(self):
        return "<TestCase %s on %s>" % (self.testcase.name, self.platform.name)
1639
1640
class CMake():
    """Wrapper for configuring and building one test instance with cmake."""

    # Matches CONFIG_FOO=bar / CONFIG_FOO="bar" lines from a .config file
    config_re = re.compile('(CONFIG_[A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
    dt_re = re.compile('([A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')

    def __init__(self, testcase, platform, source_dir, build_dir):
        """Constructor

        @param testcase TestCase to build
        @param platform Platform (board) to build for
        @param source_dir Directory containing the application sources
        @param build_dir Output directory for this build
        """
        self.cwd = None
        self.capture_output = True

        self.defconfig = {}
        self.cmake_cache = {}

        self.instance = None
        self.testcase = testcase
        self.platform = platform
        self.source_dir = source_dir
        self.build_dir = build_dir
        self.log = "build.log"
        self.generator = None
        self.generator_cmd = None
        # BUGFIX: run_cmake() reads self.cmake_only, but only the
        # ProjectBuilder subclass ever set it; default it here so CMake
        # and FilterBuilder can be used directly without AttributeError.
        self.cmake_only = False

    def parse_generated(self):
        """Reset parsed config state; subclasses override to do real parsing."""
        self.defconfig = {}
        return {}

    def run_build(self, args=None):
        """Invoke cmake --build (via the configured generator) and collect results.

        @param args Extra cmake command-line arguments (list or None)
        @return dict with 'returncode'/'instance' (and 'msg' on success),
            or None when a successful build produced no output
        """
        # Avoid a shared mutable default argument.
        args = args or []

        logger.debug("Building %s for %s" % (self.source_dir, self.platform.name))

        cmake_args = []
        cmake_args.extend(args)
        cmake = shutil.which('cmake')
        cmd = [cmake] + cmake_args
        kwargs = dict()

        if self.capture_output:
            kwargs['stdout'] = subprocess.PIPE
            # CMake sends the output of message() to stderr unless it's STATUS
            kwargs['stderr'] = subprocess.STDOUT

        if self.cwd:
            kwargs['cwd'] = self.cwd

        p = subprocess.Popen(cmd, **kwargs)
        out, _ = p.communicate()

        results = {}
        if p.returncode == 0:
            msg = "Finished building %s for %s" % (self.source_dir, self.platform.name)

            self.instance.status = "passed"
            results = {'msg': msg, "returncode": p.returncode, "instance": self.instance}

            if out:
                log_msg = out.decode(sys.getdefaultencoding())
                with open(os.path.join(self.build_dir, self.log), "a") as log:
                    log.write(log_msg)

            else:
                return None
        else:
            # A real error occurred, raise an exception
            if out:
                log_msg = out.decode(sys.getdefaultencoding())
                with open(os.path.join(self.build_dir, self.log), "a") as log:
                    log.write(log_msg)

            if log_msg:
                # A linker region overflow is treated as a skip, not a failure.
                res = re.findall("region `(FLASH|RAM|SRAM)' overflowed by", log_msg)
                if res:
                    logger.debug("Test skipped due to {} Overflow".format(res[0]))
                    self.instance.status = "skipped"
                    self.instance.reason = "{} overflow".format(res[0])
                else:
                    self.instance.status = "failed"
                    self.instance.reason = "Build failure"

            results = {
                "returncode": p.returncode,
                "instance": self.instance,
            }

        return results

    def run_cmake(self, args=None):
        """Run the cmake configure step for this instance.

        @param args Extra -D definitions for cmake (list or None)
        @return dict with 'msg' and 'filter' on success, or 'returncode'
            on failure
        """
        # Avoid a shared mutable default argument.
        args = args or []

        ldflags = "-Wl,--fatal-warnings"
        logger.debug("Running cmake on %s for %s" % (self.source_dir, self.platform.name))

        # fixme: add additional cflags based on options
        cmake_args = [
            '-B{}'.format(self.build_dir),
            '-S{}'.format(self.source_dir),
            '-DEXTRA_CFLAGS="-Werror ',
            '-DEXTRA_AFLAGS=-Wa,--fatal-warnings',
            '-DEXTRA_LDFLAGS="{}'.format(ldflags),
            '-G{}'.format(self.generator)
        ]

        if self.cmake_only:
            cmake_args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=1")

        args = ["-D{}".format(a.replace('"', '')) for a in args]
        cmake_args.extend(args)

        cmake_opts = ['-DBOARD={}'.format(self.platform.name)]
        cmake_args.extend(cmake_opts)


        logger.debug("Calling cmake with arguments: {}".format(cmake_args))
        cmake = shutil.which('cmake')
        cmd = [cmake] + cmake_args
        kwargs = dict()

        if self.capture_output:
            kwargs['stdout'] = subprocess.PIPE
            # CMake sends the output of message() to stderr unless it's STATUS
            kwargs['stderr'] = subprocess.STDOUT

        if self.cwd:
            kwargs['cwd'] = self.cwd

        p = subprocess.Popen(cmd, **kwargs)
        out, _ = p.communicate()

        if p.returncode == 0:
            filter_results = self.parse_generated()
            msg = "Finished building %s for %s" % (self.source_dir, self.platform.name)
            logger.debug(msg)
            results = {'msg': msg, 'filter': filter_results}

        else:
            self.instance.status = "failed"
            self.instance.reason = "Cmake build failure"
            logger.error("Cmake build failure: %s for %s" % (self.source_dir, self.platform.name))
            results = {"returncode": p.returncode}

        if out:
            with open(os.path.join(self.build_dir, self.log), "a") as log:
                log_msg = out.decode(sys.getdefaultencoding())
                log.write(log_msg)

        return results
1784
1785
class FilterBuilder(CMake):
    """CMake subclass used to configure a build and evaluate testcase filters."""

    def __init__(self, testcase, platform, source_dir, build_dir):
        super().__init__(testcase, platform, source_dir, build_dir)

        # Filter-evaluation runs log to their own file, not build.log.
        self.log = "config-sanitycheck.log"

    def parse_generated(self):
        """Parse generated build artifacts and evaluate the testcase filter.

        Reads the generated .config and CMakeCache.txt, layers them (with
        os.environ) into a filter-data dict, then evaluates the testcase's
        tc_filter expression against it (with devicetree data when a
        preprocessed .dts is available).

        @return A {instance-name: True/False} dict when a filter was
            evaluated (True means "filter this instance out"), otherwise
            the raw filter-data dict for the platform.
        """

        # Unit tests have no generated Kconfig/CMake artifacts to parse.
        if self.platform.name == "unit_testing":
            return {}

        cmake_cache_path = os.path.join(self.build_dir, "CMakeCache.txt")
        defconfig_path = os.path.join(self.build_dir, "zephyr", ".config")

        # Collect CONFIG_* symbols from the generated .config.
        with open(defconfig_path, "r") as fp:
            defconfig = {}
            for line in fp.readlines():
                m = self.config_re.match(line)
                if not m:
                    if line.strip() and not line.startswith("#"):
                        sys.stderr.write("Unrecognized line %s\n" % line)
                    continue
                defconfig[m.group(1)] = m.group(2).strip()

        self.defconfig = defconfig

        # Collect cache entries from CMakeCache.txt (best-effort).
        cmake_conf = {}
        try:
            cache = CMakeCache.from_file(cmake_cache_path)
        except FileNotFoundError:
            cache = {}

        for k in iter(cache):
            cmake_conf[k.name] = k.value

        self.cmake_cache = cmake_conf

        # Later updates take precedence: env < defconfig < cmake cache.
        filter_data = {
            "ARCH": self.platform.arch,
            "PLATFORM": self.platform.name
        }
        filter_data.update(os.environ)
        filter_data.update(self.defconfig)
        filter_data.update(self.cmake_cache)

        dts_path = os.path.join(self.build_dir, "zephyr", self.platform.name + ".dts.pre.tmp")
        if self.testcase and self.testcase.tc_filter:
            try:
                # Provide devicetree data to the filter only when the
                # preprocessed .dts exists for this build.
                if os.path.exists(dts_path):
                    edt = edtlib.EDT(dts_path, [os.path.join(ZEPHYR_BASE, "dts", "bindings")],
                                     warn_reg_unit_address_mismatch=False)
                else:
                    edt = None
                res = expr_parser.parse(self.testcase.tc_filter, filter_data, edt)

            except (ValueError, SyntaxError) as se:
                sys.stderr.write(
                    "Failed processing %s\n" % self.testcase.yamlfile)
                raise se

            # A falsy result means the filter rejected this instance.
            if not res:
                return {os.path.join(self.platform.name, self.testcase.name): True}
            else:
                return {os.path.join(self.platform.name, self.testcase.name): False}
        else:
            self.platform.filter_data = filter_data
            return filter_data
1853 return filter_data
1854
1855
1856class ProjectBuilder(FilterBuilder):
1857
1858 def __init__(self, suite, instance, **kwargs):
1859 super().__init__(instance.testcase, instance.platform, instance.testcase.source_dir, instance.build_dir)
1860
1861 self.log = "build.log"
1862 self.instance = instance
1863 self.suite = suite
1864
1865 self.lsan = kwargs.get('lsan', False)
1866 self.asan = kwargs.get('asan', False)
1867 self.valgrind = kwargs.get('valgrind', False)
1868 self.extra_args = kwargs.get('extra_args', [])
1869 self.device_testing = kwargs.get('device_testing', False)
1870 self.cmake_only = kwargs.get('cmake_only', False)
1871 self.cleanup = kwargs.get('cleanup', False)
1872 self.coverage = kwargs.get('coverage', False)
1873 self.inline_logs = kwargs.get('inline_logs', False)
Anas Nashifce2b4182020-03-24 14:40:28 -04001874 self.generator = kwargs.get('generator', None)
1875 self.generator_cmd = kwargs.get('generator_cmd', None)
Anas Nashiff6462a32020-03-29 19:02:51 -04001876 self.verbose = kwargs.get('verbose', None)
Anas Nashifce2b4182020-03-24 14:40:28 -04001877
1878 @staticmethod
1879 def log_info(filename, inline_logs):
1880 filename = os.path.abspath(os.path.realpath(filename))
1881 if inline_logs:
1882 logger.info("{:-^100}".format(filename))
1883
1884 try:
1885 with open(filename) as fp:
1886 data = fp.read()
1887 except Exception as e:
1888 data = "Unable to read log data (%s)\n" % (str(e))
1889
1890 logger.error(data)
1891
1892 logger.info("{:-^100}".format(filename))
1893 else:
1894 logger.error("see: " + Fore.YELLOW + filename + Fore.RESET)
1895
1896 def log_info_file(self, inline_logs):
1897 build_dir = self.instance.build_dir
1898 h_log = "{}/handler.log".format(build_dir)
1899 b_log = "{}/build.log".format(build_dir)
1900 v_log = "{}/valgrind.log".format(build_dir)
1901 d_log = "{}/device.log".format(build_dir)
1902
1903 if os.path.exists(v_log) and "Valgrind" in self.instance.reason:
1904 self.log_info("{}".format(v_log), inline_logs)
1905 elif os.path.exists(h_log) and os.path.getsize(h_log) > 0:
1906 self.log_info("{}".format(h_log), inline_logs)
1907 elif os.path.exists(d_log) and os.path.getsize(d_log) > 0:
1908 self.log_info("{}".format(d_log), inline_logs)
1909 else:
1910 self.log_info("{}".format(b_log), inline_logs)
1911
    def setup_handler(self):
        """Attach the appropriate run handler to this builder's instance.

        Picks QEMUHandler, BinaryHandler or DeviceHandler based on the
        platform's simulation/type, the testcase type, and whether device
        testing was requested. May leave instance.handler unset when no
        handler applies (e.g. a simulator binary is not installed).
        """

        instance = self.instance
        args = []

        # FIXME: Needs simplification
        if instance.platform.simulation == "qemu":
            instance.handler = QEMUHandler(instance, "qemu")
            args.append("QEMU_PIPE=%s" % instance.handler.get_fifo())
            instance.handler.call_make_run = True
        elif instance.testcase.type == "unit":
            instance.handler = BinaryHandler(instance, "unit")
            instance.handler.binary = os.path.join(instance.build_dir, "testbinary")
            if self.coverage:
                args.append("COVERAGE=1")
        elif instance.platform.type == "native":
            handler = BinaryHandler(instance, "native")

            # Native binaries support the sanitizer/coverage knobs directly.
            handler.asan = self.asan
            handler.valgrind = self.valgrind
            handler.lsan = self.lsan
            handler.coverage = self.coverage

            handler.binary = os.path.join(instance.build_dir, "zephyr", "zephyr.exe")
            instance.handler = handler
        elif instance.platform.simulation == "nsim":
            # Only attach a handler when the simulator is installed.
            if find_executable("nsimdrv"):
                instance.handler = BinaryHandler(instance, "nsim")
                instance.handler.call_make_run = True
        elif instance.platform.simulation == "renode":
            if find_executable("renode"):
                instance.handler = BinaryHandler(instance, "renode")
                instance.handler.pid_fn = os.path.join(instance.build_dir, "renode.pid")
                instance.handler.call_make_run = True
        elif self.device_testing:
            instance.handler = DeviceHandler(instance, "device")

        if instance.handler:
            instance.handler.args = args
            instance.handler.generator_cmd = self.generator_cmd
            instance.handler.generator = self.generator
1952 instance.handler.generator = self.generator
Anas Nashifce2b4182020-03-24 14:40:28 -04001953
1954 def process(self, message):
1955 op = message.get('op')
1956
1957 if not self.instance.handler:
1958 self.setup_handler()
1959
1960 # The build process, call cmake and build with configured generator
1961 if op == "cmake":
1962 results = self.cmake()
1963 if self.instance.status == "failed":
1964 pipeline.put({"op": "report", "test": self.instance})
1965 elif self.cmake_only:
1966 pipeline.put({"op": "report", "test": self.instance})
1967 else:
1968 if self.instance.name in results['filter'] and results['filter'][self.instance.name]:
1969 logger.debug("filtering %s" % self.instance.name)
1970 self.instance.status = "skipped"
1971 self.instance.reason = "filter"
1972 pipeline.put({"op": "report", "test": self.instance})
1973 else:
1974 pipeline.put({"op": "build", "test": self.instance})
1975
1976 elif op == "build":
1977 logger.debug("build test: %s" % self.instance.name)
1978 results = self.build()
1979
1980 if not results:
1981 self.instance.status = "failed"
1982 self.instance.reason = "Build Failure"
1983 pipeline.put({"op": "report", "test": self.instance})
1984 else:
1985 if results.get('returncode', 1) > 0:
1986 pipeline.put({"op": "report", "test": self.instance})
1987 else:
1988 if self.instance.run:
1989 pipeline.put({"op": "run", "test": self.instance})
1990 else:
1991 pipeline.put({"op": "report", "test": self.instance})
1992 # Run the generated binary using one of the supported handlers
1993 elif op == "run":
1994 logger.debug("run test: %s" % self.instance.name)
1995 self.run()
1996 self.instance.status, _ = self.instance.handler.get_state()
1997 pipeline.put({
1998 "op": "report",
1999 "test": self.instance,
2000 "state": "executed",
2001 "status": self.instance.status,
2002 "reason": self.instance.reason}
2003 )
2004
2005 # Report results and output progress to screen
2006 elif op == "report":
2007 with report_lock:
2008 self.report_out()
2009
2010 if self.cleanup and not self.coverage and self.instance.status == "passed":
2011 pipeline.put({
2012 "op": "cleanup",
2013 "test": self.instance
2014 })
2015
2016 elif op == "cleanup":
2017 self.cleanup_artifacts()
2018
2019 def cleanup_artifacts(self):
2020 logger.debug("Cleaning up {}".format(self.instance.build_dir))
2021 whitelist = [
2022 'zephyr/.config',
2023 'handler.log',
2024 'build.log',
2025 'device.log',
Anas Nashif9ace63e2020-04-28 07:14:43 -04002026 'recording.csv',
Anas Nashifce2b4182020-03-24 14:40:28 -04002027 ]
2028 whitelist = [os.path.join(self.instance.build_dir, file) for file in whitelist]
2029
2030 for dirpath, dirnames, filenames in os.walk(self.instance.build_dir, topdown=False):
2031 for name in filenames:
2032 path = os.path.join(dirpath, name)
2033 if path not in whitelist:
2034 os.remove(path)
2035 # Remove empty directories and symbolic links to directories
2036 for dir in dirnames:
2037 path = os.path.join(dirpath, dir)
2038 if os.path.islink(path):
2039 os.remove(path)
2040 elif not os.listdir(path):
2041 os.rmdir(path)
2042
    def report_out(self):
        """Update suite counters for this instance and print progress.

        In verbose mode each instance gets its own line; otherwise a single
        carriage-returned progress line is rewritten in place.  Callers hold
        report_lock, since suite counters are shared across worker threads.
        """
        total_tests_width = len(str(self.suite.total_tests))
        self.suite.total_done += 1
        instance = self.instance

        if instance.status in ["failed", "timeout"]:
            self.suite.total_failed += 1
            if self.verbose:
                status = Fore.RED + "FAILED " + Fore.RESET + instance.reason
            else:
                # Break out of the \r progress line before printing the error.
                print("")
                logger.error(
                    "{:<25} {:<50} {}FAILED{}: {}".format(
                        instance.platform.name,
                        instance.testcase.name,
                        Fore.RED,
                        Fore.RESET,
                        instance.reason))
            if not self.verbose:
                self.log_info_file(self.inline_logs)
        elif instance.status == "skipped":
            self.suite.total_skipped += 1
            status = Fore.YELLOW + "SKIPPED" + Fore.RESET
        else:
            status = Fore.GREEN + "PASSED" + Fore.RESET

        if self.verbose:
            # Annotate the status with how the instance was exercised.
            if self.cmake_only:
                more_info = "cmake"
            elif instance.status == "skipped":
                more_info = instance.reason
            else:
                if instance.handler and instance.run:
                    more_info = instance.handler.type_str
                    htime = instance.handler.duration
                    if htime:
                        more_info += " {:.3f}s".format(htime)
                else:
                    more_info = "build"

            logger.info("{:>{}}/{} {:<25} {:<50} {} ({})".format(
                self.suite.total_done, total_tests_width, self.suite.total_tests, instance.platform.name,
                instance.testcase.name, status, more_info))

            if instance.status in ["failed", "timeout"]:
                self.log_info_file(self.inline_logs)
        else:
            # Non-verbose: rewrite one progress line in place.
            sys.stdout.write("\rINFO - Total complete: %s%4d/%4d%s %2d%% skipped: %s%4d%s, failed: %s%4d%s" % (
                Fore.GREEN,
                self.suite.total_done,
                self.suite.total_tests,
                Fore.RESET,
                int((float(self.suite.total_done) / self.suite.total_tests) * 100),
                Fore.YELLOW if self.suite.total_skipped > 0 else Fore.RESET,
                self.suite.total_skipped,
                Fore.RESET,
                Fore.RED if self.suite.total_failed > 0 else Fore.RESET,
                self.suite.total_failed,
                Fore.RESET
                )
            )
            sys.stdout.flush()
2105
2106 def cmake(self):
2107
2108 instance = self.instance
2109 args = self.testcase.extra_args[:]
2110 args += self.extra_args
2111
2112 if instance.handler:
2113 args += instance.handler.args
2114
2115 # merge overlay files into one variable
2116 def extract_overlays(args):
2117 re_overlay = re.compile('OVERLAY_CONFIG=(.*)')
2118 other_args = []
2119 overlays = []
2120 for arg in args:
2121 match = re_overlay.search(arg)
2122 if match:
2123 overlays.append(match.group(1).strip('\'"'))
2124 else:
2125 other_args.append(arg)
2126
2127 args[:] = other_args
2128 return overlays
2129
2130 overlays = extract_overlays(args)
2131
2132 if (self.testcase.extra_configs or self.coverage or
2133 self.asan):
2134 overlays.append(os.path.join(instance.build_dir,
2135 "sanitycheck", "testcase_extra.conf"))
2136
2137 if overlays:
2138 args.append("OVERLAY_CONFIG=\"%s\"" % (" ".join(overlays)))
2139
2140 results = self.run_cmake(args)
2141 return results
2142
2143 def build(self):
2144 results = self.run_build(['--build', self.build_dir])
2145 return results
2146
2147 def run(self):
2148
2149 instance = self.instance
2150
2151 if instance.handler.type_str == "device":
2152 instance.handler.suite = self.suite
2153
2154 instance.handler.handle()
2155
2156 sys.stdout.flush()
2157
2158
class BoundedExecutor(concurrent.futures.ThreadPoolExecutor):
    """BoundedExecutor behaves as a ThreadPoolExecutor which will block on
    calls to submit() once the limit given as "bound" work items are queued for
    execution.
    :param bound: Integer - the maximum number of items in the work queue
    :param max_workers: Integer - the size of the thread pool
    """

    def __init__(self, bound, max_workers, **kwargs):
        # Forward extra keyword arguments (thread_name_prefix, initializer,
        # initargs, ...) to ThreadPoolExecutor; the previous implementation
        # accepted **kwargs but silently dropped them.
        super().__init__(max_workers, **kwargs)
        # Allow "bound" queued items on top of the max_workers in flight.
        self.semaphore = BoundedSemaphore(bound + max_workers)

    def submit(self, fn, *args, **kwargs):
        """Schedule fn(*args, **kwargs), blocking while the queue is full."""
        self.semaphore.acquire()
        try:
            future = super().submit(fn, *args, **kwargs)
        except Exception:
            # submit() itself failed (e.g. executor shut down): release the
            # slot we reserved so callers do not deadlock.
            self.semaphore.release()
            raise
        else:
            # Free the slot once the work item completes.
            future.add_done_callback(lambda x: self.semaphore.release())
            return future
2182
2183
class TestSuite(DisablePyTestCollectionMixin):
    """Top-level driver: discovers platforms and testcases, builds/runs the
    selected instances and produces the various reports.

    The class-level attributes below are constants shared by every suite.
    """

    # Regexes used to parse CONFIG_* and generic key="value" lines when
    # evaluating testcase filter expressions.
    config_re = re.compile('(CONFIG_[A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
    dt_re = re.compile('([A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')

    # YAML schema used to validate testcase/sample definition files.
    tc_schema = scl.yaml_load(
        os.path.join(ZEPHYR_BASE,
                     "scripts", "sanity_chk", "testcase-schema.yaml"))

    # Recognized keys (type and default) for a testcase definition.
    testcase_valid_keys = {"tags": {"type": "set", "required": False},
                           "type": {"type": "str", "default": "integration"},
                           "extra_args": {"type": "list"},
                           "extra_configs": {"type": "list"},
                           "build_only": {"type": "bool", "default": False},
                           "build_on_all": {"type": "bool", "default": False},
                           "skip": {"type": "bool", "default": False},
                           "slow": {"type": "bool", "default": False},
                           "timeout": {"type": "int", "default": 60},
                           "min_ram": {"type": "int", "default": 8},
                           "depends_on": {"type": "set"},
                           "min_flash": {"type": "int", "default": 32},
                           "arch_whitelist": {"type": "set"},
                           "arch_exclude": {"type": "set"},
                           "extra_sections": {"type": "list", "default": []},
                           "platform_exclude": {"type": "set"},
                           "platform_whitelist": {"type": "set"},
                           "toolchain_exclude": {"type": "set"},
                           "toolchain_whitelist": {"type": "set"},
                           "filter": {"type": "str"},
                           "harness": {"type": "str"},
                           "harness_config": {"type": "map", "default": {}}
                           }

    # Metrics recorded at the last release, used for footprint comparisons.
    RELEASE_DATA = os.path.join(ZEPHYR_BASE, "scripts", "sanity_chk",
                                "sanity_last_release.csv")

    # File names that mark a directory as containing a sample/test definition.
    SAMPLE_FILENAME = 'sample.yaml'
    TESTCASE_FILENAME = 'testcase.yaml'
2221
Anas Nashifaff616d2020-04-17 21:24:57 -04002222 def __init__(self, board_root_list=[], testcase_roots=[], outdir=None):
Anas Nashifce2b4182020-03-24 14:40:28 -04002223
2224 self.roots = testcase_roots
2225 if not isinstance(board_root_list, list):
2226 self.board_roots = [board_root_list]
2227 else:
2228 self.board_roots = board_root_list
2229
2230 # Testsuite Options
2231 self.coverage_platform = []
2232 self.build_only = False
2233 self.cmake_only = False
2234 self.cleanup = False
2235 self.enable_slow = False
2236 self.device_testing = False
2237 self.fixture = []
2238 self.enable_coverage = False
2239 self.enable_lsan = False
2240 self.enable_asan = False
2241 self.enable_valgrind = False
2242 self.extra_args = []
2243 self.inline_logs = False
2244 self.enable_sizes_report = False
2245 self.west_flash = None
2246 self.west_runner = None
2247 self.generator = None
2248 self.generator_cmd = None
2249
2250 # Keep track of which test cases we've filtered out and why
2251 self.testcases = {}
2252 self.platforms = []
2253 self.selected_platforms = []
2254 self.default_platforms = []
2255 self.outdir = os.path.abspath(outdir)
Anas Nashifaff616d2020-04-17 21:24:57 -04002256 self.discards = {}
Anas Nashifce2b4182020-03-24 14:40:28 -04002257 self.load_errors = 0
2258 self.instances = dict()
2259
2260 self.total_tests = 0 # number of test instances
2261 self.total_cases = 0 # number of test cases
2262 self.total_done = 0 # tests completed
2263 self.total_failed = 0
2264 self.total_skipped = 0
2265
2266 self.total_platforms = 0
2267 self.start_time = 0
2268 self.duration = 0
2269 self.warnings = 0
2270 self.cv = threading.Condition()
2271
2272 # hardcoded for now
2273 self.connected_hardware = []
2274
Anas Nashifbb280352020-05-07 12:02:48 -04002275 def get_platform_instances(self, platform):
2276 filtered_dict = {k:v for k,v in self.instances.items() if k.startswith(platform + "/")}
2277 return filtered_dict
2278
    def config(self):
        # Log which platforms were selected for coverage reporting.
        logger.info("coverage platform: {}".format(self.coverage_platform))
2281
2282 # Debug Functions
2283 @staticmethod
2284 def info(what):
2285 sys.stdout.write(what + "\n")
2286 sys.stdout.flush()
2287
2288 def update(self):
2289 self.total_tests = len(self.instances)
2290 self.total_cases = len(self.testcases)
2291
2292 def compare_metrics(self, filename):
2293 # name, datatype, lower results better
2294 interesting_metrics = [("ram_size", int, True),
2295 ("rom_size", int, True)]
2296
2297 if not os.path.exists(filename):
2298 logger.info("Cannot compare metrics, %s not found" % filename)
2299 return []
2300
2301 results = []
2302 saved_metrics = {}
2303 with open(filename) as fp:
2304 cr = csv.DictReader(fp)
2305 for row in cr:
2306 d = {}
2307 for m, _, _ in interesting_metrics:
2308 d[m] = row[m]
2309 saved_metrics[(row["test"], row["platform"])] = d
2310
2311 for instance in self.instances.values():
2312 mkey = (instance.testcase.name, instance.platform.name)
2313 if mkey not in saved_metrics:
2314 continue
2315 sm = saved_metrics[mkey]
2316 for metric, mtype, lower_better in interesting_metrics:
2317 if metric not in instance.metrics:
2318 continue
2319 if sm[metric] == "":
2320 continue
2321 delta = instance.metrics.get(metric, 0) - mtype(sm[metric])
2322 if delta == 0:
2323 continue
2324 results.append((instance, metric, instance.metrics.get(metric, 0), delta,
2325 lower_better))
2326 return results
2327
2328 def misc_reports(self, report, show_footprint, all_deltas,
2329 footprint_threshold, last_metrics):
2330
2331 if not report:
2332 return
2333
2334 deltas = self.compare_metrics(report)
2335 warnings = 0
2336 if deltas and show_footprint:
2337 for i, metric, value, delta, lower_better in deltas:
2338 if not all_deltas and ((delta < 0 and lower_better) or
2339 (delta > 0 and not lower_better)):
2340 continue
2341
2342 percentage = (float(delta) / float(value - delta))
2343 if not all_deltas and (percentage <
2344 (footprint_threshold / 100.0)):
2345 continue
2346
2347 logger.info("{:<25} {:<60} {}{}{}: {} {:<+4}, is now {:6} {:+.2%}".format(
2348 i.platform.name, i.testcase.name, Fore.YELLOW,
2349 "INFO" if all_deltas else "WARNING", Fore.RESET,
2350 metric, delta, value, percentage))
2351 warnings += 1
2352
2353 if warnings:
2354 logger.warning("Deltas based on metrics from last %s" %
2355 ("release" if not last_metrics else "run"))
2356
    def summary(self, unrecognized_sections):
        """Log the end-of-run summary: pass/fail counts, pass rate, platform
        coverage, and how many tests actually executed vs. build-only.

        Unless unrecognized_sections is set, binaries with unknown sections
        are counted as failures.
        """
        failed = 0
        run = 0
        for instance in self.instances.values():
            if instance.status == "failed":
                failed += 1
            elif instance.metrics.get("unrecognized") and not unrecognized_sections:
                logger.error("%sFAILED%s: %s has unrecognized binary sections: %s" %
                             (Fore.RED, Fore.RESET, instance.name,
                              str(instance.metrics.get("unrecognized", []))))
                failed += 1

            # A non-zero handler time means the binary was actually executed.
            if instance.metrics['handler_time']:
                run += 1

        # Pass rate is computed over non-skipped tests only.
        if self.total_tests and self.total_tests != self.total_skipped:
            pass_rate = (float(self.total_tests - self.total_failed - self.total_skipped) / float(
                self.total_tests - self.total_skipped))
        else:
            pass_rate = 0

        logger.info(
            "{}{} of {}{} tests passed ({:.2%}), {}{}{} failed, {} skipped with {}{}{} warnings in {:.2f} seconds".format(
                Fore.RED if failed else Fore.GREEN,
                self.total_tests - self.total_failed - self.total_skipped,
                self.total_tests - self.total_skipped,
                Fore.RESET,
                pass_rate,
                Fore.RED if self.total_failed else Fore.RESET,
                self.total_failed,
                Fore.RESET,
                self.total_skipped,
                Fore.YELLOW if self.warnings else Fore.RESET,
                self.warnings,
                Fore.RESET,
                self.duration))

        self.total_platforms = len(self.platforms)
        if self.platforms:
            logger.info("In total {} test cases were executed on {} out of total {} platforms ({:02.2f}%)".format(
                self.total_cases,
                len(self.selected_platforms),
                self.total_platforms,
                (100 * len(self.selected_platforms) / len(self.platforms))
            ))

        logger.info(f"{Fore.GREEN}{run}{Fore.RESET} tests executed on platforms, \
{Fore.RED}{self.total_tests - run}{Fore.RESET} tests were only built.")
2405
Anas Nashif6915adf2020-04-22 09:39:42 -04002406 def save_reports(self, name, suffix, report_dir, no_update, release, only_failed):
Anas Nashifce2b4182020-03-24 14:40:28 -04002407 if not self.instances:
2408 return
2409
2410 if name:
2411 report_name = name
2412 else:
2413 report_name = "sanitycheck"
2414
2415 if report_dir:
2416 os.makedirs(report_dir, exist_ok=True)
2417 filename = os.path.join(report_dir, report_name)
2418 outdir = report_dir
2419 else:
2420 filename = os.path.join(self.outdir, report_name)
2421 outdir = self.outdir
2422
Anas Nashif6915adf2020-04-22 09:39:42 -04002423 if suffix:
2424 filename = "{}_{}".format(filename, suffix)
2425
Anas Nashifce2b4182020-03-24 14:40:28 -04002426 if not no_update:
Anas Nashif90415502020-04-11 22:15:04 -04002427 self.xunit_report(filename + ".xml", full_report=False, append=only_failed)
2428 self.xunit_report(filename + "_report.xml", full_report=True, append=only_failed)
Anas Nashifce2b4182020-03-24 14:40:28 -04002429 self.csv_report(filename + ".csv")
Anas Nashif90415502020-04-11 22:15:04 -04002430
Anas Nashif6915adf2020-04-22 09:39:42 -04002431 self.target_report(outdir, suffix, append=only_failed)
Anas Nashifce2b4182020-03-24 14:40:28 -04002432 if self.discards:
2433 self.discard_report(filename + "_discard.csv")
2434
2435 if release:
2436 self.csv_report(self.RELEASE_DATA)
2437
2438 def add_configurations(self):
2439
2440 for board_root in self.board_roots:
2441 board_root = os.path.abspath(board_root)
2442
2443 logger.debug("Reading platform configuration files under %s..." %
2444 board_root)
2445
2446 for file in glob.glob(os.path.join(board_root, "*", "*", "*.yaml")):
2447 logger.debug("Found platform configuration " + file)
2448 try:
2449 platform = Platform()
2450 platform.load(file)
2451 if platform.sanitycheck:
2452 self.platforms.append(platform)
2453 if platform.default:
2454 self.default_platforms.append(platform.name)
2455
2456 except RuntimeError as e:
2457 logger.error("E: %s: can't load: %s" % (file, e))
2458 self.load_errors += 1
2459
2460 def get_all_tests(self):
2461 tests = []
2462 for _, tc in self.testcases.items():
2463 for case in tc.cases:
2464 tests.append(case)
2465
2466 return tests
2467
2468 @staticmethod
2469 def get_toolchain():
2470 toolchain = os.environ.get("ZEPHYR_TOOLCHAIN_VARIANT", None) or \
2471 os.environ.get("ZEPHYR_GCC_VARIANT", None)
2472
2473 if toolchain == "gccarmemb":
2474 # Remove this translation when gccarmemb is no longer supported.
2475 toolchain = "gnuarmemb"
2476
2477 try:
2478 if not toolchain:
2479 raise SanityRuntimeError("E: Variable ZEPHYR_TOOLCHAIN_VARIANT is not defined")
2480 except Exception as e:
2481 print(str(e))
2482 sys.exit(2)
2483
2484 return toolchain
2485
2486 def add_testcases(self, testcase_filter=[]):
2487 for root in self.roots:
2488 root = os.path.abspath(root)
2489
2490 logger.debug("Reading test case configuration files under %s..." % root)
2491
2492 for dirpath, dirnames, filenames in os.walk(root, topdown=True):
2493 logger.debug("scanning %s" % dirpath)
Aastha Grovera0ae5342020-05-13 13:34:00 -07002494 if self.SAMPLE_FILENAME in filenames:
2495 filename = self.SAMPLE_FILENAME
2496 elif self.TESTCASE_FILENAME in filenames:
2497 filename = self.TESTCASE_FILENAME
Anas Nashifce2b4182020-03-24 14:40:28 -04002498 else:
2499 continue
2500
2501 logger.debug("Found possible test case in " + dirpath)
2502
2503 dirnames[:] = []
2504 tc_path = os.path.join(dirpath, filename)
2505
2506 try:
2507 parsed_data = SanityConfigParser(tc_path, self.tc_schema)
2508 parsed_data.load()
2509
2510 tc_path = os.path.dirname(tc_path)
2511 workdir = os.path.relpath(tc_path, root)
2512
2513 for name in parsed_data.tests.keys():
Anas Nashifaff616d2020-04-17 21:24:57 -04002514 tc = TestCase(root, workdir, name)
Anas Nashifce2b4182020-03-24 14:40:28 -04002515
2516 tc_dict = parsed_data.get_test(name, self.testcase_valid_keys)
2517
2518 tc.source_dir = tc_path
2519 tc.yamlfile = tc_path
2520
Anas Nashifce2b4182020-03-24 14:40:28 -04002521 tc.type = tc_dict["type"]
2522 tc.tags = tc_dict["tags"]
2523 tc.extra_args = tc_dict["extra_args"]
2524 tc.extra_configs = tc_dict["extra_configs"]
2525 tc.arch_whitelist = tc_dict["arch_whitelist"]
2526 tc.arch_exclude = tc_dict["arch_exclude"]
2527 tc.skip = tc_dict["skip"]
2528 tc.platform_exclude = tc_dict["platform_exclude"]
2529 tc.platform_whitelist = tc_dict["platform_whitelist"]
2530 tc.toolchain_exclude = tc_dict["toolchain_exclude"]
2531 tc.toolchain_whitelist = tc_dict["toolchain_whitelist"]
2532 tc.tc_filter = tc_dict["filter"]
2533 tc.timeout = tc_dict["timeout"]
2534 tc.harness = tc_dict["harness"]
2535 tc.harness_config = tc_dict["harness_config"]
Anas Nashif43275c82020-05-04 18:22:16 -04002536 if tc.harness == 'console' and not tc.harness_config:
2537 raise Exception('Harness config error: console harness defined without a configuration.')
Anas Nashifce2b4182020-03-24 14:40:28 -04002538 tc.build_only = tc_dict["build_only"]
2539 tc.build_on_all = tc_dict["build_on_all"]
2540 tc.slow = tc_dict["slow"]
2541 tc.min_ram = tc_dict["min_ram"]
2542 tc.depends_on = tc_dict["depends_on"]
2543 tc.min_flash = tc_dict["min_flash"]
2544 tc.extra_sections = tc_dict["extra_sections"]
2545
2546 tc.parse_subcases(tc_path)
2547
2548 if testcase_filter:
2549 if tc.name and tc.name in testcase_filter:
2550 self.testcases[tc.name] = tc
2551 else:
2552 self.testcases[tc.name] = tc
2553
2554 except Exception as e:
2555 logger.error("%s: can't load (skipping): %s" % (tc_path, e))
2556 self.load_errors += 1
2557
2558
2559 def get_platform(self, name):
2560 selected_platform = None
2561 for platform in self.platforms:
2562 if platform.name == name:
2563 selected_platform = platform
2564 break
2565 return selected_platform
2566
2567 def load_from_file(self, file, filter_status=[]):
2568 try:
2569 with open(file, "r") as fp:
2570 cr = csv.DictReader(fp)
2571 instance_list = []
2572 for row in cr:
2573 if row["status"] in filter_status:
2574 continue
2575 test = row["test"]
2576
2577 platform = self.get_platform(row["platform"])
2578 instance = TestInstance(self.testcases[test], platform, self.outdir)
2579 instance.check_build_or_run(
2580 self.build_only,
2581 self.enable_slow,
2582 self.device_testing,
2583 self.fixture
2584 )
2585 instance.create_overlay(platform, self.enable_asan, self.enable_coverage, self.coverage_platform)
2586 instance_list.append(instance)
2587 self.add_instances(instance_list)
2588
2589 except KeyError as e:
2590 logger.error("Key error while parsing tests file.({})".format(str(e)))
2591 sys.exit(2)
2592
2593 except FileNotFoundError as e:
2594 logger.error("Couldn't find input file with list of tests. ({})".format(e))
2595 sys.exit(2)
2596
    def apply_filters(self, **kwargs):
        """Build the set of test instances to run by applying every
        command-line and testcase-level filter.

        Accepted kwargs: platform, exclude_platform, run_individual_tests,
        arch, tag, exclude_tag, all, device_testing, force_toolchain,
        force_platform.  Populates self.instances / self.selected_platforms,
        stores and returns the {instance: reason} discard map.
        """
        toolchain = self.get_toolchain()

        discards = {}
        platform_filter = kwargs.get('platform')
        exclude_platform = kwargs.get('exclude_platform', [])
        testcase_filter = kwargs.get('run_individual_tests', [])
        arch_filter = kwargs.get('arch')
        tag_filter = kwargs.get('tag')
        exclude_tag = kwargs.get('exclude_tag')
        all_filter = kwargs.get('all')
        device_testing_filter = kwargs.get('device_testing')
        force_toolchain = kwargs.get('force_toolchain')
        force_platform = kwargs.get('force_platform')

        logger.debug("platform filter: " + str(platform_filter))
        logger.debug(" arch_filter: " + str(arch_filter))
        logger.debug(" tag_filter: " + str(tag_filter))
        logger.debug(" exclude_tag: " + str(exclude_tag))

        default_platforms = False

        if platform_filter:
            platforms = list(filter(lambda p: p.name in platform_filter, self.platforms))
        else:
            platforms = self.platforms

        if all_filter:
            logger.info("Selecting all possible platforms per test case")
            # When --all used, any --platform arguments ignored
            platform_filter = []
        elif not platform_filter:
            logger.info("Selecting default platforms per test case")
            default_platforms = True

        logger.info("Building initial testcase list...")

        for tc_name, tc in self.testcases.items():
            # list of instances per testcase, aka configurations.
            instance_list = []
            for plat in platforms:
                instance = TestInstance(tc, plat, self.outdir)
                instance.check_build_or_run(
                    self.build_only,
                    self.enable_slow,
                    self.device_testing,
                    self.fixture
                )
                # Each check below either discards the instance (recording a
                # reason) or lets it fall through to the next check.
                if not force_platform and plat.name in exclude_platform:
                    discards[instance] = "Platform is excluded on command line."
                    continue

                if (plat.arch == "unit") != (tc.type == "unit"):
                    # Discard silently
                    continue

                if device_testing_filter and instance.build_only:
                    discards[instance] = "Not runnable on device"
                    continue

                if tc.skip:
                    discards[instance] = "Skip filter"
                    continue

                # build_on_all neutralizes any platform filter for this tc.
                if tc.build_on_all and not platform_filter:
                    platform_filter = []

                if tag_filter and not tc.tags.intersection(tag_filter):
                    discards[instance] = "Command line testcase tag filter"
                    continue

                if exclude_tag and tc.tags.intersection(exclude_tag):
                    discards[instance] = "Command line testcase exclude filter"
                    continue

                if testcase_filter and tc_name not in testcase_filter:
                    discards[instance] = "Testcase name filter"
                    continue

                if arch_filter and plat.arch not in arch_filter:
                    discards[instance] = "Command line testcase arch filter"
                    continue

                # --force-platform bypasses the testcase's own arch/platform
                # whitelists and excludes.
                if not force_platform:

                    if tc.arch_whitelist and plat.arch not in tc.arch_whitelist:
                        discards[instance] = "Not in test case arch whitelist"
                        continue

                    if tc.arch_exclude and plat.arch in tc.arch_exclude:
                        discards[instance] = "In test case arch exclude"
                        continue

                    if tc.platform_exclude and plat.name in tc.platform_exclude:
                        discards[instance] = "In test case platform exclude"
                        continue

                if tc.toolchain_exclude and toolchain in tc.toolchain_exclude:
                    discards[instance] = "In test case toolchain exclude"
                    continue

                if platform_filter and plat.name not in platform_filter:
                    discards[instance] = "Command line platform filter"
                    continue

                if tc.platform_whitelist and plat.name not in tc.platform_whitelist:
                    discards[instance] = "Not in testcase platform whitelist"
                    continue

                if tc.toolchain_whitelist and toolchain not in tc.toolchain_whitelist:
                    discards[instance] = "Not in testcase toolchain whitelist"
                    continue

                if not plat.env_satisfied:
                    discards[instance] = "Environment ({}) not satisfied".format(", ".join(plat.env))
                    continue

                if not force_toolchain \
                        and toolchain and (toolchain not in plat.supported_toolchains) \
                        and tc.type != 'unit':
                    discards[instance] = "Not supported by the toolchain"
                    continue

                if plat.ram < tc.min_ram:
                    discards[instance] = "Not enough RAM"
                    continue

                if tc.depends_on:
                    dep_intersection = tc.depends_on.intersection(set(plat.supported))
                    if dep_intersection != set(tc.depends_on):
                        discards[instance] = "No hardware support"
                        continue

                if plat.flash < tc.min_flash:
                    discards[instance] = "Not enough FLASH"
                    continue

                if set(plat.ignore_tags) & tc.tags:
                    discards[instance] = "Excluded tags per platform"
                    continue

                # if nothing stopped us until now, it means this configuration
                # needs to be added.
                instance_list.append(instance)

            # no configurations, so jump to next testcase
            if not instance_list:
                continue

            # if sanitycheck was launched with no platform options at all, we
            # take all default platforms
            if default_platforms and not tc.build_on_all:
                if tc.platform_whitelist:
                    # Prefer default platforms present in the whitelist;
                    # otherwise fall back to the first surviving instance.
                    a = set(self.default_platforms)
                    b = set(tc.platform_whitelist)
                    c = a.intersection(b)
                    if c:
                        aa = list(filter(lambda tc: tc.platform.name in c, instance_list))
                        self.add_instances(aa)
                    else:
                        self.add_instances(instance_list[:1])
                else:
                    instances = list(filter(lambda tc: tc.platform.default, instance_list))
                    self.add_instances(instances)

                for instance in list(filter(lambda inst: not inst.platform.default, instance_list)):
                    discards[instance] = "Not a default test platform"

            else:
                self.add_instances(instance_list)

        for _, case in self.instances.items():
            case.create_overlay(case.platform, self.enable_asan, self.enable_coverage, self.coverage_platform)

        self.discards = discards
        self.selected_platforms = set(p.platform.name for p in self.instances.values())

        return discards
2776
2777 def add_instances(self, instance_list):
2778 for instance in instance_list:
2779 self.instances[instance.name] = instance
2780
2781 def add_tasks_to_queue(self, test_only=False):
2782 for instance in self.instances.values():
2783 if test_only:
2784 if instance.run:
2785 pipeline.put({"op": "run", "test": instance, "status": "built"})
2786 else:
2787 if instance.status not in ['passed', 'skipped']:
2788 instance.status = None
2789 pipeline.put({"op": "cmake", "test": instance})
2790
2791 return "DONE FEEDING"
2792
    def execute(self):
        """Run the whole suite: feed instances into the pipeline, process
        stages on a bounded thread pool, then collect size metrics.
        """

        def calc_one_elf_size(instance):
            # Compute RAM/ROM footprint for one built instance; native builds
            # get zeroed metrics since sizes are not meaningful there.
            if instance.status not in ["failed", "skipped"]:
                if instance.platform.type != "native":
                    size_calc = instance.calculate_sizes()
                    instance.metrics["ram_size"] = size_calc.get_ram_size()
                    instance.metrics["rom_size"] = size_calc.get_rom_size()
                    instance.metrics["unrecognized"] = size_calc.unrecognized_sections()
                else:
                    instance.metrics["ram_size"] = 0
                    instance.metrics["rom_size"] = 0
                    instance.metrics["unrecognized"] = []

                instance.metrics["handler_time"] = instance.handler.duration if instance.handler else 0

        logger.info("Adding tasks to the queue...")
        # We can use a with statement to ensure threads are cleaned up promptly
        with BoundedExecutor(bound=self.jobs, max_workers=self.jobs) as executor:

            # start a future for a thread which sends work in through the queue
            future_to_test = {
                executor.submit(self.add_tasks_to_queue, self.test_only): 'FEEDER DONE'}

            while future_to_test:
                # check for status of the futures which are currently working
                done, pending = concurrent.futures.wait(future_to_test, timeout=1,
                                                        return_when=concurrent.futures.FIRST_COMPLETED)

                # if there is incoming work, start a new future
                while not pipeline.empty():
                    # fetch a url from the queue
                    message = pipeline.get()
                    test = message['test']

                    pb = ProjectBuilder(self,
                                        test,
                                        lsan=self.enable_lsan,
                                        asan=self.enable_asan,
                                        coverage=self.enable_coverage,
                                        extra_args=self.extra_args,
                                        device_testing=self.device_testing,
                                        cmake_only=self.cmake_only,
                                        cleanup=self.cleanup,
                                        valgrind=self.enable_valgrind,
                                        inline_logs=self.inline_logs,
                                        generator=self.generator,
                                        generator_cmd=self.generator_cmd,
                                        verbose=self.verbose
                                        )
                    future_to_test[executor.submit(pb.process, message)] = test.name

                # process any completed futures
                for future in done:
                    test = future_to_test[future]
                    try:
                        data = future.result()
                    except Exception as exc:
                        # A worker crash is fatal for the whole run.
                        logger.error('%r generated an exception: %s' % (test, exc))
                        sys.exit('%r generated an exception: %s' % (test, exc))

                    else:
                        if data:
                            logger.debug(data)

                    # remove the now completed future
                    del future_to_test[future]

                for future in pending:
                    test = future_to_test[future]

                    try:
                        future.result(timeout=180)
                    except concurrent.futures.TimeoutError:
                        logger.warning("{} stuck?".format(test))

        if self.enable_size_report and not self.cmake_only:
            # Parallelize size calculation
            executor = concurrent.futures.ThreadPoolExecutor(self.jobs)
            futures = [executor.submit(calc_one_elf_size, instance)
                       for instance in self.instances.values()]
            concurrent.futures.wait(futures)
        else:
            # Size reporting disabled: still record handler time, zero sizes.
            for instance in self.instances.values():
                instance.metrics["ram_size"] = 0
                instance.metrics["rom_size"] = 0
                instance.metrics["handler_time"] = instance.handler.duration if instance.handler else 0
                instance.metrics["unrecognized"] = []
2880
2881 def discard_report(self, filename):
2882
2883 try:
2884 if self.discards is None:
2885 raise SanityRuntimeError("apply_filters() hasn't been run!")
2886 except Exception as e:
2887 logger.error(str(e))
2888 sys.exit(2)
2889
2890 with open(filename, "wt") as csvfile:
2891 fieldnames = ["test", "arch", "platform", "reason"]
2892 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
2893 cw.writeheader()
2894 for instance, reason in sorted(self.discards.items()):
2895 rowdict = {"test": instance.testcase.name,
2896 "arch": instance.platform.arch,
2897 "platform": instance.platform.name,
2898 "reason": reason}
2899 cw.writerow(rowdict)
2900
Anas Nashif6915adf2020-04-22 09:39:42 -04002901 def target_report(self, outdir, suffix, append=False):
Anas Nashifce2b4182020-03-24 14:40:28 -04002902 platforms = {inst.platform.name for _, inst in self.instances.items()}
2903 for platform in platforms:
Anas Nashif6915adf2020-04-22 09:39:42 -04002904 if suffix:
2905 filename = os.path.join(outdir,"{}_{}.xml".format(platform, suffix))
2906 else:
2907 filename = os.path.join(outdir,"{}.xml".format(platform))
Anas Nashif90415502020-04-11 22:15:04 -04002908 self.xunit_report(filename, platform, full_report=True, append=append)
Anas Nashifce2b4182020-03-24 14:40:28 -04002909
Anas Nashif90415502020-04-11 22:15:04 -04002910
2911 @staticmethod
2912 def process_log(log_file):
2913 filtered_string = ""
2914 if os.path.exists(log_file):
2915 with open(log_file, "rb") as f:
2916 log = f.read().decode("utf-8")
2917 filtered_string = ''.join(filter(lambda x: x in string.printable, log))
2918
2919 return filtered_string
2920
    def xunit_report(self, filename, platform=None, full_report=False, append=False):
        """Write a JUnit/xunit-style XML report of test results.

        filename -- output XML path.
        platform -- if set, only instances for that platform are included.
        full_report -- if True, emit one <testcase> per individual case
            result (instance.results); otherwise one per test instance,
            based only on its build/run status.
        append -- if True and *filename* exists, update the existing report
            in place, replacing entries for re-run testcases.
        """
        # Counters for the <testsuite> element attributes.
        fails = 0
        passes = 0
        errors = 0
        skips = 0
        duration = 0

        # First pass: aggregate totals across the selected instances.
        for _, instance in self.instances.items():
            if platform and instance.platform.name != platform:
                continue

            handler_time = instance.metrics.get('handler_time', 0)
            duration += handler_time
            if full_report:
                # Per-case tallies: BLOCK maps to xunit "error",
                # anything not PASS/BLOCK/SKIP counts as a failure.
                for k in instance.results.keys():
                    if instance.results[k] == 'PASS':
                        passes += 1
                    elif instance.results[k] == 'BLOCK':
                        errors += 1
                    elif instance.results[k] == 'SKIP':
                        skips += 1
                    else:
                        fails += 1
            else:
                # Instance-level tallies: build errors and handler crashes
                # are "errors", other failed/timeout statuses are "failures".
                if instance.status in ["failed", "timeout"]:
                    if instance.reason in ['build_error', 'handler_crash']:
                        errors += 1
                    else:
                        fails += 1
                elif instance.status == 'skipped':
                    skips += 1
                else:
                    passes += 1

        run = "Sanitycheck"
        eleTestsuite = None

        # When we re-run the tests, we re-use the results and update only with
        # the newly run tests.
        if os.path.exists(filename) and append:
            tree = ET.parse(filename)
            eleTestsuites = tree.getroot()
            # NOTE(review): assumes the existing file has at least one
            # <testsuite> element; only the first one is updated.
            eleTestsuite = tree.findall('testsuite')[0]
            eleTestsuite.attrib['failures'] = "%d" % fails
            eleTestsuite.attrib['errors'] = "%d" % errors
            eleTestsuite.attrib['skip'] = "%d" % skips

        else:
            eleTestsuites = ET.Element('testsuites')
            eleTestsuite = ET.SubElement(eleTestsuites, 'testsuite',
                                         name=run, time="%f" % duration,
                                         tests="%d" % (errors + passes + fails + skips),
                                         failures="%d" % fails,
                                         errors="%d" % (errors), skip="%s" % (skips))

        # Second pass: emit one <testcase> element per case (full_report)
        # or per instance.
        for _, instance in self.instances.items():
            if platform and instance.platform.name != platform:
                continue

            if full_report:
                tname = os.path.basename(instance.testcase.name)
            else:
                tname = instance.testcase.name
            # remove testcases that are being re-run from exiting reports
            if append:
                for tc in eleTestsuite.findall('testcase'):
                    if tc.get('classname') == "%s:%s" % (instance.platform.name, tname):
                        eleTestsuite.remove(tc)

            handler_time = instance.metrics.get('handler_time', 0)

            if full_report:
                for k in instance.results.keys():
                    eleTestcase = ET.SubElement(
                        eleTestsuite, 'testcase',
                        classname="%s:%s" % (instance.platform.name, tname),
                        name="%s" % (k), time="%f" % handler_time)
                    if instance.results[k] in ['FAIL', 'BLOCK']:
                        # FAIL -> <failure>, BLOCK -> <error>; in both cases
                        # the printable-filtered handler log is attached.
                        if instance.results[k] == 'FAIL':
                            el = ET.SubElement(
                                eleTestcase,
                                'failure',
                                type="failure",
                                message="failed")
                        else:
                            el = ET.SubElement(
                                eleTestcase,
                                'error',
                                type="failure",
                                message="failed")
                        p = os.path.join(self.outdir, instance.platform.name, instance.testcase.name)
                        log_file = os.path.join(p, "handler.log")
                        el.text = self.process_log(log_file)

                    elif instance.results[k] == 'SKIP':
                        el = ET.SubElement(
                            eleTestcase,
                            'skipped',
                            type="skipped",
                            message="Skipped")
            else:
                eleTestcase = ET.SubElement(eleTestsuite, 'testcase',
                    classname="%s:%s" % (instance.platform.name, instance.testcase.name),
                    name="%s" % (instance.testcase.name),
                    time="%f" % handler_time)
                if instance.status in ["failed", "timeout"]:
                    failure = ET.SubElement(
                        eleTestcase,
                        'failure',
                        type="failure",
                        message=instance.reason)
                    # Prefer the handler log when the test actually ran;
                    # fall back to the build log for build errors.
                    p = ("%s/%s/%s" % (self.outdir, instance.platform.name, instance.testcase.name))
                    bl = os.path.join(p, "build.log")
                    hl = os.path.join(p, "handler.log")
                    log_file = bl
                    if instance.reason != 'Build error':
                        if os.path.exists(hl):
                            log_file = hl
                        else:
                            log_file = bl

                    failure.text = self.process_log(log_file)

                elif instance.status == "skipped":
                    ET.SubElement(eleTestcase, 'skipped', type="skipped", message="Skipped")


        result = ET.tostring(eleTestsuites)
        with open(filename, 'wb') as report:
            report.write(result)
3051
Anas Nashif90415502020-04-11 22:15:04 -04003052
Anas Nashifce2b4182020-03-24 14:40:28 -04003053 def csv_report(self, filename):
3054 with open(filename, "wt") as csvfile:
3055 fieldnames = ["test", "arch", "platform", "status",
3056 "extra_args", "handler", "handler_time", "ram_size",
3057 "rom_size"]
3058 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
3059 cw.writeheader()
3060 for instance in self.instances.values():
3061 rowdict = {"test": instance.testcase.name,
3062 "arch": instance.platform.arch,
3063 "platform": instance.platform.name,
3064 "extra_args": " ".join(instance.testcase.extra_args),
3065 "handler": instance.platform.simulation}
3066
3067 rowdict["status"] = instance.status
3068 if instance.status not in ["failed", "timeout"]:
3069 if instance.handler:
3070 rowdict["handler_time"] = instance.metrics.get("handler_time", 0)
3071 ram_size = instance.metrics.get("ram_size", 0)
3072 rom_size = instance.metrics.get("rom_size", 0)
3073 rowdict["ram_size"] = ram_size
3074 rowdict["rom_size"] = rom_size
3075 cw.writerow(rowdict)
3076
3077 def get_testcase(self, identifier):
3078 results = []
3079 for _, tc in self.testcases.items():
3080 for case in tc.cases:
3081 if case == identifier:
3082 results.append(tc)
3083 return results
3084
3085
class CoverageTool:
    """ Base class for every supported coverage tool
    """

    def __init__(self):
        # Path to the gcov binary matching the toolchain; set by the caller.
        self.gcov_tool = None
        # Source tree root used to resolve coverage paths; set by the caller.
        self.base_dir = None

    @staticmethod
    def factory(tool):
        """Return a CoverageTool instance for *tool* ('lcov' or 'gcovr').

        Logs an error and returns None for an unsupported tool name.
        """
        if tool == 'lcov':
            t = Lcov()
        elif tool == 'gcovr':
            # Bug fix: this branch previously instantiated Lcov(), so
            # requesting gcovr silently produced lcov-based reports.
            t = Gcovr()
        else:
            logger.error("Unsupported coverage tool specified: {}".format(tool))
            return None

        return t

    @staticmethod
    def retrieve_gcov_data(intput_file):
        """Extract gcov coverage hex dumps from a console log file.

        The target brackets its coverage dump between the markers
        GCOV_COVERAGE_DUMP_START and GCOV_COVERAGE_DUMP_END; each payload
        line has the form "*<path to .gcda file><hex dump>".

        Returns a dict with:
          'complete': True when the end marker was seen (or no dump began),
          'data': mapping of gcda file name -> hex-encoded contents.
        """
        logger.debug("Working on %s" % intput_file)
        extracted_coverage_info = {}
        capture_data = False
        capture_complete = False
        with open(intput_file, 'r') as fp:
            for line in fp.readlines():
                if re.search("GCOV_COVERAGE_DUMP_START", line):
                    capture_data = True
                    continue
                if re.search("GCOV_COVERAGE_DUMP_END", line):
                    capture_complete = True
                    break
                # Loop until the coverage data is found.
                if not capture_data:
                    continue
                if line.startswith("*"):
                    sp = line.split("<")
                    if len(sp) > 1:
                        # Remove the leading delimiter "*"
                        file_name = sp[0][1:]
                        # Remove the trailing new line char
                        hex_dump = sp[1][:-1]
                    else:
                        continue
                else:
                    continue
                extracted_coverage_info.update({file_name: hex_dump})
        # A log with no dump at all is still considered "complete".
        if not capture_data:
            capture_complete = True
        return {'complete': capture_complete, 'data': extracted_coverage_info}

    @staticmethod
    def create_gcda_files(extracted_coverage_info):
        """Write each extracted hex dump back out as a binary .gcda file."""
        logger.debug("Generating gcda files")
        for filename, hexdump_val in extracted_coverage_info.items():
            # if kobject_hash is given for coverage gcovr fails
            # hence skipping it problem only in gcovr v4.1
            if "kobject_hash" in filename:
                filename = (filename[:-4]) + "gcno"
                try:
                    os.remove(filename)
                except Exception:
                    # Best effort: a leftover gcno that can't be removed is
                    # not fatal for report generation.
                    pass
                continue

            with open(filename, 'wb') as fp:
                fp.write(bytes.fromhex(hexdump_val))

    def generate(self, outdir):
        """Harvest gcov data from every handler.log under *outdir*, then
        render the coverage report via the tool-specific _generate().
        """
        for filename in glob.glob("%s/**/handler.log" % outdir, recursive=True):
            gcov_data = self.__class__.retrieve_gcov_data(filename)
            capture_complete = gcov_data['complete']
            extracted_coverage_info = gcov_data['data']
            if capture_complete:
                self.__class__.create_gcda_files(extracted_coverage_info)
                logger.debug("Gcov data captured: {}".format(filename))
            else:
                logger.error("Gcov data capture incomplete: {}".format(filename))

        with open(os.path.join(outdir, "coverage.log"), "a") as coveragelog:
            ret = self._generate(outdir, coveragelog)
            if ret == 0:
                logger.info("HTML report generated: {}".format(
                    os.path.join(outdir, "coverage", "index.html")))
3172
3173
class Lcov(CoverageTool):
    """Coverage reporting backed by the lcov / genhtml tool chain."""

    def __init__(self):
        super().__init__()
        # Glob patterns fed to 'lcov --remove'.
        self.ignores = []

    def add_ignore_file(self, pattern):
        """Exclude any file whose path contains *pattern*."""
        self.ignores.append('*' + pattern + '*')

    def add_ignore_directory(self, pattern):
        """Exclude everything underneath the directory *pattern*."""
        self.ignores.append(pattern + '/*')

    def _generate(self, outdir, coveragelog):
        """Capture, filter and render coverage; returns genhtml's exit code."""
        cov_info = os.path.join(outdir, "coverage.info")
        ztest_info = os.path.join(outdir, "ztest.info")
        # Capture raw coverage from the build/run output tree.
        subprocess.call(["lcov", "--gcov-tool", self.gcov_tool,
                         "--capture", "--directory", outdir,
                         "--rc", "lcov_branch_coverage=1",
                         "--output-file", cov_info], stdout=coveragelog)
        # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
        subprocess.call(["lcov", "--gcov-tool", self.gcov_tool, "--extract",
                         cov_info,
                         os.path.join(self.base_dir, "tests", "ztest", "*"),
                         "--output-file", ztest_info,
                         "--rc", "lcov_branch_coverage=1"], stdout=coveragelog)

        files = [cov_info]
        if os.path.exists(ztest_info) and os.path.getsize(ztest_info) > 0:
            subprocess.call(["lcov", "--gcov-tool", self.gcov_tool, "--remove",
                             ztest_info,
                             os.path.join(self.base_dir, "tests/ztest/test/*"),
                             "--output-file", ztest_info,
                             "--rc", "lcov_branch_coverage=1"],
                            stdout=coveragelog)
            files.append(ztest_info)

        for ignore_pattern in self.ignores:
            subprocess.call(
                ["lcov", "--gcov-tool", self.gcov_tool, "--remove",
                 cov_info, ignore_pattern, "--output-file",
                 cov_info, "--rc", "lcov_branch_coverage=1"],
                stdout=coveragelog)

        # The --ignore-errors source option is added to avoid it exiting due to
        # samples/application_development/external_lib/
        return subprocess.call(["genhtml", "--legend", "--branch-coverage",
                                "--ignore-errors", "source",
                                "-output-directory",
                                os.path.join(outdir, "coverage")] + files,
                               stdout=coveragelog)
3225
3226
class Gcovr(CoverageTool):
    """Coverage reporting backed by the gcovr tool."""

    def __init__(self):
        super().__init__()
        # Regex patterns fed to gcovr's '-e' (exclude) option.
        self.ignores = []

    def add_ignore_file(self, pattern):
        """Exclude any file whose path matches *pattern* (gcovr uses regex)."""
        self.ignores.append('.*' + pattern + '.*')

    def add_ignore_directory(self, pattern):
        """Exclude everything underneath the directory *pattern*."""
        self.ignores.append(pattern + '/.*')

    @staticmethod
    def _interleave_list(prefix, items):
        """Return [prefix, item0, prefix, item1, ...] for CLI argument lists.

        The second parameter was renamed from 'list', which shadowed the
        builtin of the same name.
        """
        tuple_list = [(prefix, item) for item in items]
        return [item for sublist in tuple_list for item in sublist]

    def _generate(self, outdir, coveragelog):
        """Collect JSON tracefiles with gcovr, then render the HTML report.

        Returns the exit code of the final gcovr invocation.
        """
        coveragefile = os.path.join(outdir, "coverage.json")
        ztestfile = os.path.join(outdir, "ztest.json")

        excludes = Gcovr._interleave_list("-e", self.ignores)

        # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
        subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
                         self.gcov_tool, "-e", "tests/*"] + excludes +
                        ["--json", "-o", coveragefile, outdir],
                        stdout=coveragelog)

        subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
                         self.gcov_tool, "-f", "tests/ztest", "-e",
                         "tests/ztest/test/*", "--json", "-o", ztestfile,
                         outdir], stdout=coveragelog)

        # Merge the ztest tracefile only if it actually contains data.
        if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
            files = [coveragefile, ztestfile]
        else:
            files = [coveragefile]

        subdir = os.path.join(outdir, "coverage")
        os.makedirs(subdir, exist_ok=True)

        tracefiles = self._interleave_list("--add-tracefile", files)

        return subprocess.call(["gcovr", "-r", self.base_dir, "--html",
                                "--html-details"] + tracefiles +
                               ["-o", os.path.join(subdir, "index.html")],
                               stdout=coveragelog)
class HardwareMap:
    """Registry of hardware (boards) attached to the host.

    Devices come from three sources: a YAML hardware-map file, a single
    --device-serial command-line entry, or live USB scanning via pyserial.
    """

    schema_path = os.path.join(ZEPHYR_BASE, "scripts", "sanity_chk", "hwmap-schema.yaml")

    # USB manufacturer strings we recognize as debug probes / serial adapters.
    manufacturer = [
        'ARM',
        'SEGGER',
        'MBED',
        'STMicroelectronics',
        'Atmel Corp.',
        'Texas Instruments',
        'Silicon Labs',
        'NXP Semiconductors',
        'Microchip Technology Inc.',
        'FTDI',
        'Digilent'
    ]

    # Map from runner name to the USB product strings (exact match first,
    # then regex) that it can talk to.
    runner_mapping = {
        'pyocd': [
            'DAPLink CMSIS-DAP',
            'MBED CMSIS-DAP'
        ],
        'jlink': [
            'J-Link',
            'J-Link OB'
        ],
        'openocd': [
            'STM32 STLink', '^XDS110.*'
        ],
        'dediprog': [
            'TTL232R-3V3',
            'MCP2200 USB Serial Port Emulator'
        ]
    }

    def __init__(self):
        # Devices found by scan_hw() during this run.
        self.detected = []
        # Devices loaded from a map file or the command line.
        self.connected_hardware = []

    def load_device_from_cmdline(self, serial, platform):
        """Register a single device given on the command line as connected."""
        device = {
            "serial": serial,
            "platform": platform,
            "counter": 0,
            "available": True,
            "connected": True
        }
        self.connected_hardware.append(device)

    def load_hardware_map(self, map_file):
        """Load and schema-validate a hardware map YAML file."""
        hwm_schema = scl.yaml_load(self.schema_path)
        self.connected_hardware = scl.yaml_load_verify(map_file, hwm_schema)
        for i in self.connected_hardware:
            # Usage counter for scheduling tests onto the device.
            i['counter'] = 0

    def scan_hw(self, persistent=False):
        """Scan USB serial ports and populate self.detected.

        persistent -- on Linux, record stable /dev/serial/by-id symlinks
        instead of the volatile /dev/ttyACMx device nodes.
        """
        from serial.tools import list_ports

        if persistent and platform.system() == 'Linux':
            # On Linux, /dev/serial/by-id provides symlinks to
            # '/dev/ttyACMx' nodes using names which are unique as
            # long as manufacturers fill out USB metadata nicely.
            #
            # This creates a map from '/dev/ttyACMx' device nodes
            # to '/dev/serial/by-id/usb-...' symlinks. The symlinks
            # go into the hardware map because they stay the same
            # even when the user unplugs / replugs the device.
            #
            # Some inexpensive USB/serial adapters don't result
            # in unique names here, though, so use of this feature
            # requires explicitly setting persistent=True.
            by_id = Path('/dev/serial/by-id')

            def readlink(link):
                return str((by_id / link).resolve())

            persistent_map = {readlink(link): str(link)
                              for link in by_id.iterdir()}
        else:
            persistent_map = {}

        serial_devices = list_ports.comports()
        logger.info("Scanning connected hardware...")
        for d in serial_devices:
            if d.manufacturer in self.manufacturer:

                # TI XDS110 can have multiple serial devices for a single board
                # assume endpoint 0 is the serial, skip all others
                if d.manufacturer == 'Texas Instruments' and not d.location.endswith('0'):
                    continue
                s_dev = {}
                s_dev['platform'] = "unknown"
                s_dev['id'] = d.serial_number
                s_dev['serial'] = persistent_map.get(d.device, d.device)
                s_dev['product'] = d.product
                s_dev['runner'] = 'unknown'
                # Iterate runner -> product list directly instead of the
                # redundant .items() + .get() lookup the original used.
                for runner, products in self.runner_mapping.items():
                    if d.product in products:
                        s_dev['runner'] = runner
                        continue
                    # Try regex matching
                    for p in products:
                        if re.match(p, d.product):
                            s_dev['runner'] = runner

                s_dev['available'] = True
                s_dev['connected'] = True
                self.detected.append(s_dev)
            else:
                logger.warning("Unsupported device (%s): %s" % (d.manufacturer, d))

    def write_map(self, hwm_file):
        """Write (or merge into) the hardware map YAML file *hwm_file*.

        When the file already exists, existing entries are matched by
        (id, product); matched entries get their connection state and
        serial refreshed, unmatched detections are appended.
        """
        # use existing map
        if os.path.exists(hwm_file):
            with open(hwm_file, 'r') as yaml_file:
                hwm = yaml.load(yaml_file, Loader=yaml.FullLoader)
            # disconnect everything
            for h in hwm:
                h['connected'] = False
                h['serial'] = None

            for d in self.detected:
                for h in hwm:
                    if d['id'] == h['id'] and d['product'] == h['product']:
                        h['connected'] = True
                        h['serial'] = d['serial']
                        d['match'] = True

            new = list(filter(lambda n: not n.get('match', False), self.detected))
            hwm = hwm + new

            logger.info("Registered devices:")
            self.dump(hwm)

            with open(hwm_file, 'w') as yaml_file:
                yaml.dump(hwm, yaml_file, default_flow_style=False)

        else:
            # create new file
            with open(hwm_file, 'w') as yaml_file:
                yaml.dump(self.detected, yaml_file, default_flow_style=False)
            logger.info("Detected devices:")
            self.dump(self.detected)

    @staticmethod
    def dump(hwmap=None, filtered=None, header=None, connected_only=False):
        """Print a table of devices in *hwmap*.

        filtered -- if non-empty, only show these platform names.
        connected_only -- if True, hide devices not currently connected.

        Fixed: the defaults were shared mutable lists (hwmap=[], ...);
        they are now None-sentinels, which is behaviorally equivalent here.
        """
        hwmap = hwmap if hwmap is not None else []
        filtered = filtered if filtered is not None else []
        print("")
        table = []
        if not header:
            header = ["Platform", "ID", "Serial device"]
        for p in sorted(hwmap, key=lambda i: i['platform']):
            plat_name = p.get('platform')
            connected = p.get('connected', False)
            if filtered and plat_name not in filtered:
                continue

            if not connected_only or connected:
                table.append([plat_name, p.get('id', None), p.get('serial')])

        print(tabulate(table, headers=header, tablefmt="github"))
3436
3437
def size_report(sc):
    """Log a per-section memory layout table for a size-calculator object.

    sc -- object exposing .filename, .sections (list of dicts with name,
    virt_addr, load_addr, size, type), .rom_size and .ram_size.
    """
    logger.info(sc.filename)
    logger.info("SECTION NAME VMA LMA SIZE HEX SZ TYPE")
    # Iterate sections directly; the old index loop only used the index
    # to look the element up again.
    for v in sc.sections:
        # SIZE is printed twice: once decimal, once hex.
        logger.info("%-17s 0x%08x 0x%08x %8d 0x%05x %-7s" %
                    (v["name"], v["virt_addr"], v["load_addr"], v["size"], v["size"],
                     v["type"]))

    logger.info("Totals: %d bytes (ROM), %d bytes (RAM)" %
                (sc.rom_size, sc.ram_size))
    logger.info("")
3451
3452
3453
def export_tests(filename, tests):
    """Export dotted test identifiers to a CSV traceability file.

    Each name of the form "section.subsection_words" becomes one row with
    a capitalized section and title-cased subsection; names without a dot
    are logged as non-exportable and skipped.  No header row is written.
    """
    fieldnames = ['section', 'subsection', 'title', 'reference']
    with open(filename, "wt") as csvfile:
        cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
        for test in tests:
            parts = test.split(".")
            if len(parts) <= 1:
                logger.info("{} can't be exported".format(test))
                continue
            # "foo_bar_baz" -> "Foo Bar Baz"
            subsec = " ".join(parts[1].split("_")).title()
            cw.writerow({
                "section": parts[0].capitalize(),
                "subsection": subsec,
                "title": test,
                "reference": test,
            })