blob: 7c15b8f64b1c12184fde1472175c9414296095ba [file] [log] [blame]
Anas Nashifce2b4182020-03-24 14:40:28 -04001#!/usr/bin/env python3
2# vim: set syntax=python ts=4 :
3#
4# Copyright (c) 2018 Intel Corporation
5# SPDX-License-Identifier: Apache-2.0
6
7import os
8import contextlib
9import string
10import mmap
11import sys
12import re
13import subprocess
14import select
15import shutil
16import shlex
17import signal
18import threading
19import concurrent.futures
20from collections import OrderedDict
21from threading import BoundedSemaphore
22import queue
23import time
24import csv
25import glob
26import concurrent
27import xml.etree.ElementTree as ET
28import logging
29from pathlib import Path
30from distutils.spawn import find_executable
31from colorama import Fore
32import yaml
Martí Bolívar07dce822020-04-13 16:50:51 -070033import platform
Anas Nashifce2b4182020-03-24 14:40:28 -040034
35try:
36 import serial
37except ImportError:
38 print("Install pyserial python module with pip to use --device-testing option.")
39
40try:
41 from tabulate import tabulate
42except ImportError:
43 print("Install tabulate python module with pip to use --device-testing option.")
44
Wentong Wu0d619ae2020-05-05 19:46:49 -040045try:
46 import psutil
47except ImportError:
Anas Nashif77946fa2020-05-21 18:19:01 -040048 print("Install psutil python module with pip to run in Qemu.")
Wentong Wu0d619ae2020-05-05 19:46:49 -040049
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
if not ZEPHYR_BASE:
    # Everything below resolves paths relative to the Zephyr tree, so
    # there is no point continuing without it.
    sys.exit("$ZEPHYR_BASE environment variable undefined")

# Make the in-tree device-tree tooling (edtlib) importable.
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts", "dts"))
import edtlib

# Locks shared by handler threads: hardware-map mutation and report writing.
hw_map_local = threading.Lock()
report_lock = threading.Lock()

# Use this for internal comparisons; that's what canonicalization is
# for. Don't use it when invoking other components of the build system
# to avoid confusing and hard to trace inconsistencies in error messages
# and logs, generated Makefiles, etc. compared to when users invoke these
# components directly.
# Note "normalization" is different from canonicalization, see os.path.
canonical_zephyr_base = os.path.realpath(ZEPHYR_BASE)

sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/"))

from sanity_chk import scl
from sanity_chk import expr_parser

logger = logging.getLogger('sanitycheck')
logger.setLevel(logging.DEBUG)

# Global LIFO queue of work items; the consumers live elsewhere in this file.
pipeline = queue.LifoQueue()
77
class CMakeCacheEntry:
    '''Represents a CMake cache entry.

    This class understands the type system in a CMakeCache.txt, and
    converts the following cache types to Python types:

      Cache Type    Python type
      ----------    -------------------------------------------
      FILEPATH      str
      PATH          str
      STRING        str OR list of str (if ';' is in the value)
      BOOL          bool
      INTERNAL      str OR list of str (if ';' is in the value)
      ----------    -------------------------------------------
    '''

    # Regular expression for a cache entry.
    #
    # CMake variable names can include escape characters, allowing a
    # wider set of names than is easy to match with a regular
    # expression. To be permissive here, use a non-greedy match up to
    # the first colon (':'). This breaks if the variable name has a
    # colon inside, but it's good enough.
    CACHE_ENTRY = re.compile(
        r'''(?P<name>.*?)                               # name
         :(?P<type>FILEPATH|PATH|STRING|BOOL|INTERNAL)  # type
         =(?P<value>.*)                                 # value
        ''', re.X)

    @classmethod
    def _to_bool(cls, val):
        """Convert a CMake BOOL string into a Python bool.

        Raises ValueError when the string is not a recognized CMake
        boolean constant or a number.
        """
        # "True if the constant is 1, ON, YES, TRUE, Y, or a
        # non-zero number. False if the constant is 0, OFF, NO,
        # FALSE, N, IGNORE, NOTFOUND, the empty string, or ends in
        # the suffix -NOTFOUND. Named boolean constants are
        # case-insensitive. If the argument is not one of these
        # constants, it is treated as a variable."
        #
        # https://cmake.org/cmake/help/v3.0/command/if.html
        #
        # FIX: the named-constant branches used to return the ints 1/0
        # while the numeric branch returned a real bool, contradicting
        # the documented "BOOL -> bool" mapping above. Returning
        # True/False is backward compatible (True == 1, False == 0).
        val = val.upper()
        if val in ('ON', 'YES', 'TRUE', 'Y'):
            return True
        elif val in ('OFF', 'NO', 'FALSE', 'N', 'IGNORE', 'NOTFOUND', ''):
            return False
        elif val.endswith('-NOTFOUND'):
            return False
        else:
            try:
                return int(val) != 0
            except ValueError as exc:
                raise ValueError('invalid bool {}'.format(val)) from exc

    @classmethod
    def from_line(cls, line, line_no):
        """Parse one CMakeCache.txt line into an entry.

        @param line one text line from the cache file
        @param line_no zero-based line number, used in error messages
        @return a CMakeCacheEntry, or None for comments, blank lines
                and lines that do not match the entry grammar
        """
        # Comments can only occur at the beginning of a line.
        # (The value of an entry could contain a comment character).
        if line.startswith('//') or line.startswith('#'):
            return None

        # Whitespace-only lines do not contain cache entries.
        if not line.strip():
            return None

        m = cls.CACHE_ENTRY.match(line)
        if not m:
            return None

        name, type_, value = (m.group(g) for g in ('name', 'type', 'value'))
        if type_ == 'BOOL':
            try:
                value = cls._to_bool(value)
            except ValueError as exc:
                args = exc.args + ('on line {}: {}'.format(line_no, line),)
                raise ValueError(args) from exc
        elif type_ in ['STRING', 'INTERNAL']:
            # If the value is a CMake list (i.e. is a string which
            # contains a ';'), convert to a Python list.
            if ';' in value:
                value = value.split(';')

        return CMakeCacheEntry(name, value)

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __str__(self):
        fmt = 'CMakeCacheEntry(name={}, value={})'
        return fmt.format(self.name, self.value)
170
171
172class CMakeCache:
173 '''Parses and represents a CMake cache file.'''
174
175 @staticmethod
176 def from_file(cache_file):
177 return CMakeCache(cache_file)
178
179 def __init__(self, cache_file):
180 self.cache_file = cache_file
181 self.load(cache_file)
182
183 def load(self, cache_file):
184 entries = []
185 with open(cache_file, 'r') as cache:
186 for line_no, line in enumerate(cache):
187 entry = CMakeCacheEntry.from_line(line, line_no)
188 if entry:
189 entries.append(entry)
190 self._entries = OrderedDict((e.name, e) for e in entries)
191
192 def get(self, name, default=None):
193 entry = self._entries.get(name)
194 if entry is not None:
195 return entry.value
196 else:
197 return default
198
199 def get_list(self, name, default=None):
200 if default is None:
201 default = []
202 entry = self._entries.get(name)
203 if entry is not None:
204 value = entry.value
205 if isinstance(value, list):
206 return value
207 elif isinstance(value, str):
208 return [value] if value else []
209 else:
210 msg = 'invalid value {} type {}'
211 raise RuntimeError(msg.format(value, type(value)))
212 else:
213 return default
214
215 def __contains__(self, name):
216 return name in self._entries
217
218 def __getitem__(self, name):
219 return self._entries[name].value
220
221 def __setitem__(self, name, entry):
222 if not isinstance(entry, CMakeCacheEntry):
223 msg = 'improper type {} for value {}, expecting CMakeCacheEntry'
224 raise TypeError(msg.format(type(entry), entry))
225 self._entries[name] = entry
226
227 def __delitem__(self, name):
228 del self._entries[name]
229
230 def __iter__(self):
231 return iter(self._entries.values())
232
233
class SanityCheckException(Exception):
    """Base class for all errors raised by the sanitycheck tooling."""
    pass
236
237
class SanityRuntimeError(SanityCheckException):
    """Runtime failure while processing artifacts (e.g. SizeCalculator
    raises this for a non-ELF binary or missing symbol information)."""
    pass
240
241
class ConfigurationError(SanityCheckException):
    """Error tied to a specific configuration file.

    The exception message is prefixed with the offending file's path.
    """

    def __init__(self, cfile, message):
        super().__init__(cfile + ": " + message)
245
246
class BuildError(SanityCheckException):
    """Build-stage failure. (No raise sites in this portion of the file.)"""
    pass
249
250
class ExecutionError(SanityCheckException):
    """Execution-stage failure. (No raise sites in this portion of the file.)"""
    pass
253
254
class HarnessImporter:
    """Dynamically load a harness class from scripts/sanity_chk/harness.py.

    The loaded harness object is exposed as self.instance.
    """

    def __init__(self, name):
        """@param name harness class name; falls back to "Test" when empty."""
        sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/sanity_chk"))
        module = __import__("harness")
        my_class = getattr(module, name if name else "Test")
        self.instance = my_class()
266
267
class Handler:
    def __init__(self, instance, type_str="build"):
        """Constructor

        @param instance Test instance this handler operates on
        @param type_str Handler flavor string (e.g. "build"), kept for
               reporting purposes
        """
        # Protects state/duration, which are read and written from
        # multiple threads (see set_state/get_state).
        self.lock = threading.Lock()

        self.state = "waiting"
        self.run = False
        self.duration = 0
        self.type_str = type_str

        self.binary = None
        self.pid_fn = None
        self.call_make_run = False

        self.name = instance.name
        self.instance = instance
        self.timeout = instance.testcase.timeout
        self.sourcedir = instance.testcase.source_dir
        self.build_dir = instance.build_dir
        self.log = os.path.join(self.build_dir, "handler.log")
        self.returncode = 0
        self.set_state("running", self.duration)
        self.generator = None
        self.generator_cmd = None

        self.args = []

    def set_state(self, state, duration):
        """Atomically record the handler state and elapsed duration."""
        with self.lock:
            self.state = state
            self.duration = duration

    def get_state(self):
        """Return (state, duration) as a consistent snapshot."""
        with self.lock:
            ret = (self.state, self.duration)
        return ret

    def record(self, harness):
        """Append data recorded by the harness to <build_dir>/recording.csv.

        Writes a header row (harness.fieldnames) followed by one row per
        recorded entry. No-op when the harness recorded nothing.
        """
        if harness.recording:
            filename = os.path.join(self.build_dir, "recording.csv")
            with open(filename, "at") as csvfile:
                # FIX: harness.fieldnames used to be passed as csv.writer()'s
                # positional 'dialect' argument, where it was silently
                # ignored (a list carries none of the dialect attributes, so
                # defaults were used). Only lineterminator is intended here.
                cw = csv.writer(csvfile, lineterminator=os.linesep)
                cw.writerow(harness.fieldnames)
                for instance in harness.recording:
                    cw.writerow(instance)
317
318
class BinaryHandler(Handler):
    def __init__(self, instance, type_str):
        """Constructor

        @param instance Test Instance
        @param type_str Handler type string, forwarded to the base Handler
        """
        super().__init__(instance, type_str)

        # True once terminate() has stopped the process; lets handle()
        # distinguish "we killed it" from a genuine failure exit code.
        self.terminated = False

        # Tool options
        self.valgrind = False
        self.lsan = False
        self.asan = False
        self.coverage = False

    def try_kill_process_by_pid(self):
        """Kill the process whose pid is recorded in self.pid_fn, if any.

        Removes the pid file and clears the reference so a second call is
        a no-op; a process that already exited is ignored.
        """
        if self.pid_fn:
            pid = int(open(self.pid_fn).read())
            os.unlink(self.pid_fn)
            self.pid_fn = None  # clear so we don't try to kill the binary twice
            try:
                os.kill(pid, signal.SIGTERM)
            except ProcessLookupError:
                pass

    def terminate(self, proc):
        """Stop the spawned process (and any pid-file child) consistently.

        @param proc subprocess.Popen object to terminate
        """
        # encapsulate terminate functionality so we do it consistently where ever
        # we might want to terminate the proc.  We need try_kill_process_by_pid
        # because of both how newer ninja (1.6.0 or greater) and .NET / renode
        # work.  Newer ninja's don't seem to pass SIGTERM down to the children
        # so we need to use try_kill_process_by_pid.
        self.try_kill_process_by_pid()
        proc.terminate()
        # sleep for a while before attempting to kill
        time.sleep(0.5)
        proc.kill()
        self.terminated = True

    def _output_reader(self, proc, harness):
        """Stream the child's stdout into the log file and the harness.

        Runs on a separate thread; stops when the harness reports a final
        state or the child's stdout reaches EOF.
        """
        log_out_fp = open(self.log, "wt")
        for line in iter(proc.stdout.readline, b''):
            decoded = line.decode('utf-8')
            stripped = decoded.rstrip()
            logger.debug("OUTPUT: {0}".format(stripped))
            log_out_fp.write(decoded)
            log_out_fp.flush()
            harness.handle(stripped)
            if harness.state:
                try:
                    # POSIX arch based ztests end on their own,
                    # so let's give it up to 100ms to do so
                    proc.wait(0.1)
                except subprocess.TimeoutExpired:
                    self.terminate(proc)
                break

        log_out_fp.close()

    def handle(self):
        """Run the built binary (or 'make run'), monitor it and set state."""

        harness_name = self.instance.testcase.harness.capitalize()
        harness_import = HarnessImporter(harness_name)
        harness = harness_import.instance
        harness.configure(self.instance)

        if self.call_make_run:
            command = [self.generator_cmd, "run"]
        else:
            command = [self.binary]

        run_valgrind = False
        if self.valgrind and shutil.which("valgrind"):
            command = ["valgrind", "--error-exitcode=2",
                       "--leak-check=full",
                       "--suppressions=" + ZEPHYR_BASE + "/scripts/valgrind.supp",
                       "--log-file=" + self.build_dir + "/valgrind.log"
                       ] + command
            run_valgrind = True

        logger.debug("Spawning process: " +
                     " ".join(shlex.quote(word) for word in command) + os.linesep +
                     "in directory: " + self.build_dir)

        start_time = time.time()

        env = os.environ.copy()
        if self.asan:
            # FIX: options are now joined with ':' -- the previous code
            # appended "detect_leaks=0" directly onto whatever the
            # inherited ASAN_OPTIONS ended with, corrupting both options.
            asan_opts = ["log_path=stdout"]
            inherited = env.get("ASAN_OPTIONS", "").strip(":")
            if inherited:
                asan_opts.append(inherited)
            if not self.lsan:
                asan_opts.append("detect_leaks=0")
            env["ASAN_OPTIONS"] = ":".join(asan_opts)

        with subprocess.Popen(command, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, cwd=self.build_dir, env=env) as proc:
            logger.debug("Spawning BinaryHandler Thread for %s" % self.name)
            t = threading.Thread(target=self._output_reader, args=(proc, harness,), daemon=True)
            t.start()
            t.join(self.timeout)
            if t.is_alive():
                self.terminate(proc)
                t.join()
            proc.wait()
            self.returncode = proc.returncode

        handler_time = time.time() - start_time

        if self.coverage:
            # FIX: the old call passed a list together with shell=True, so
            # only the first element ("GCOV_PREFIX=...") was executed as the
            # shell command and gcov never ran. Run gcov directly with
            # GCOV_PREFIX exported through the environment instead.
            subprocess.call(["gcov", self.sourcedir, "-b", "-s", self.build_dir],
                            env=dict(os.environ, GCOV_PREFIX=self.build_dir))

        self.try_kill_process_by_pid()

        # FIXME: This is needed when killing the simulator, the console is
        # garbled and needs to be reset. Did not find a better way to do that.

        subprocess.call(["stty", "sane"])
        self.instance.results = harness.tests

        if not self.terminated and self.returncode != 0:
            # When a process is killed, the default handler returns 128 + SIGTERM
            # so in that case the return code itself is not meaningful
            self.set_state("failed", handler_time)
            self.instance.reason = "Failed"
        elif run_valgrind and self.returncode == 2:
            self.set_state("failed", handler_time)
            self.instance.reason = "Valgrind error"
        elif harness.state:
            self.set_state(harness.state, handler_time)
            if harness.state == "failed":
                self.instance.reason = "Failed"
        else:
            self.set_state("timeout", handler_time)
            self.instance.reason = "Timeout"

        self.record(harness)
453
454
class DeviceHandler(Handler):

    def __init__(self, instance, type_str):
        """Constructor

        @param instance Test Instance
        @param type_str Handler type string, forwarded to the base Handler
        """
        super().__init__(instance, type_str)

        # Owning TestSuite; set by the caller after construction and used
        # here for the connected-hardware map and west options.
        self.suite = None

    def monitor_serial(self, ser, halt_fileno, harness):
        """Forward serial console output to the log file and the harness.

        Runs on a separate thread. Stops when the harness reports a final
        state, the serial port errors out/closes, or a byte arrives on
        halt_fileno (the read end of a pipe used as a stop signal).

        @param ser open serial.Serial object
        @param halt_fileno file descriptor to watch for a halt request
        @param harness harness object that parses console lines
        """
        log_out_fp = open(self.log, "wt")

        ser_fileno = ser.fileno()
        readlist = [halt_fileno, ser_fileno]

        while ser.isOpen():
            readable, _, _ = select.select(readlist, [], [], self.timeout)

            if halt_fileno in readable:
                logger.debug('halted')
                ser.close()
                break
            if ser_fileno not in readable:
                continue  # Timeout.

            serial_line = None
            try:
                serial_line = ser.readline()
            except TypeError:
                pass
            except serial.SerialException:
                ser.close()
                break

            # Just because ser_fileno has data doesn't mean an entire line
            # is available yet.
            if serial_line:
                sl = serial_line.decode('utf-8', 'ignore').lstrip()
                logger.debug("DEVICE: {0}".format(sl.rstrip()))

                log_out_fp.write(sl)
                log_out_fp.flush()
                harness.handle(sl.rstrip())

            if harness.state:
                ser.close()
                break

        log_out_fp.close()

    def device_is_available(self, instance):
        """Return True when a connected device matches this instance.

        A device matches when its platform equals the instance's platform,
        it is marked available, has a serial port, and provides any fixture
        the testcase requires.
        """
        device = instance.platform.name
        fixture = instance.testcase.harness_config.get("fixture")
        for i in self.suite.connected_hardware:
            if fixture and fixture not in i.get('fixtures', []):
                continue
            if i['platform'] == device and i['available'] and i['serial']:
                return True

        return False

    def get_available_device(self, instance):
        """Claim and return a matching device entry, or None.

        Marks the entry unavailable and bumps its usage counter.
        NOTE(review): unlike make_device_available(), this scan/mutation is
        not guarded by hw_map_local -- confirm callers serialize access.
        """
        device = instance.platform.name
        for i in self.suite.connected_hardware:
            if i['platform'] == device and i['available'] and i['serial']:
                i['available'] = False
                i['counter'] += 1
                return i

        return None

    def make_device_available(self, serial):
        """Mark the device with the given serial port available again."""
        with hw_map_local:
            for i in self.suite.connected_hardware:
                if i['serial'] == serial:
                    i['available'] = True

    @staticmethod
    def run_custom_script(script, timeout):
        """Run a user-supplied pre/post script, killing it after timeout s."""
        with subprocess.Popen(script, stderr=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
            try:
                stdout, _ = proc.communicate(timeout=timeout)
                logger.debug(stdout.decode())

            except subprocess.TimeoutExpired:
                proc.kill()
                proc.communicate()
                logger.error("{} timed out".format(script))

    def handle(self):
        """Flash the test onto real hardware, monitor serial, set state."""
        out_state = "failed"

        if self.suite.west_flash:
            command = ["west", "flash", "--skip-rebuild", "-d", self.build_dir]
            if self.suite.west_runner:
                command.append("--runner")
                command.append(self.suite.west_runner)
            # There are three ways this option is used.
            # 1) bare: --west-flash
            #    This results in options.west_flash == []
            # 2) with a value: --west-flash="--board-id=42"
            #    This results in options.west_flash == "--board-id=42"
            # 3) Multiple values: --west-flash="--board-id=42,--erase"
            #    This results in options.west_flash == "--board-id=42 --erase"
            if self.suite.west_flash != []:
                command.append('--')
                command.extend(self.suite.west_flash.split(','))
        else:
            command = [self.generator_cmd, "-C", self.build_dir, "flash"]

        # Busy-wait until a matching board is free in the hardware map.
        while not self.device_is_available(self.instance):
            logger.debug("Waiting for device {} to become available".format(self.instance.platform.name))
            time.sleep(1)

        hardware = self.get_available_device(self.instance)

        # NOTE(review): if get_available_device() raced with another worker
        # and returned None, 'runner' below is never bound and 'if runner:'
        # raises UnboundLocalError; confirm the wait loop above plus the
        # claim in get_available_device make that impossible.
        if hardware:
            runner = hardware.get('runner', None)
        if runner:
            # Build a runner-specific west flash command so the right
            # probe/board is selected on multi-device hosts.
            board_id = hardware.get("probe_id", hardware.get("id", None))
            product = hardware.get("product", None)
            command = ["west", "flash", "--skip-rebuild", "-d", self.build_dir]
            command.append("--runner")
            command.append(hardware.get('runner', None))
            if runner == "pyocd":
                command.append("--board-id")
                command.append(board_id)
            elif runner == "nrfjprog":
                command.append('--')
                command.append("--snr")
                command.append(board_id)
            elif runner == "openocd" and product == "STM32 STLink":
                command.append('--')
                command.append("--cmd-pre-init")
                command.append("hla_serial %s" % (board_id))
            elif runner == "openocd" and product == "EDBG CMSIS-DAP":
                command.append('--')
                command.append("--cmd-pre-init")
                command.append("cmsis_dap_serial %s" % (board_id))
            elif runner == "jlink":
                command.append("--tool-opt=-SelectEmuBySN %s" % (board_id))

        serial_device = hardware['serial']

        try:
            ser = serial.Serial(
                serial_device,
                baudrate=115200,
                parity=serial.PARITY_NONE,
                stopbits=serial.STOPBITS_ONE,
                bytesize=serial.EIGHTBITS,
                timeout=self.timeout
            )
        except serial.SerialException as e:
            self.set_state("failed", 0)
            self.instance.reason = "Failed"
            logger.error("Serial device error: %s" % (str(e)))
            self.make_device_available(serial_device)
            return

        ser.flush()

        harness_name = self.instance.testcase.harness.capitalize()
        harness_import = HarnessImporter(harness_name)
        harness = harness_import.instance
        harness.configure(self.instance)
        # Pipe used only to signal the serial-monitor thread to halt.
        read_pipe, write_pipe = os.pipe()
        start_time = time.time()

        pre_script = hardware.get('pre_script')
        post_flash_script = hardware.get('post_flash_script')
        post_script = hardware.get('post_script')

        if pre_script:
            self.run_custom_script(pre_script, 30)

        t = threading.Thread(target=self.monitor_serial, daemon=True,
                             args=(ser, read_pipe, harness))
        t.start()

        d_log = "{}/device.log".format(self.instance.build_dir)
        logger.debug('Flash command: %s', command)
        try:
            stdout = stderr = None
            with subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
                try:
                    (stdout, stderr) = proc.communicate(timeout=30)
                    logger.debug(stdout.decode())

                    if proc.returncode != 0:
                        self.instance.reason = "Device issue (Flash?)"
                        with open(d_log, "w") as dlog_fp:
                            dlog_fp.write(stderr.decode())
                except subprocess.TimeoutExpired:
                    proc.kill()
                    (stdout, stderr) = proc.communicate()
                    self.instance.reason = "Device issue (Timeout)"

                    with open(d_log, "w") as dlog_fp:
                        dlog_fp.write(stderr.decode())

        except subprocess.CalledProcessError:
            # NOTE(review): Popen/communicate never raise
            # CalledProcessError (only the check_* helpers do), so this
            # halt path looks unreachable -- confirm intent.
            os.write(write_pipe, b'x')  # halt the thread

        if post_flash_script:
            self.run_custom_script(post_flash_script, 30)


        t.join(self.timeout)
        if t.is_alive():
            logger.debug("Timed out while monitoring serial output on {}".format(self.instance.platform.name))
            out_state = "timeout"

        if ser.isOpen():
            ser.close()

        os.close(write_pipe)
        os.close(read_pipe)

        handler_time = time.time() - start_time

        if out_state == "timeout":
            # Mark every testcase we never saw a verdict for as blocked.
            for c in self.instance.testcase.cases:
                if c not in harness.tests:
                    harness.tests[c] = "BLOCK"

            self.instance.reason = "Timeout"

        self.instance.results = harness.tests

        if harness.state:
            self.set_state(harness.state, handler_time)
            if harness.state == "failed":
                self.instance.reason = "Failed"
        else:
            self.set_state(out_state, handler_time)

        if post_script:
            self.run_custom_script(post_script, 30)

        self.make_device_available(serial_device)

        self.record(harness)
700
701
class QEMUHandler(Handler):
    """Spawns a thread to monitor QEMU output from pipes

    We pass QEMU_PIPE to 'make run' and monitor the pipes for output.
    We need to do this as once qemu starts, it runs forever until killed.
    Test cases emit special messages to the console as they run, we check
    for these to collect whether the test passed or failed.
    """

    def __init__(self, instance, type_str):
        """Constructor

        @param instance Test instance
        @param type_str Handler type string, forwarded to the base Handler
        """

        super().__init__(instance, type_str)
        # Base name of the FIFO pair QEMU uses for its console.
        self.fifo_fn = os.path.join(instance.build_dir, "qemu-fifo")

        # File QEMU writes its pid into once started.
        self.pid_fn = os.path.join(instance.build_dir, "qemu.pid")

    @staticmethod
    def _get_cpu_time(pid):
        """get process CPU time.

        The guest virtual time in QEMU icount mode isn't host time and
        it's maintained by counting guest instructions, so we use QEMU
        process execution time to mostly simulate the time of guest OS.
        """
        proc = psutil.Process(pid)
        cpu_time = proc.cpu_times()
        return cpu_time.user + cpu_time.system

    @staticmethod
    def _thread(handler, timeout, outdir, logfile, fifo_fn, pid_fn, results, harness):
        """Monitor the QEMU console FIFO until the test ends or times out.

        @param handler owning QEMUHandler; receives final state/duration
        @param timeout guest run timeout, in seconds
        @param outdir build output directory (currently unused here)
        @param logfile path receiving a copy of all console output
        @param fifo_fn FIFO base name (".in"/".out" suffixes are added)
        @param pid_fn file QEMU writes its pid into (may appear late)
        @param results shared results dict (currently unused here)
        @param harness harness object that parses console lines
        """
        fifo_in = fifo_fn + ".in"
        fifo_out = fifo_fn + ".out"

        # These in/out nodes are named from QEMU's perspective, not ours
        if os.path.exists(fifo_in):
            os.unlink(fifo_in)
        os.mkfifo(fifo_in)
        if os.path.exists(fifo_out):
            os.unlink(fifo_out)
        os.mkfifo(fifo_out)

        # We don't do anything with out_fp but we need to open it for
        # writing so that QEMU doesn't block, due to the way pipes work
        out_fp = open(fifo_in, "wb")
        # Disable internal buffering, we don't
        # want read() or poll() to ever block if there is data in there
        in_fp = open(fifo_out, "rb", buffering=0)
        log_out_fp = open(logfile, "wt")

        start_time = time.time()
        timeout_time = start_time + timeout
        p = select.poll()
        p.register(in_fp, select.POLLIN)
        out_state = None

        line = ""
        timeout_extended = False

        # QEMU may not have written its pid file yet; retried below.
        pid = 0
        if os.path.exists(pid_fn):
            pid = int(open(pid_fn).read())

        while True:
            this_timeout = int((timeout_time - time.time()) * 1000)
            if this_timeout < 0 or not p.poll(this_timeout):
                if pid and this_timeout > 0:
                    #there is possibility we polled nothing because
                    #of host not scheduled QEMU process enough CPU
                    #time during p.poll(this_timeout)
                    cpu_time = QEMUHandler._get_cpu_time(pid)
                    if cpu_time < timeout and not out_state:
                        # Give QEMU the CPU time it was still owed.
                        timeout_time = time.time() + (timeout - cpu_time)
                        continue

                if not out_state:
                    out_state = "timeout"
                break

            if pid == 0 and os.path.exists(pid_fn):
                pid = int(open(pid_fn).read())

            try:
                c = in_fp.read(1).decode("utf-8")
            except UnicodeDecodeError:
                # Test is writing something weird, fail
                out_state = "unexpected byte"
                break

            if c == "":
                # EOF, this shouldn't happen unless QEMU crashes
                out_state = "unexpected eof"
                break
            line = line + c
            if c != "\n":
                continue

            # line contains a full line of data output from QEMU
            log_out_fp.write(line)
            log_out_fp.flush()
            line = line.strip()
            logger.debug("QEMU: %s" % line)

            harness.handle(line)
            if harness.state:
                # if we have registered a fail make sure the state is not
                # overridden by a false success message coming from the
                # testsuite
                if out_state != 'failed':
                    out_state = harness.state

                # if we get some state, that means test is doing well, we reset
                # the timeout and wait for 2 more seconds to catch anything
                # printed late. We wait much longer if code
                # coverage is enabled since dumping this information can
                # take some time.
                if not timeout_extended or harness.capture_coverage:
                    timeout_extended = True
                    if harness.capture_coverage:
                        timeout_time = time.time() + 30
                    else:
                        timeout_time = time.time() + 2
            line = ""

        handler.record(harness)

        handler_time = time.time() - start_time
        logger.debug("QEMU complete (%s) after %f seconds" %
                     (out_state, handler_time))
        handler.set_state(out_state, handler_time)
        if out_state == "timeout":
            handler.instance.reason = "Timeout"
        elif out_state == "failed":
            handler.instance.reason = "Failed"

        log_out_fp.close()
        out_fp.close()
        in_fp.close()
        if pid:
            try:
                # (inner check is redundant with the guard above; kept as-is)
                if pid:
                    os.kill(pid, signal.SIGTERM)
            except ProcessLookupError:
                # Oh well, as long as it's dead! User probably sent Ctrl-C
                pass

        os.unlink(fifo_in)
        os.unlink(fifo_out)

    def handle(self):
        """Launch 'make/ninja run' for QEMU and monitor it to completion."""
        self.results = {}
        self.run = True

        # We pass this to QEMU which looks for fifos with .in and .out
        # suffixes.
        self.fifo_fn = os.path.join(self.instance.build_dir, "qemu-fifo")

        self.pid_fn = os.path.join(self.instance.build_dir, "qemu.pid")
        if os.path.exists(self.pid_fn):
            os.unlink(self.pid_fn)

        self.log_fn = self.log

        harness_import = HarnessImporter(self.instance.testcase.harness.capitalize())
        harness = harness_import.instance
        harness.configure(self.instance)
        self.thread = threading.Thread(name=self.name, target=QEMUHandler._thread,
                                       args=(self, self.timeout, self.build_dir,
                                             self.log_fn, self.fifo_fn,
                                             self.pid_fn, self.results, harness))

        self.instance.results = harness.tests
        self.thread.daemon = True
        logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
        self.thread.start()
        # Reset the console; QEMU may leave it garbled.
        subprocess.call(["stty", "sane"])

        logger.debug("Running %s (%s)" % (self.name, self.type_str))
        command = [self.generator_cmd]
        command += ["-C", self.build_dir, "run"]

        with subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.build_dir) as proc:
            logger.debug("Spawning QEMUHandler Thread for %s" % self.name)
            try:
                proc.wait(self.timeout)
            except subprocess.TimeoutExpired:
                #sometimes QEMU can't handle SIGTERM signal correctly
                #in that case kill -9 QEMU process directly and leave
                #sanitycheck judge testing result by console output
                if os.path.exists(self.pid_fn):
                    qemu_pid = int(open(self.pid_fn).read())
                    try:
                        os.kill(qemu_pid, signal.SIGKILL)
                    except ProcessLookupError:
                        pass
                    proc.wait()
                    # Forced kill is not a failure; the console output
                    # (via the monitor thread) decides the verdict.
                    self.returncode = 0
                else:
                    proc.terminate()
                    proc.kill()
                    self.returncode = proc.returncode
            else:
                self.returncode = proc.returncode

        if os.path.exists(self.pid_fn):
            os.unlink(self.pid_fn)

        if self.returncode != 0:
            self.set_state("failed", 0)
            self.instance.reason = "Exited with {}".format(self.returncode)

    def get_fifo(self):
        """Return the base path of the QEMU console FIFO pair."""
        return self.fifo_fn
918
919
class SizeCalculator:
    # Zero-initialized / uninitialized sections: occupy RAM at runtime but
    # contribute nothing to the stored image.
    alloc_sections = [
        "bss",
        "noinit",
        "app_bss",
        "app_noinit",
        "ccm_bss",
        "ccm_noinit"
    ]

    # Initialized read-write data sections; presumably counted toward both
    # image (ROM) and runtime (RAM) size -- see _calculate_sizes().
    rw_sections = [
        "datas",
        "initlevel",
        "exceptions",
        "initshell",
        "_static_thread_area",
        "_k_timer_area",
        "_k_mem_slab_area",
        "_k_mem_pool_area",
        "sw_isr_table",
        "_k_sem_area",
        "_k_mutex_area",
        "app_shmem_regions",
        "_k_fifo_area",
        "_k_lifo_area",
        "_k_stack_area",
        "_k_msgq_area",
        "_k_mbox_area",
        "_k_pipe_area",
        "net_if",
        "net_if_dev",
        "net_l2_data",
        "_k_queue_area",
        "_net_buf_pool_area",
        "app_datas",
        "kobject_data",
        "mmu_tables",
        "app_pad",
        "priv_stacks",
        "ccm_data",
        "usb_descriptor",
        "usb_data", "usb_bos_desc",
        "uart_mux",
        'log_backends_sections',
        'log_dynamic_sections',
        'log_const_sections',
        "app_smem",
        'shell_root_cmds_sections',
        # NOTE(review): 'log_const_sections' appears twice in this list
        # (harmless duplicate).
        'log_const_sections',
        "font_entry_sections",
        "priv_stacks_noinit",
        "_GCOV_BSS_SECTION_NAME",
        "gcov",
        "nocache"
    ]

    # These get copied into RAM only on non-XIP
    ro_sections = [
        "rom_start",
        "text",
        "ctors",
        "init_array",
        "reset",
        "object_access",
        "rodata",
        "devconfig",
        "net_l2",
        "vector",
        "sw_isr_table",
        "_settings_handlers_area",
        "_bt_channels_area",
        "_bt_br_channels_area",
        "_bt_services_area",
        "vectors",
        "net_socket_register",
        "net_ppp_proto"
    ]
997
    def __init__(self, filename, extra_sections):
        """Constructor

        @param filename Path to the output binary
            The <filename> is parsed by objdump to determine section sizes
        @param extra_sections Additional section names to treat as known
        """
        # Make sure this is an ELF binary
        with open(filename, "rb") as f:
            magic = f.read(4)

        # NOTE(review): this raise-and-catch-immediately pattern just
        # prints the message and exits with status 2 instead of
        # propagating SanityRuntimeError.
        try:
            if magic != b'\x7fELF':
                raise SanityRuntimeError("%s is not an ELF binary" % filename)
        except Exception as e:
            print(str(e))
            sys.exit(2)

        # Search for CONFIG_XIP in the ELF's list of symbols using NM and AWK.
        # GREP can not be used as it returns an error if the symbol is not
        # found.
        is_xip_command = "nm " + filename + \
                         " | awk '/CONFIG_XIP/ { print $3 }'"
        is_xip_output = subprocess.check_output(
            is_xip_command, shell=True, stderr=subprocess.STDOUT).decode(
            "utf-8").strip()
        try:
            if is_xip_output.endswith("no symbols"):
                raise SanityRuntimeError("%s has no symbol information" % filename)
        except Exception as e:
            print(str(e))
            sys.exit(2)

        # XIP is enabled when the CONFIG_XIP symbol was found at all.
        self.is_xip = (len(is_xip_output) != 0)

        self.filename = filename
        self.sections = []
        self.rom_size = 0
        self.ram_size = 0
        self.extra_sections = extra_sections

        self._calculate_sizes()
1039
1040 def get_ram_size(self):
1041 """Get the amount of RAM the application will use up on the device
1042
1043 @return amount of RAM, in bytes
1044 """
1045 return self.ram_size
1046
1047 def get_rom_size(self):
1048 """Get the size of the data that this application uses on device's flash
1049
1050 @return amount of ROM, in bytes
1051 """
1052 return self.rom_size
1053
1054 def unrecognized_sections(self):
1055 """Get a list of sections inside the binary that weren't recognized
1056
1057 @return list of unrecognized section names
1058 """
1059 slist = []
1060 for v in self.sections:
1061 if not v["recognized"]:
1062 slist.append(v["name"])
1063 return slist
1064
    def _calculate_sizes(self):
        """ Calculate RAM and ROM usage by section

        Parses `objdump -h` output for self.filename and accumulates
        self.ram_size / self.rom_size, appending one metadata dict per
        counted section to self.sections.
        """
        objdump_command = "objdump -h " + self.filename
        objdump_output = subprocess.check_output(
            objdump_command, shell=True).decode("utf-8").splitlines()

        for line in objdump_output:
            words = line.split()

            if not words:  # Skip lines that are too short
                continue

            # Section rows in objdump -h output start with a numeric index.
            index = words[0]
            if not index[0].isdigit():  # Skip lines that do not start
                continue  # with a digit

            name = words[1]  # Skip lines with section names
            if name[0] == '.':  # starting with '.'
                continue

            # TODO this doesn't actually reflect the size in flash or RAM as
            # it doesn't include linker-imposed padding between sections.
            # It is close though.
            size = int(words[2], 16)
            if size == 0:
                continue

            # Columns 3/4 are VMA/LMA, printed in hex without 0x prefix.
            load_addr = int(words[4], 16)
            virt_addr = int(words[3], 16)

            # Add section to memory use totals (for both non-XIP and XIP scenarios)
            # Unrecognized section names are not included in the calculations.
            recognized = True
            if name in SizeCalculator.alloc_sections:
                # BSS-style sections: occupy RAM only.
                self.ram_size += size
                stype = "alloc"
            elif name in SizeCalculator.rw_sections:
                # Initialized data: stored in ROM, copied to RAM at boot.
                self.ram_size += size
                self.rom_size += size
                stype = "rw"
            elif name in SizeCalculator.ro_sections:
                # Read-only: ROM always; also RAM when not executing in place.
                self.rom_size += size
                if not self.is_xip:
                    self.ram_size += size
                stype = "ro"
            else:
                stype = "unknown"
                if name not in self.extra_sections:
                    recognized = False

            self.sections.append({"name": name, "load_addr": load_addr,
                                  "size": size, "virt_addr": virt_addr,
                                  "type": stype, "recognized": recognized})
1118
1119
1120
class SanityConfigParser:
    """Class to read test case files with semantic checking
    """

    def __init__(self, filename, schema):
        """Instantiate a new SanityConfigParser object

        @param filename Source .yaml file to read
        @param schema Schema used by scl to validate the file in load()
        """
        self.data = {}
        self.schema = schema
        self.filename = filename
        self.tests = {}
        self.common = {}

    def load(self):
        """Load and schema-validate the YAML file, populating
        self.tests and self.common from its top-level sections."""
        self.data = scl.yaml_load_verify(self.filename, self.schema)

        if 'tests' in self.data:
            self.tests = self.data['tests']
        if 'common' in self.data:
            self.common = self.data['common']

    def _cast_value(self, value, typestr):
        """Convert a raw YAML value to the Python type named by typestr.

        @param value Raw value read from the YAML file
        @param typestr One of "str", "float", "int", "bool", "map",
            "list" or "set", the latter two optionally with an element
            type, e.g. "list:int"
        @return the converted value
        @raises ConfigurationError for an unknown typestr
        """
        # Bug fix: previously `v` was only bound when value was a str,
        # which made the str/list/set branches raise NameError for
        # non-string input instead of handling or reporting it.
        if isinstance(value, str):
            v = value.strip()
        else:
            v = value

        if typestr == "str":
            return v

        elif typestr == "float":
            return float(value)

        elif typestr == "int":
            return int(value)

        elif typestr == "bool":
            # YAML parses booleans natively; pass through unchanged.
            return value

        elif typestr.startswith("list") and isinstance(value, list):
            return value
        elif typestr.startswith("list") and isinstance(value, str):
            vs = v.split()
            if len(typestr) > 4 and typestr[4] == ":":
                # "list:<type>": convert each whitespace-separated element
                return [self._cast_value(vsi, typestr[5:]) for vsi in vs]
            else:
                return vs

        elif typestr.startswith("set"):
            vs = v.split()
            if len(typestr) > 3 and typestr[3] == ":":
                return {self._cast_value(vsi, typestr[4:]) for vsi in vs}
            else:
                return set(vs)

        elif typestr.startswith("map"):
            return value
        else:
            # Bug fix: report the unknown *type string*, as the message
            # promises, rather than the value.
            raise ConfigurationError(
                self.filename, "unknown type '%s'" % typestr)

    def get_test(self, name, valid_keys):
        """Get a dictionary representing the keys/values within a test

        @param name The test in the .yaml file to retrieve data from
        @param valid_keys A dictionary representing the intended semantics
            for this test. Each key in this dictionary is a key that could
            be specified, if a key is given in the .yaml file which isn't in
            here, it will generate an error. Each value in this dictionary
            is another dictionary containing metadata:

            "default" - Default value if not given
            "type" - Data type to convert the text value to. Simple types
                supported are "str", "float", "int", "bool" which will get
                converted to respective Python data types. "set" and "list"
                may also be specified which will split the value by
                whitespace (but keep the elements as strings). finally,
                "list:<type>" and "set:<type>" may be given which will
                perform a type conversion after splitting the value up.
            "required" - If true, raise an error if not defined. If false
                and "default" isn't specified, a type conversion will be
                done on an empty string
        @return A dictionary containing the test key-value pairs with
            type conversion and default values filled in per valid_keys
        @raises ConfigurationError on unknown keys, missing required keys
            or values that fail type conversion
        """

        # Start from the "common" section, then overlay/merge this test's
        # own keys on top of it.
        d = {}
        for k, v in self.common.items():
            d[k] = v

        for k, v in self.tests[name].items():
            if k not in valid_keys:
                raise ConfigurationError(
                    self.filename,
                    "Unknown config key '%s' in definition for '%s'" %
                    (k, name))

            if k in d:
                if isinstance(d[k], str):
                    # By default, we just concatenate string values of keys
                    # which appear both in "common" and per-test sections,
                    # but some keys are handled in adhoc way based on their
                    # semantics.
                    if k == "filter":
                        d[k] = "(%s) and (%s)" % (d[k], v)
                    else:
                        d[k] += " " + v
            else:
                d[k] = v

        for k, kinfo in valid_keys.items():
            if k not in d:
                # Idiomatic dict lookup with default instead of the
                # previous if/else dance.
                required = kinfo.get("required", False)

                if required:
                    raise ConfigurationError(
                        self.filename,
                        "missing required value for '%s' in test '%s'" %
                        (k, name))
                if "default" in kinfo:
                    d[k] = kinfo["default"]
                else:
                    d[k] = self._cast_value("", kinfo["type"])
            else:
                try:
                    d[k] = self._cast_value(d[k], kinfo["type"])
                except ValueError:
                    raise ConfigurationError(
                        self.filename, "bad %s value '%s' for key '%s' in name '%s'" %
                        (kinfo["type"], d[k], k, name))

        return d
1257
1258
class Platform:
    """Metadata describing a single board.

    Maps directly to BOARD when building."""

    platform_schema = scl.yaml_load(os.path.join(ZEPHYR_BASE,
                                                 "scripts", "sanity_chk", "platform-schema.yaml"))

    def __init__(self):
        """Initialize every field to its default; load() overwrites them
        from a platform YAML description."""

        self.name = ""
        self.sanitycheck = True
        # if no RAM size is specified by the board, take a default of 128K
        self.ram = 128
        self.ignore_tags = []
        self.default = False
        # if no flash size is specified by the board, take a default of 512K
        self.flash = 512
        self.supported = set()

        self.arch = ""
        self.type = "na"
        self.simulation = "na"
        self.supported_toolchains = []
        self.env = []
        self.env_satisfied = True
        self.filter_data = dict()

    def load(self, platform_file):
        """Populate this Platform from a schema-validated YAML file."""
        parser = SanityConfigParser(platform_file, self.platform_schema)
        parser.load()
        data = parser.data

        self.name = data['identifier']
        self.sanitycheck = data.get("sanitycheck", True)
        # if no RAM size is specified by the board, take a default of 128K
        self.ram = data.get("ram", 128)
        testing = data.get("testing", {})
        self.ignore_tags = testing.get("ignore_tags", [])
        self.default = testing.get("default", False)
        # if no flash size is specified by the board, take a default of 512K
        self.flash = data.get("flash", 512)

        # "supported" entries may be colon-separated feature groups;
        # flatten them into a single set of features.
        self.supported = set()
        for feature_group in data.get("supported", []):
            self.supported.update(feature_group.split(":"))

        self.arch = data['arch']
        self.type = data.get('type', "na")
        self.simulation = data.get('simulation', "na")
        self.supported_toolchains = data.get("toolchain", [])
        self.env = data.get("env", [])
        # Satisfied only when every required environment variable is set
        # (and non-empty).
        self.env_satisfied = all(os.environ.get(var, None) for var in self.env)

    def __repr__(self):
        return "<%s on %s>" % (self.name, self.arch)
1322
1323
Anas Nashifaff616d2020-04-17 21:24:57 -04001324class DisablePyTestCollectionMixin(object):
1325 __test__ = False
1326
1327
class TestCase(DisablePyTestCollectionMixin):
    """Class representing a test application
    """

    def __init__(self, testcase_root, workdir, name):
        """TestCase constructor.

        This gets called by TestSuite as it finds and reads test yaml files.
        Multiple TestCase instances may be generated from a single testcase.yaml,
        each one corresponds to an entry within that file.

        We need to have a unique name for every single test case. Since
        a testcase.yaml can define multiple tests, the canonical name for
        the test case is <workdir>/<name>.

        @param testcase_root os.path.abspath() of one of the --testcase-root
        @param workdir Sub-directory of testcase_root where the
            .yaml test configuration file was found
        @param name Name of this test case, corresponding to the entry name
            in the test case configuration file. For many test cases that just
            define one test, can be anything and is usually "test". This is
            really only used to distinguish between different cases when
            the testcase.yaml defines multiple tests
        """


        self.source_dir = ""
        self.yamlfile = ""
        # Fully-qualified subcase names, filled in by parse_subcases()
        self.cases = []
        self.name = self.get_unique(testcase_root, workdir, name)
        self.id = name

        # NOTE(review): the defaults below appear to be placeholders that
        # the caller overrides from the testcase.yaml entry — confirm in
        # TestSuite before relying on them.
        self.type = None
        self.tags = set()
        self.extra_args = None
        self.extra_configs = None
        self.arch_whitelist = None
        self.arch_exclude = None
        self.skip = False
        self.platform_exclude = None
        self.platform_whitelist = None
        self.toolchain_exclude = None
        self.toolchain_whitelist = None
        self.tc_filter = None
        self.timeout = 60
        self.harness = ""
        self.harness_config = {}
        self.build_only = True
        self.build_on_all = False
        self.slow = False
        self.min_ram = -1
        self.depends_on = None
        self.min_flash = -1
        self.extra_sections = None

    @staticmethod
    def get_unique(testcase_root, workdir, name):
        """Build the canonical unique name for a test case.

        @param testcase_root Root directory the test was discovered under
        @param workdir Sub-directory of testcase_root with the yaml file
        @param name Entry name within the testcase.yaml
        @return normalized <relative-root>/<workdir>/<name>; the relative
            root prefix is only added for tests living inside ZEPHYR_BASE
        @raises SanityCheckException when the name contains no '.'
            (category.subsystem) separator
        """

        canonical_testcase_root = os.path.realpath(testcase_root)
        if Path(canonical_zephyr_base) in Path(canonical_testcase_root).parents:
            # This is in ZEPHYR_BASE, so include path in name for uniqueness
            # FIXME: We should not depend on path of test for unique names.
            relative_tc_root = os.path.relpath(canonical_testcase_root,
                                               start=canonical_zephyr_base)
        else:
            relative_tc_root = ""

        # workdir can be "."
        unique = os.path.normpath(os.path.join(relative_tc_root, workdir, name))
        check = name.split(".")
        if len(check) < 2:
            raise SanityCheckException(f"""bad test name '{name}' in {testcase_root}/{workdir}. \
Tests should reference the category and subsystem with a dot as a separator.
                """
                )
        return unique

    @staticmethod
    def scan_file(inf_name):
        """Extract ztest subcase names from one C source file.

        @param inf_name Path to the C file to scan
        @return tuple (matches, warnings): subcase names with the leading
            "test_" prefix stripped, or None when the file declares no
            ztest suite; plus a warning string or None
        @raises ValueError when a suite is declared but never run
        """
        suite_regex = re.compile(
            # do not match until end-of-line, otherwise we won't allow
            # stc_regex below to catch the ones that are declared in the same
            # line--as we only search starting the end of this match
            br"^\s*ztest_test_suite\(\s*(?P<suite_name>[a-zA-Z0-9_]+)\s*,",
            re.MULTILINE)
        stc_regex = re.compile(
            br"^\s*"  # empty space at the beginning is ok
            # catch the case where it is declared in the same sentence, e.g:
            #
            # ztest_test_suite(mutex_complex, ztest_user_unit_test(TESTNAME));
            br"(?:ztest_test_suite\([a-zA-Z0-9_]+,\s*)?"
            # Catch ztest[_user]_unit_test-[_setup_teardown](TESTNAME)
            br"ztest_(?:1cpu_)?(?:user_)?unit_test(?:_setup_teardown)?"
            # Consume the argument that becomes the extra testcase
            br"\(\s*"
            br"(?P<stc_name>[a-zA-Z0-9_]+)"
            # _setup_teardown() variant has two extra arguments that we ignore
            br"(?:\s*,\s*[a-zA-Z0-9_]+\s*,\s*[a-zA-Z0-9_]+)?"
            br"\s*\)",
            # We don't check how it finishes; we don't care
            re.MULTILINE)
        suite_run_regex = re.compile(
            br"^\s*ztest_run_test_suite\((?P<suite_name>[a-zA-Z0-9_]+)\)",
            re.MULTILINE)
        achtung_regex = re.compile(
            br"(#ifdef|#endif)",
            re.MULTILINE)
        warnings = None

        with open(inf_name) as inf:
            # mmap flags differ between Windows and POSIX platforms.
            if os.name == 'nt':
                mmap_args = {'fileno': inf.fileno(), 'length': 0, 'access': mmap.ACCESS_READ}
            else:
                mmap_args = {'fileno': inf.fileno(), 'length': 0, 'flags': mmap.MAP_PRIVATE, 'prot': mmap.PROT_READ,
                             'offset': 0}

            with contextlib.closing(mmap.mmap(**mmap_args)) as main_c:
                # contextlib makes pylint think main_c isn't subscriptable
                # pylint: disable=unsubscriptable-object

                suite_regex_match = suite_regex.search(main_c)
                if not suite_regex_match:
                    # can't find ztest_test_suite, maybe a client, because
                    # it includes ztest.h
                    return None, None

                suite_run_match = suite_run_regex.search(main_c)
                if not suite_run_match:
                    raise ValueError("can't find ztest_run_test_suite")

                # Preprocessor conditionals between the suite declaration
                # and its run call would make the static scan unreliable,
                # so flag them.
                achtung_matches = re.findall(
                    achtung_regex,
                    main_c[suite_regex_match.end():suite_run_match.start()])
                if achtung_matches:
                    warnings = "found invalid %s in ztest_test_suite()" \
                               % ", ".join({match.decode() for match in achtung_matches})
                _matches = re.findall(
                    stc_regex,
                    main_c[suite_regex_match.end():suite_run_match.start()])
                # Only the last offending name is reported; earlier
                # warnings (including the achtung one) are overwritten.
                for match in _matches:
                    if not match.decode().startswith("test_"):
                        warnings = "Found a test that does not start with test_"
                matches = [match.decode().replace("test_", "") for match in _matches]
                return matches, warnings

    def scan_path(self, path):
        """Scan a test directory (src/*.c* and top-level *.c) for subcases.

        Scan warnings are fatal for files under src/ but are only logged
        for top-level *.c files.

        @param path Directory of the test application
        @return list of discovered subcase names
        """
        subcases = []
        for filename in glob.glob(os.path.join(path, "src", "*.c*")):
            try:
                _subcases, warnings = self.scan_file(filename)
                if warnings:
                    logger.error("%s: %s" % (filename, warnings))
                    raise SanityRuntimeError("%s: %s" % (filename, warnings))
                if _subcases:
                    subcases += _subcases
            except ValueError as e:
                logger.error("%s: can't find: %s" % (filename, e))

        for filename in glob.glob(os.path.join(path, "*.c")):
            try:
                _subcases, warnings = self.scan_file(filename)
                if warnings:
                    logger.error("%s: %s" % (filename, warnings))
                if _subcases:
                    subcases += _subcases
            except ValueError as e:
                logger.error("%s: can't find: %s" % (filename, e))
        return subcases

    def parse_subcases(self, test_path):
        """Populate self.cases with fully-qualified subcase names.

        Each discovered subcase becomes "<id>.<subcase>"; when no
        subcases are found the bare test id is used instead.
        """
        results = self.scan_path(test_path)
        for sub in results:
            name = "{}.{}".format(self.id, sub)
            self.cases.append(name)

        if not results:
            self.cases.append(self.id)

    def __str__(self):
        # The unique canonical name computed by get_unique().
        return self.name
1508
1509
Anas Nashifaff616d2020-04-17 21:24:57 -04001510class TestInstance(DisablePyTestCollectionMixin):
Anas Nashifce2b4182020-03-24 14:40:28 -04001511 """Class representing the execution of a particular TestCase on a platform
1512
1513 @param test The TestCase object we want to build/execute
1514 @param platform Platform object that we want to build and run against
1515 @param base_outdir Base directory for all test results. The actual
1516 out directory used is <outdir>/<platform>/<test case name>
1517 """
1518
1519 def __init__(self, testcase, platform, outdir):
1520
1521 self.testcase = testcase
1522 self.platform = platform
1523
1524 self.status = None
1525 self.reason = "Unknown"
1526 self.metrics = dict()
1527 self.handler = None
1528 self.outdir = outdir
1529
1530 self.name = os.path.join(platform.name, testcase.name)
1531 self.build_dir = os.path.join(outdir, platform.name, testcase.name)
1532
1533 self.build_only = True
1534 self.run = False
1535
1536 self.results = {}
1537
1538 def __lt__(self, other):
1539 return self.name < other.name
1540
Anas Nashifaff616d2020-04-17 21:24:57 -04001541 # Global testsuite parameters
Anas Nashifce8c12e2020-05-21 09:11:40 -04001542 def check_build_or_run(self, build_only=False, enable_slow=False, device_testing=False, fixtures=[]):
Anas Nashifce2b4182020-03-24 14:40:28 -04001543
1544 # right now we only support building on windows. running is still work
1545 # in progress.
1546 if os.name == 'nt':
1547 self.build_only = True
1548 self.run = False
1549 return
1550
1551 _build_only = True
1552
1553 # we asked for build-only on the command line
1554 if build_only or self.testcase.build_only:
1555 self.build_only = True
1556 self.run = False
1557 return
1558
1559 # Do not run slow tests:
1560 skip_slow = self.testcase.slow and not enable_slow
1561 if skip_slow:
1562 self.build_only = True
1563 self.run = False
1564 return
1565
1566 runnable = bool(self.testcase.type == "unit" or \
1567 self.platform.type == "native" or \
1568 self.platform.simulation in ["nsim", "renode", "qemu"] or \
1569 device_testing)
1570
1571 if self.platform.simulation == "nsim":
1572 if not find_executable("nsimdrv"):
1573 runnable = False
1574
1575 if self.platform.simulation == "renode":
1576 if not find_executable("renode"):
1577 runnable = False
1578
1579 # console harness allows us to run the test and capture data.
Anas Nashifce8c12e2020-05-21 09:11:40 -04001580 if self.testcase.harness in [ 'console', 'ztest']:
Anas Nashifce2b4182020-03-24 14:40:28 -04001581
1582 # if we have a fixture that is also being supplied on the
1583 # command-line, then we need to run the test, not just build it.
Anas Nashifce8c12e2020-05-21 09:11:40 -04001584 fixture = self.testcase.harness_config.get('fixture')
1585 if fixture:
1586 if fixture in fixtures:
Anas Nashifce2b4182020-03-24 14:40:28 -04001587 _build_only = False
1588 else:
1589 _build_only = True
1590 else:
1591 _build_only = False
Anas Nashif3b86f132020-05-21 10:35:33 -04001592
Anas Nashifce2b4182020-03-24 14:40:28 -04001593 elif self.testcase.harness:
1594 _build_only = True
1595 else:
1596 _build_only = False
1597
1598 self.build_only = not (not _build_only and runnable)
1599 self.run = not self.build_only
1600 return
1601
1602 def create_overlay(self, platform, enable_asan=False, enable_coverage=False, coverage_platform=[]):
1603 # Create this in a "sanitycheck/" subdirectory otherwise this
1604 # will pass this overlay to kconfig.py *twice* and kconfig.cmake
1605 # will silently give that second time precedence over any
1606 # --extra-args=CONFIG_*
1607 subdir = os.path.join(self.build_dir, "sanitycheck")
1608 os.makedirs(subdir, exist_ok=True)
1609 file = os.path.join(subdir, "testcase_extra.conf")
1610
1611 with open(file, "w") as f:
1612 content = ""
1613
1614 if self.testcase.extra_configs:
1615 content = "\n".join(self.testcase.extra_configs)
1616
1617 if enable_coverage:
1618 if platform.name in coverage_platform:
1619 content = content + "\nCONFIG_COVERAGE=y"
1620 content = content + "\nCONFIG_COVERAGE_DUMP=y"
1621
1622 if enable_asan:
1623 if platform.type == "native":
1624 content = content + "\nCONFIG_ASAN=y"
1625
1626 f.write(content)
1627
1628 def calculate_sizes(self):
1629 """Get the RAM/ROM sizes of a test case.
1630
1631 This can only be run after the instance has been executed by
1632 MakeGenerator, otherwise there won't be any binaries to measure.
1633
1634 @return A SizeCalculator object
1635 """
1636 fns = glob.glob(os.path.join(self.build_dir, "zephyr", "*.elf"))
1637 fns.extend(glob.glob(os.path.join(self.build_dir, "zephyr", "*.exe")))
1638 fns = [x for x in fns if not x.endswith('_prebuilt.elf')]
1639 if len(fns) != 1:
1640 raise BuildError("Missing/multiple output ELF binary")
1641
1642 return SizeCalculator(fns[0], self.testcase.extra_sections)
1643
1644 def __repr__(self):
1645 return "<TestCase %s on %s>" % (self.testcase.name, self.platform.name)
1646
1647
class CMake():
    """Wrapper around cmake for configuring and building one test.

    Subclasses (e.g. FilterBuilder) specialize the log file name and the
    parse_generated() hook.
    """

    # Matches CONFIG_FOO=bar / CONFIG_FOO="bar" lines from a .config file.
    config_re = re.compile('(CONFIG_[A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
    dt_re = re.compile('([A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')

    def __init__(self, testcase, platform, source_dir, build_dir):

        self.cwd = None
        self.capture_output = True

        self.defconfig = {}
        self.cmake_cache = {}

        self.instance = None
        self.testcase = testcase
        self.platform = platform
        self.source_dir = source_dir
        self.build_dir = build_dir
        self.log = "build.log"
        self.generator = None
        self.generator_cmd = None
        # Bug fix: run_cmake() reads this attribute, but previously only
        # the ProjectBuilder subclass ever assigned it; give it a safe
        # default here so the base class is usable on its own.
        self.cmake_only = False

    def parse_generated(self):
        """Hook for subclasses to parse generated build products.

        @return a filter-result dictionary (empty in the base class)
        """
        self.defconfig = {}
        return {}

    def run_build(self, args=None):
        """Run the cmake build step and classify the outcome.

        Appends any captured output to the build log. On a RAM/FLASH
        overflow the instance is marked "skipped" rather than "failed".

        @param args Extra cmake command-line arguments. Bug fix: default
            is now None (treated as empty) instead of a mutable [].
        @return dict describing the result, or None when the build
            succeeded without producing any output
        """
        if args is None:
            args = []

        logger.debug("Building %s for %s" % (self.source_dir, self.platform.name))

        cmake_args = []
        cmake_args.extend(args)
        cmake = shutil.which('cmake')
        cmd = [cmake] + cmake_args
        kwargs = dict()

        if self.capture_output:
            kwargs['stdout'] = subprocess.PIPE
            # CMake sends the output of message() to stderr unless it's STATUS
            kwargs['stderr'] = subprocess.STDOUT

        if self.cwd:
            kwargs['cwd'] = self.cwd

        p = subprocess.Popen(cmd, **kwargs)
        out, _ = p.communicate()

        results = {}
        if p.returncode == 0:
            msg = "Finished building %s for %s" % (self.source_dir, self.platform.name)

            self.instance.status = "passed"
            results = {'msg': msg, "returncode": p.returncode, "instance": self.instance}

            if out:
                log_msg = out.decode(sys.getdefaultencoding())
                with open(os.path.join(self.build_dir, self.log), "a") as log:
                    log.write(log_msg)

            else:
                return None
        else:
            # Bug fix: log_msg was previously unbound (NameError) when the
            # failed build produced no output at all.
            log_msg = ""
            if out:
                log_msg = out.decode(sys.getdefaultencoding())
                with open(os.path.join(self.build_dir, self.log), "a") as log:
                    log.write(log_msg)

            if log_msg:
                # Linker overflow is an expected condition on small
                # boards: treat as a skip, not a failure.
                res = re.findall("region `(FLASH|RAM|SRAM)' overflowed by", log_msg)
                if res:
                    logger.debug("Test skipped due to {} Overflow".format(res[0]))
                    self.instance.status = "skipped"
                    self.instance.reason = "{} overflow".format(res[0])
                else:
                    self.instance.status = "failed"
                    self.instance.reason = "Build failure"

            results = {
                "returncode": p.returncode,
                "instance": self.instance,
            }

        return results

    def run_cmake(self, args=None):
        """Run the cmake configure step for this test/platform.

        @param args Extra definitions (without the leading -D). Bug fix:
            default is now None (treated as empty) instead of a mutable [].
        @return dict with 'filter' results on success, or the returncode
            on failure
        """
        if args is None:
            args = []

        ldflags = "-Wl,--fatal-warnings"
        logger.debug("Running cmake on %s for %s" % (self.source_dir, self.platform.name))

        # fixme: add additional cflags based on options
        # NOTE(review): the unbalanced '"' in EXTRA_CFLAGS/EXTRA_LDFLAGS
        # below looks intentional (it survives upstream) — confirm before
        # "fixing" the quoting.
        cmake_args = [
            '-B{}'.format(self.build_dir),
            '-S{}'.format(self.source_dir),
            '-DEXTRA_CFLAGS="-Werror ',
            '-DEXTRA_AFLAGS=-Wa,--fatal-warnings',
            '-DEXTRA_LDFLAGS="{}'.format(ldflags),
            '-G{}'.format(self.generator)
        ]

        if self.cmake_only:
            cmake_args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=1")

        # Strip quotes from the extra definitions and prefix with -D.
        args = ["-D{}".format(a.replace('"', '')) for a in args]
        cmake_args.extend(args)

        cmake_opts = ['-DBOARD={}'.format(self.platform.name)]
        cmake_args.extend(cmake_opts)


        logger.debug("Calling cmake with arguments: {}".format(cmake_args))
        cmake = shutil.which('cmake')
        cmd = [cmake] + cmake_args
        kwargs = dict()

        if self.capture_output:
            kwargs['stdout'] = subprocess.PIPE
            # CMake sends the output of message() to stderr unless it's STATUS
            kwargs['stderr'] = subprocess.STDOUT

        if self.cwd:
            kwargs['cwd'] = self.cwd

        p = subprocess.Popen(cmd, **kwargs)
        out, _ = p.communicate()

        if p.returncode == 0:
            filter_results = self.parse_generated()
            msg = "Finished building %s for %s" % (self.source_dir, self.platform.name)
            logger.debug(msg)
            results = {'msg': msg, 'filter': filter_results}

        else:
            self.instance.status = "failed"
            self.instance.reason = "Cmake build failure"
            logger.error("Cmake build failure: %s for %s" % (self.source_dir, self.platform.name))
            results = {"returncode": p.returncode}

        if out:
            with open(os.path.join(self.build_dir, self.log), "a") as log:
                log_msg = out.decode(sys.getdefaultencoding())
                log.write(log_msg)

        return results
1791
1792
class FilterBuilder(CMake):
    """CMake subclass used for the configure-and-filter stage: it parses
    the generated .config / CMake cache / devicetree and evaluates the
    testcase's filter expression against them."""

    def __init__(self, testcase, platform, source_dir, build_dir):
        super().__init__(testcase, platform, source_dir, build_dir)

        # Separate log name so filter runs don't clobber build.log.
        self.log = "config-sanitycheck.log"

    def parse_generated(self):
        """Collect filter inputs from the generated build and evaluate
        the testcase filter.

        @return when the testcase has a tc_filter: a one-entry dict
            mapping <platform>/<testcase> to True when the instance is
            filtered out (see ProjectBuilder.process(), which skips on
            True); otherwise the raw filter_data dictionary
        """

        if self.platform.name == "unit_testing":
            return {}

        cmake_cache_path = os.path.join(self.build_dir, "CMakeCache.txt")
        defconfig_path = os.path.join(self.build_dir, "zephyr", ".config")

        # Parse the generated Kconfig output into a CONFIG_* dict.
        with open(defconfig_path, "r") as fp:
            defconfig = {}
            for line in fp.readlines():
                m = self.config_re.match(line)
                if not m:
                    if line.strip() and not line.startswith("#"):
                        sys.stderr.write("Unrecognized line %s\n" % line)
                    continue
                defconfig[m.group(1)] = m.group(2).strip()

        self.defconfig = defconfig

        cmake_conf = {}
        try:
            cache = CMakeCache.from_file(cmake_cache_path)
        except FileNotFoundError:
            # No cache yet; treat as empty rather than failing.
            cache = {}

        for k in iter(cache):
            cmake_conf[k.name] = k.value

        self.cmake_cache = cmake_conf

        # Later updates take precedence: environment, then Kconfig,
        # then the CMake cache override the base ARCH/PLATFORM keys.
        filter_data = {
            "ARCH": self.platform.arch,
            "PLATFORM": self.platform.name
        }
        filter_data.update(os.environ)
        filter_data.update(self.defconfig)
        filter_data.update(self.cmake_cache)

        dts_path = os.path.join(self.build_dir, "zephyr", self.platform.name + ".dts.pre.tmp")
        if self.testcase and self.testcase.tc_filter:
            try:
                if os.path.exists(dts_path):
                    edt = edtlib.EDT(dts_path, [os.path.join(ZEPHYR_BASE, "dts", "bindings")],
                            warn_reg_unit_address_mismatch=False)
                else:
                    edt = None
                res = expr_parser.parse(self.testcase.tc_filter, filter_data, edt)

            except (ValueError, SyntaxError) as se:
                sys.stderr.write(
                    "Failed processing %s\n" % self.testcase.yamlfile)
                raise se

            # True => filtered out (instance will be skipped).
            if not res:
                return {os.path.join(self.platform.name, self.testcase.name): True}
            else:
                return {os.path.join(self.platform.name, self.testcase.name): False}
        else:
            self.platform.filter_data = filter_data
            return filter_data
1861
1862
1863class ProjectBuilder(FilterBuilder):
1864
1865 def __init__(self, suite, instance, **kwargs):
1866 super().__init__(instance.testcase, instance.platform, instance.testcase.source_dir, instance.build_dir)
1867
1868 self.log = "build.log"
1869 self.instance = instance
1870 self.suite = suite
1871
1872 self.lsan = kwargs.get('lsan', False)
1873 self.asan = kwargs.get('asan', False)
1874 self.valgrind = kwargs.get('valgrind', False)
1875 self.extra_args = kwargs.get('extra_args', [])
1876 self.device_testing = kwargs.get('device_testing', False)
1877 self.cmake_only = kwargs.get('cmake_only', False)
1878 self.cleanup = kwargs.get('cleanup', False)
1879 self.coverage = kwargs.get('coverage', False)
1880 self.inline_logs = kwargs.get('inline_logs', False)
Anas Nashifce2b4182020-03-24 14:40:28 -04001881 self.generator = kwargs.get('generator', None)
1882 self.generator_cmd = kwargs.get('generator_cmd', None)
Anas Nashiff6462a32020-03-29 19:02:51 -04001883 self.verbose = kwargs.get('verbose', None)
Anas Nashifce2b4182020-03-24 14:40:28 -04001884
1885 @staticmethod
1886 def log_info(filename, inline_logs):
1887 filename = os.path.abspath(os.path.realpath(filename))
1888 if inline_logs:
1889 logger.info("{:-^100}".format(filename))
1890
1891 try:
1892 with open(filename) as fp:
1893 data = fp.read()
1894 except Exception as e:
1895 data = "Unable to read log data (%s)\n" % (str(e))
1896
1897 logger.error(data)
1898
1899 logger.info("{:-^100}".format(filename))
1900 else:
1901 logger.error("see: " + Fore.YELLOW + filename + Fore.RESET)
1902
1903 def log_info_file(self, inline_logs):
1904 build_dir = self.instance.build_dir
1905 h_log = "{}/handler.log".format(build_dir)
1906 b_log = "{}/build.log".format(build_dir)
1907 v_log = "{}/valgrind.log".format(build_dir)
1908 d_log = "{}/device.log".format(build_dir)
1909
1910 if os.path.exists(v_log) and "Valgrind" in self.instance.reason:
1911 self.log_info("{}".format(v_log), inline_logs)
1912 elif os.path.exists(h_log) and os.path.getsize(h_log) > 0:
1913 self.log_info("{}".format(h_log), inline_logs)
1914 elif os.path.exists(d_log) and os.path.getsize(d_log) > 0:
1915 self.log_info("{}".format(d_log), inline_logs)
1916 else:
1917 self.log_info("{}".format(b_log), inline_logs)
1918
    def setup_handler(self):
        """Attach an execution handler to self.instance.

        Picks QEMUHandler, BinaryHandler (unit/native/nsim/renode) or
        DeviceHandler depending on the platform/testcase, and forwards
        generator settings to it. instance.handler stays unset when no
        execution method applies (e.g. simulator binary not installed).
        """

        instance = self.instance
        args = []

        # FIXME: Needs simplification
        if instance.platform.simulation == "qemu":
            instance.handler = QEMUHandler(instance, "qemu")
            args.append("QEMU_PIPE=%s" % instance.handler.get_fifo())
            instance.handler.call_make_run = True
        elif instance.testcase.type == "unit":
            instance.handler = BinaryHandler(instance, "unit")
            instance.handler.binary = os.path.join(instance.build_dir, "testbinary")
            if self.coverage:
                args.append("COVERAGE=1")
        elif instance.platform.type == "native":
            handler = BinaryHandler(instance, "native")

            # Sanitizer/valgrind/coverage knobs only apply to the
            # natively-executed binary.
            handler.asan = self.asan
            handler.valgrind = self.valgrind
            handler.lsan = self.lsan
            handler.coverage = self.coverage

            handler.binary = os.path.join(instance.build_dir, "zephyr", "zephyr.exe")
            instance.handler = handler
        elif instance.platform.simulation == "nsim":
            # Only usable when the simulator executable is on PATH.
            if find_executable("nsimdrv"):
                instance.handler = BinaryHandler(instance, "nsim")
                instance.handler.call_make_run = True
        elif instance.platform.simulation == "renode":
            if find_executable("renode"):
                instance.handler = BinaryHandler(instance, "renode")
                instance.handler.pid_fn = os.path.join(instance.build_dir, "renode.pid")
                instance.handler.call_make_run = True
        elif self.device_testing:
            instance.handler = DeviceHandler(instance, "device")

        if instance.handler:
            instance.handler.args = args
            instance.handler.generator_cmd = self.generator_cmd
            instance.handler.generator = self.generator
Anas Nashifce2b4182020-03-24 14:40:28 -04001960
1961 def process(self, message):
1962 op = message.get('op')
1963
1964 if not self.instance.handler:
1965 self.setup_handler()
1966
1967 # The build process, call cmake and build with configured generator
1968 if op == "cmake":
1969 results = self.cmake()
1970 if self.instance.status == "failed":
1971 pipeline.put({"op": "report", "test": self.instance})
1972 elif self.cmake_only:
1973 pipeline.put({"op": "report", "test": self.instance})
1974 else:
1975 if self.instance.name in results['filter'] and results['filter'][self.instance.name]:
1976 logger.debug("filtering %s" % self.instance.name)
1977 self.instance.status = "skipped"
1978 self.instance.reason = "filter"
1979 pipeline.put({"op": "report", "test": self.instance})
1980 else:
1981 pipeline.put({"op": "build", "test": self.instance})
1982
1983 elif op == "build":
1984 logger.debug("build test: %s" % self.instance.name)
1985 results = self.build()
1986
1987 if not results:
1988 self.instance.status = "failed"
1989 self.instance.reason = "Build Failure"
1990 pipeline.put({"op": "report", "test": self.instance})
1991 else:
1992 if results.get('returncode', 1) > 0:
1993 pipeline.put({"op": "report", "test": self.instance})
1994 else:
1995 if self.instance.run:
1996 pipeline.put({"op": "run", "test": self.instance})
1997 else:
1998 pipeline.put({"op": "report", "test": self.instance})
1999 # Run the generated binary using one of the supported handlers
2000 elif op == "run":
2001 logger.debug("run test: %s" % self.instance.name)
2002 self.run()
2003 self.instance.status, _ = self.instance.handler.get_state()
2004 pipeline.put({
2005 "op": "report",
2006 "test": self.instance,
2007 "state": "executed",
2008 "status": self.instance.status,
2009 "reason": self.instance.reason}
2010 )
2011
2012 # Report results and output progress to screen
2013 elif op == "report":
2014 with report_lock:
2015 self.report_out()
2016
2017 if self.cleanup and not self.coverage and self.instance.status == "passed":
2018 pipeline.put({
2019 "op": "cleanup",
2020 "test": self.instance
2021 })
2022
2023 elif op == "cleanup":
2024 self.cleanup_artifacts()
2025
2026 def cleanup_artifacts(self):
2027 logger.debug("Cleaning up {}".format(self.instance.build_dir))
2028 whitelist = [
2029 'zephyr/.config',
2030 'handler.log',
2031 'build.log',
2032 'device.log',
Anas Nashif9ace63e2020-04-28 07:14:43 -04002033 'recording.csv',
Anas Nashifce2b4182020-03-24 14:40:28 -04002034 ]
2035 whitelist = [os.path.join(self.instance.build_dir, file) for file in whitelist]
2036
2037 for dirpath, dirnames, filenames in os.walk(self.instance.build_dir, topdown=False):
2038 for name in filenames:
2039 path = os.path.join(dirpath, name)
2040 if path not in whitelist:
2041 os.remove(path)
2042 # Remove empty directories and symbolic links to directories
2043 for dir in dirnames:
2044 path = os.path.join(dirpath, dir)
2045 if os.path.islink(path):
2046 os.remove(path)
2047 elif not os.listdir(path):
2048 os.rmdir(path)
2049
    def report_out(self):
        """Record this finished instance in the suite counters and print
        either a per-test result line (verbose mode) or an updated
        single-line progress bar (quiet mode).

        Failure logs are dumped via log_info_file() on failed/timeout.
        """
        total_tests_width = len(str(self.suite.total_tests))
        self.suite.total_done += 1
        instance = self.instance

        if instance.status in ["failed", "timeout"]:
            self.suite.total_failed += 1
            if self.verbose:
                # 'status' is only consumed further down inside the
                # verbose branch, so it is deliberately not set on the
                # non-verbose failure path.
                status = Fore.RED + "FAILED " + Fore.RESET + instance.reason
            else:
                # Break out of the \r progress line before logging the error.
                print("")
                logger.error(
                    "{:<25} {:<50} {}FAILED{}: {}".format(
                        instance.platform.name,
                        instance.testcase.name,
                        Fore.RED,
                        Fore.RESET,
                        instance.reason))
            if not self.verbose:
                self.log_info_file(self.inline_logs)
        elif instance.status == "skipped":
            self.suite.total_skipped += 1
            status = Fore.YELLOW + "SKIPPED" + Fore.RESET
        else:
            status = Fore.GREEN + "PASSED" + Fore.RESET

        if self.verbose:
            # Suffix describing how far the instance got (cmake/build/run).
            if self.cmake_only:
                more_info = "cmake"
            elif instance.status == "skipped":
                more_info = instance.reason
            else:
                if instance.handler and instance.run:
                    more_info = instance.handler.type_str
                    htime = instance.handler.duration
                    if htime:
                        more_info += " {:.3f}s".format(htime)
                else:
                    more_info = "build"

            logger.info("{:>{}}/{} {:<25} {:<50} {} ({})".format(
                self.suite.total_done, total_tests_width, self.suite.total_tests, instance.platform.name,
                instance.testcase.name, status, more_info))

            if instance.status in ["failed", "timeout"]:
                self.log_info_file(self.inline_logs)
        else:
            # Quiet mode: rewrite the same console line with \r.
            sys.stdout.write("\rINFO - Total complete: %s%4d/%4d%s %2d%% skipped: %s%4d%s, failed: %s%4d%s" % (
                Fore.GREEN,
                self.suite.total_done,
                self.suite.total_tests,
                Fore.RESET,
                int((float(self.suite.total_done) / self.suite.total_tests) * 100),
                Fore.YELLOW if self.suite.total_skipped > 0 else Fore.RESET,
                self.suite.total_skipped,
                Fore.RESET,
                Fore.RED if self.suite.total_failed > 0 else Fore.RESET,
                self.suite.total_failed,
                Fore.RESET
                )
            )
            sys.stdout.flush()
2112
2113 def cmake(self):
2114
2115 instance = self.instance
2116 args = self.testcase.extra_args[:]
2117 args += self.extra_args
2118
2119 if instance.handler:
2120 args += instance.handler.args
2121
2122 # merge overlay files into one variable
2123 def extract_overlays(args):
2124 re_overlay = re.compile('OVERLAY_CONFIG=(.*)')
2125 other_args = []
2126 overlays = []
2127 for arg in args:
2128 match = re_overlay.search(arg)
2129 if match:
2130 overlays.append(match.group(1).strip('\'"'))
2131 else:
2132 other_args.append(arg)
2133
2134 args[:] = other_args
2135 return overlays
2136
2137 overlays = extract_overlays(args)
2138
2139 if (self.testcase.extra_configs or self.coverage or
2140 self.asan):
2141 overlays.append(os.path.join(instance.build_dir,
2142 "sanitycheck", "testcase_extra.conf"))
2143
2144 if overlays:
2145 args.append("OVERLAY_CONFIG=\"%s\"" % (" ".join(overlays)))
2146
2147 results = self.run_cmake(args)
2148 return results
2149
2150 def build(self):
2151 results = self.run_build(['--build', self.build_dir])
2152 return results
2153
2154 def run(self):
2155
2156 instance = self.instance
2157
2158 if instance.handler.type_str == "device":
2159 instance.handler.suite = self.suite
2160
2161 instance.handler.handle()
2162
2163 sys.stdout.flush()
2164
2165
class BoundedExecutor(concurrent.futures.ThreadPoolExecutor):
    """BoundedExecutor behaves as a ThreadPoolExecutor which will block on
    calls to submit() once the limit given as "bound" work items are queued for
    execution.
    :param bound: Integer - the maximum number of items in the work queue
    :param max_workers: Integer - the size of the thread pool
    """

    def __init__(self, bound, max_workers, **kwargs):
        # Bug fix: **kwargs used to be accepted but silently dropped;
        # forward them so options like thread_name_prefix actually reach
        # ThreadPoolExecutor.
        super().__init__(max_workers=max_workers, **kwargs)
        # Allow "bound" queued items on top of the max_workers in flight.
        self.semaphore = BoundedSemaphore(bound + max_workers)

    def submit(self, fn, *args, **kwargs):
        """Schedule fn(*args, **kwargs), blocking while the queue is full.

        The semaphore slot is released when the future completes, or
        immediately if submission itself raises.
        """
        self.semaphore.acquire()
        try:
            future = super().submit(fn, *args, **kwargs)
        except Exception:
            self.semaphore.release()
            raise
        else:
            future.add_done_callback(lambda _: self.semaphore.release())
            return future
2189
2190
Anas Nashifaff616d2020-04-17 21:24:57 -04002191class TestSuite(DisablePyTestCollectionMixin):
Anas Nashifce2b4182020-03-24 14:40:28 -04002192 config_re = re.compile('(CONFIG_[A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
2193 dt_re = re.compile('([A-Za-z0-9_]+)[=]\"?([^\"]*)\"?$')
2194
2195 tc_schema = scl.yaml_load(
2196 os.path.join(ZEPHYR_BASE,
2197 "scripts", "sanity_chk", "testcase-schema.yaml"))
2198
2199 testcase_valid_keys = {"tags": {"type": "set", "required": False},
2200 "type": {"type": "str", "default": "integration"},
2201 "extra_args": {"type": "list"},
2202 "extra_configs": {"type": "list"},
2203 "build_only": {"type": "bool", "default": False},
2204 "build_on_all": {"type": "bool", "default": False},
2205 "skip": {"type": "bool", "default": False},
2206 "slow": {"type": "bool", "default": False},
2207 "timeout": {"type": "int", "default": 60},
2208 "min_ram": {"type": "int", "default": 8},
2209 "depends_on": {"type": "set"},
2210 "min_flash": {"type": "int", "default": 32},
2211 "arch_whitelist": {"type": "set"},
2212 "arch_exclude": {"type": "set"},
2213 "extra_sections": {"type": "list", "default": []},
2214 "platform_exclude": {"type": "set"},
2215 "platform_whitelist": {"type": "set"},
2216 "toolchain_exclude": {"type": "set"},
2217 "toolchain_whitelist": {"type": "set"},
2218 "filter": {"type": "str"},
2219 "harness": {"type": "str"},
2220 "harness_config": {"type": "map", "default": {}}
2221 }
2222
2223 RELEASE_DATA = os.path.join(ZEPHYR_BASE, "scripts", "sanity_chk",
2224 "sanity_last_release.csv")
2225
Aastha Grovera0ae5342020-05-13 13:34:00 -07002226 SAMPLE_FILENAME = 'sample.yaml'
2227 TESTCASE_FILENAME = 'testcase.yaml'
2228
Anas Nashifaff616d2020-04-17 21:24:57 -04002229 def __init__(self, board_root_list=[], testcase_roots=[], outdir=None):
Anas Nashifce2b4182020-03-24 14:40:28 -04002230
2231 self.roots = testcase_roots
2232 if not isinstance(board_root_list, list):
2233 self.board_roots = [board_root_list]
2234 else:
2235 self.board_roots = board_root_list
2236
2237 # Testsuite Options
2238 self.coverage_platform = []
2239 self.build_only = False
2240 self.cmake_only = False
2241 self.cleanup = False
2242 self.enable_slow = False
2243 self.device_testing = False
Anas Nashifce8c12e2020-05-21 09:11:40 -04002244 self.fixtures = []
Anas Nashifce2b4182020-03-24 14:40:28 -04002245 self.enable_coverage = False
2246 self.enable_lsan = False
2247 self.enable_asan = False
2248 self.enable_valgrind = False
2249 self.extra_args = []
2250 self.inline_logs = False
2251 self.enable_sizes_report = False
2252 self.west_flash = None
2253 self.west_runner = None
2254 self.generator = None
2255 self.generator_cmd = None
2256
2257 # Keep track of which test cases we've filtered out and why
2258 self.testcases = {}
2259 self.platforms = []
2260 self.selected_platforms = []
2261 self.default_platforms = []
2262 self.outdir = os.path.abspath(outdir)
Anas Nashifaff616d2020-04-17 21:24:57 -04002263 self.discards = {}
Anas Nashifce2b4182020-03-24 14:40:28 -04002264 self.load_errors = 0
2265 self.instances = dict()
2266
2267 self.total_tests = 0 # number of test instances
2268 self.total_cases = 0 # number of test cases
2269 self.total_done = 0 # tests completed
2270 self.total_failed = 0
2271 self.total_skipped = 0
2272
2273 self.total_platforms = 0
2274 self.start_time = 0
2275 self.duration = 0
2276 self.warnings = 0
2277 self.cv = threading.Condition()
2278
2279 # hardcoded for now
2280 self.connected_hardware = []
2281
Anas Nashifbb280352020-05-07 12:02:48 -04002282 def get_platform_instances(self, platform):
2283 filtered_dict = {k:v for k,v in self.instances.items() if k.startswith(platform + "/")}
2284 return filtered_dict
2285
Anas Nashifce2b4182020-03-24 14:40:28 -04002286 def config(self):
2287 logger.info("coverage platform: {}".format(self.coverage_platform))
2288
2289 # Debug Functions
2290 @staticmethod
2291 def info(what):
2292 sys.stdout.write(what + "\n")
2293 sys.stdout.flush()
2294
2295 def update(self):
2296 self.total_tests = len(self.instances)
2297 self.total_cases = len(self.testcases)
2298
2299 def compare_metrics(self, filename):
2300 # name, datatype, lower results better
2301 interesting_metrics = [("ram_size", int, True),
2302 ("rom_size", int, True)]
2303
2304 if not os.path.exists(filename):
2305 logger.info("Cannot compare metrics, %s not found" % filename)
2306 return []
2307
2308 results = []
2309 saved_metrics = {}
2310 with open(filename) as fp:
2311 cr = csv.DictReader(fp)
2312 for row in cr:
2313 d = {}
2314 for m, _, _ in interesting_metrics:
2315 d[m] = row[m]
2316 saved_metrics[(row["test"], row["platform"])] = d
2317
2318 for instance in self.instances.values():
2319 mkey = (instance.testcase.name, instance.platform.name)
2320 if mkey not in saved_metrics:
2321 continue
2322 sm = saved_metrics[mkey]
2323 for metric, mtype, lower_better in interesting_metrics:
2324 if metric not in instance.metrics:
2325 continue
2326 if sm[metric] == "":
2327 continue
2328 delta = instance.metrics.get(metric, 0) - mtype(sm[metric])
2329 if delta == 0:
2330 continue
2331 results.append((instance, metric, instance.metrics.get(metric, 0), delta,
2332 lower_better))
2333 return results
2334
    def misc_reports(self, report, show_footprint, all_deltas,
                     footprint_threshold, last_metrics):
        """Log footprint (RAM/ROM) deltas against the baseline CSV *report*.

        Only regressions above *footprint_threshold* percent are shown
        unless *all_deltas* is set, in which case every change is logged.
        """
        if not report:
            return

        deltas = self.compare_metrics(report)
        warnings = 0
        if deltas and show_footprint:
            for i, metric, value, delta, lower_better in deltas:
                # By default only report regressions (higher when lower is
                # better, or vice versa).
                if not all_deltas and ((delta < 0 and lower_better) or
                                       (delta > 0 and not lower_better)):
                    continue

                # Relative change against the baseline value (value - delta).
                percentage = (float(delta) / float(value - delta))
                if not all_deltas and (percentage <
                                       (footprint_threshold / 100.0)):
                    continue

                logger.info("{:<25} {:<60} {}{}{}: {} {:<+4}, is now {:6} {:+.2%}".format(
                    i.platform.name, i.testcase.name, Fore.YELLOW,
                    "INFO" if all_deltas else "WARNING", Fore.RESET,
                    metric, delta, value, percentage))
                warnings += 1

        if warnings:
            logger.warning("Deltas based on metrics from last %s" %
                           ("release" if not last_metrics else "run"))
2363
    def summary(self, unrecognized_sections):
        """Log the end-of-run summary: pass rate, failures, skips and the
        executed-vs-built split.

        :param unrecognized_sections: when falsy, instances with
            unrecognized binary sections are counted as failures.
        """
        failed = 0
        run = 0
        for instance in self.instances.values():
            if instance.status == "failed":
                failed += 1
            elif instance.metrics.get("unrecognized") and not unrecognized_sections:
                logger.error("%sFAILED%s: %s has unrecognized binary sections: %s" %
                             (Fore.RED, Fore.RESET, instance.name,
                              str(instance.metrics.get("unrecognized", []))))
                failed += 1

            # NOTE(review): assumes 'handler_time' is always present in
            # metrics (it is set by execute()); a bare [] lookup would
            # raise KeyError otherwise — confirm against callers.
            if instance.metrics['handler_time']:
                run += 1

        # Guard against division by zero when everything was skipped.
        if self.total_tests and self.total_tests != self.total_skipped:
            pass_rate = (float(self.total_tests - self.total_failed - self.total_skipped) / float(
                self.total_tests - self.total_skipped))
        else:
            pass_rate = 0

        logger.info(
            "{}{} of {}{} tests passed ({:.2%}), {}{}{} failed, {} skipped with {}{}{} warnings in {:.2f} seconds".format(
                Fore.RED if failed else Fore.GREEN,
                self.total_tests - self.total_failed - self.total_skipped,
                self.total_tests - self.total_skipped,
                Fore.RESET,
                pass_rate,
                Fore.RED if self.total_failed else Fore.RESET,
                self.total_failed,
                Fore.RESET,
                self.total_skipped,
                Fore.YELLOW if self.warnings else Fore.RESET,
                self.warnings,
                Fore.RESET,
                self.duration))

        self.total_platforms = len(self.platforms)
        if self.platforms:
            logger.info("In total {} test cases were executed on {} out of total {} platforms ({:02.2f}%)".format(
                self.total_cases,
                len(self.selected_platforms),
                self.total_platforms,
                (100 * len(self.selected_platforms) / len(self.platforms))
            ))

        logger.info(f"{Fore.GREEN}{run}{Fore.RESET} tests executed on platforms, \
{Fore.RED}{self.total_tests - run}{Fore.RESET} tests were only built.")
2412
Anas Nashif6915adf2020-04-22 09:39:42 -04002413 def save_reports(self, name, suffix, report_dir, no_update, release, only_failed):
Anas Nashifce2b4182020-03-24 14:40:28 -04002414 if not self.instances:
2415 return
2416
2417 if name:
2418 report_name = name
2419 else:
2420 report_name = "sanitycheck"
2421
2422 if report_dir:
2423 os.makedirs(report_dir, exist_ok=True)
2424 filename = os.path.join(report_dir, report_name)
2425 outdir = report_dir
2426 else:
2427 filename = os.path.join(self.outdir, report_name)
2428 outdir = self.outdir
2429
Anas Nashif6915adf2020-04-22 09:39:42 -04002430 if suffix:
2431 filename = "{}_{}".format(filename, suffix)
2432
Anas Nashifce2b4182020-03-24 14:40:28 -04002433 if not no_update:
Anas Nashif90415502020-04-11 22:15:04 -04002434 self.xunit_report(filename + ".xml", full_report=False, append=only_failed)
2435 self.xunit_report(filename + "_report.xml", full_report=True, append=only_failed)
Anas Nashifce2b4182020-03-24 14:40:28 -04002436 self.csv_report(filename + ".csv")
Anas Nashif90415502020-04-11 22:15:04 -04002437
Anas Nashif6915adf2020-04-22 09:39:42 -04002438 self.target_report(outdir, suffix, append=only_failed)
Anas Nashifce2b4182020-03-24 14:40:28 -04002439 if self.discards:
2440 self.discard_report(filename + "_discard.csv")
2441
2442 if release:
2443 self.csv_report(self.RELEASE_DATA)
2444
2445 def add_configurations(self):
2446
2447 for board_root in self.board_roots:
2448 board_root = os.path.abspath(board_root)
2449
2450 logger.debug("Reading platform configuration files under %s..." %
2451 board_root)
2452
2453 for file in glob.glob(os.path.join(board_root, "*", "*", "*.yaml")):
2454 logger.debug("Found platform configuration " + file)
2455 try:
2456 platform = Platform()
2457 platform.load(file)
2458 if platform.sanitycheck:
2459 self.platforms.append(platform)
2460 if platform.default:
2461 self.default_platforms.append(platform.name)
2462
2463 except RuntimeError as e:
2464 logger.error("E: %s: can't load: %s" % (file, e))
2465 self.load_errors += 1
2466
2467 def get_all_tests(self):
2468 tests = []
2469 for _, tc in self.testcases.items():
2470 for case in tc.cases:
2471 tests.append(case)
2472
2473 return tests
2474
2475 @staticmethod
2476 def get_toolchain():
2477 toolchain = os.environ.get("ZEPHYR_TOOLCHAIN_VARIANT", None) or \
2478 os.environ.get("ZEPHYR_GCC_VARIANT", None)
2479
2480 if toolchain == "gccarmemb":
2481 # Remove this translation when gccarmemb is no longer supported.
2482 toolchain = "gnuarmemb"
2483
2484 try:
2485 if not toolchain:
2486 raise SanityRuntimeError("E: Variable ZEPHYR_TOOLCHAIN_VARIANT is not defined")
2487 except Exception as e:
2488 print(str(e))
2489 sys.exit(2)
2490
2491 return toolchain
2492
    def add_testcases(self, testcase_filter=[]):
        """Walk every testcase root, parse each testcase.yaml/sample.yaml
        and populate self.testcases.

        :param testcase_filter: optional list of testcase names; when
            non-empty, only matching testcases are kept. Parse errors are
            logged and counted in self.load_errors.
        """
        for root in self.roots:
            root = os.path.abspath(root)

            logger.debug("Reading test case configuration files under %s..." % root)

            for dirpath, dirnames, filenames in os.walk(root, topdown=True):
                logger.debug("scanning %s" % dirpath)
                # sample.yaml takes precedence over testcase.yaml.
                if self.SAMPLE_FILENAME in filenames:
                    filename = self.SAMPLE_FILENAME
                elif self.TESTCASE_FILENAME in filenames:
                    filename = self.TESTCASE_FILENAME
                else:
                    continue

                logger.debug("Found possible test case in " + dirpath)

                # A yaml file ends the walk down this branch.
                dirnames[:] = []
                tc_path = os.path.join(dirpath, filename)

                try:
                    parsed_data = SanityConfigParser(tc_path, self.tc_schema)
                    parsed_data.load()

                    tc_path = os.path.dirname(tc_path)
                    workdir = os.path.relpath(tc_path, root)

                    for name in parsed_data.tests.keys():
                        tc = TestCase(root, workdir, name)

                        tc_dict = parsed_data.get_test(name, self.testcase_valid_keys)

                        tc.source_dir = tc_path
                        tc.yamlfile = tc_path

                        # Copy each validated yaml key onto the TestCase.
                        tc.type = tc_dict["type"]
                        tc.tags = tc_dict["tags"]
                        tc.extra_args = tc_dict["extra_args"]
                        tc.extra_configs = tc_dict["extra_configs"]
                        tc.arch_whitelist = tc_dict["arch_whitelist"]
                        tc.arch_exclude = tc_dict["arch_exclude"]
                        tc.skip = tc_dict["skip"]
                        tc.platform_exclude = tc_dict["platform_exclude"]
                        tc.platform_whitelist = tc_dict["platform_whitelist"]
                        tc.toolchain_exclude = tc_dict["toolchain_exclude"]
                        tc.toolchain_whitelist = tc_dict["toolchain_whitelist"]
                        tc.tc_filter = tc_dict["filter"]
                        tc.timeout = tc_dict["timeout"]
                        tc.harness = tc_dict["harness"]
                        tc.harness_config = tc_dict["harness_config"]
                        # A console harness is useless without its config.
                        if tc.harness == 'console' and not tc.harness_config:
                            raise Exception('Harness config error: console harness defined without a configuration.')
                        tc.build_only = tc_dict["build_only"]
                        tc.build_on_all = tc_dict["build_on_all"]
                        tc.slow = tc_dict["slow"]
                        tc.min_ram = tc_dict["min_ram"]
                        tc.depends_on = tc_dict["depends_on"]
                        tc.min_flash = tc_dict["min_flash"]
                        tc.extra_sections = tc_dict["extra_sections"]

                        tc.parse_subcases(tc_path)

                        if testcase_filter:
                            if tc.name and tc.name in testcase_filter:
                                self.testcases[tc.name] = tc
                        else:
                            self.testcases[tc.name] = tc

                except Exception as e:
                    logger.error("%s: can't load (skipping): %s" % (tc_path, e))
                    self.load_errors += 1
2564
2565
2566 def get_platform(self, name):
2567 selected_platform = None
2568 for platform in self.platforms:
2569 if platform.name == name:
2570 selected_platform = platform
2571 break
2572 return selected_platform
2573
    def load_from_file(self, file, filter_status=[]):
        """Rebuild self.instances from a previously written results CSV.

        :param file: CSV path with at least "test", "platform" and
            "status" columns
        :param filter_status: rows whose status is in this list are
            skipped. Exits the process (status 2) on a missing file or a
            malformed row.
        """
        try:
            with open(file, "r") as fp:
                cr = csv.DictReader(fp)
                instance_list = []
                for row in cr:
                    if row["status"] in filter_status:
                        continue
                    test = row["test"]

                    platform = self.get_platform(row["platform"])
                    instance = TestInstance(self.testcases[test], platform, self.outdir)
                    instance.check_build_or_run(
                        self.build_only,
                        self.enable_slow,
                        self.device_testing,
                        self.fixtures
                    )
                    instance.create_overlay(platform, self.enable_asan, self.enable_coverage, self.coverage_platform)
                    instance_list.append(instance)
                self.add_instances(instance_list)

        except KeyError as e:
            logger.error("Key error while parsing tests file.({})".format(str(e)))
            sys.exit(2)

        except FileNotFoundError as e:
            logger.error("Couldn't find input file with list of tests. ({})".format(e))
            sys.exit(2)
2603
    def apply_filters(self, **kwargs):
        """Build self.instances by pairing every testcase with every
        eligible platform, applying all command-line and yaml filters.

        Returns (and stores in self.discards) a dict mapping each
        rejected TestInstance to the human-readable reason it was
        discarded. Keyword arguments mirror the CLI options (platform,
        arch, tag, exclude_tag, all, device_testing, force_toolchain,
        force_platform, ...).
        """
        toolchain = self.get_toolchain()

        discards = {}
        platform_filter = kwargs.get('platform')
        exclude_platform = kwargs.get('exclude_platform', [])
        testcase_filter = kwargs.get('run_individual_tests', [])
        arch_filter = kwargs.get('arch')
        tag_filter = kwargs.get('tag')
        exclude_tag = kwargs.get('exclude_tag')
        all_filter = kwargs.get('all')
        device_testing_filter = kwargs.get('device_testing')
        force_toolchain = kwargs.get('force_toolchain')
        force_platform = kwargs.get('force_platform')

        logger.debug("platform filter: " + str(platform_filter))
        logger.debug("    arch_filter: " + str(arch_filter))
        logger.debug("     tag_filter: " + str(tag_filter))
        logger.debug("    exclude_tag: " + str(exclude_tag))

        default_platforms = False

        if platform_filter:
            platforms = list(filter(lambda p: p.name in platform_filter, self.platforms))
        else:
            platforms = self.platforms

        if all_filter:
            logger.info("Selecting all possible platforms per test case")
            # When --all used, any --platform arguments ignored
            platform_filter = []
        elif not platform_filter:
            logger.info("Selecting default platforms per test case")
            default_platforms = True

        logger.info("Building initial testcase list...")

        for tc_name, tc in self.testcases.items():
            # list of instances per testcase, aka configurations.
            instance_list = []
            for plat in platforms:
                instance = TestInstance(tc, plat, self.outdir)
                instance.check_build_or_run(
                    self.build_only,
                    self.enable_slow,
                    self.device_testing,
                    self.fixtures
                )

                # Connected hardware providing the required fixture makes a
                # build-only instance runnable.
                if device_testing_filter:
                    for h in self.connected_hardware:
                        if h['platform'] == plat.name:
                            if tc.harness_config.get('fixture') in h.get('fixtures', []):
                                instance.build_only = False
                                instance.run = True

                if not force_platform and plat.name in exclude_platform:
                    discards[instance] = "Platform is excluded on command line."
                    continue

                # Unit tests only pair with the "unit" pseudo-arch.
                if (plat.arch == "unit") != (tc.type == "unit"):
                    # Discard silently
                    continue

                if device_testing_filter and instance.build_only:
                    discards[instance] = "Not runnable on device"
                    continue

                if tc.skip:
                    discards[instance] = "Skip filter"
                    continue

                if tc.build_on_all and not platform_filter:
                    platform_filter = []

                if tag_filter and not tc.tags.intersection(tag_filter):
                    discards[instance] = "Command line testcase tag filter"
                    continue

                if exclude_tag and tc.tags.intersection(exclude_tag):
                    discards[instance] = "Command line testcase exclude filter"
                    continue

                if testcase_filter and tc_name not in testcase_filter:
                    discards[instance] = "Testcase name filter"
                    continue

                if arch_filter and plat.arch not in arch_filter:
                    discards[instance] = "Command line testcase arch filter"
                    continue

                # --force-platform bypasses the per-testcase arch/platform
                # whitelists and excludes, but not the remaining checks.
                if not force_platform:

                    if tc.arch_whitelist and plat.arch not in tc.arch_whitelist:
                        discards[instance] = "Not in test case arch whitelist"
                        continue

                    if tc.arch_exclude and plat.arch in tc.arch_exclude:
                        discards[instance] = "In test case arch exclude"
                        continue

                    if tc.platform_exclude and plat.name in tc.platform_exclude:
                        discards[instance] = "In test case platform exclude"
                        continue

                if tc.toolchain_exclude and toolchain in tc.toolchain_exclude:
                    discards[instance] = "In test case toolchain exclude"
                    continue

                if platform_filter and plat.name not in platform_filter:
                    discards[instance] = "Command line platform filter"
                    continue

                if tc.platform_whitelist and plat.name not in tc.platform_whitelist:
                    discards[instance] = "Not in testcase platform whitelist"
                    continue

                if tc.toolchain_whitelist and toolchain not in tc.toolchain_whitelist:
                    discards[instance] = "Not in testcase toolchain whitelist"
                    continue

                if not plat.env_satisfied:
                    discards[instance] = "Environment ({}) not satisfied".format(", ".join(plat.env))
                    continue

                if not force_toolchain \
                        and toolchain and (toolchain not in plat.supported_toolchains) \
                        and tc.type != 'unit':
                    discards[instance] = "Not supported by the toolchain"
                    continue

                if plat.ram < tc.min_ram:
                    discards[instance] = "Not enough RAM"
                    continue

                if tc.depends_on:
                    dep_intersection = tc.depends_on.intersection(set(plat.supported))
                    if dep_intersection != set(tc.depends_on):
                        discards[instance] = "No hardware support"
                        continue

                if plat.flash < tc.min_flash:
                    discards[instance] = "Not enough FLASH"
                    continue

                if set(plat.ignore_tags) & tc.tags:
                    discards[instance] = "Excluded tags per platform"
                    continue

                # if nothing stopped us until now, it means this configuration
                # needs to be added.
                instance_list.append(instance)

            # no configurations, so jump to next testcase
            if not instance_list:
                continue

            # if sanitycheck was launched with no platform options at all, we
            # take all default platforms
            if default_platforms and not tc.build_on_all:
                if tc.platform_whitelist:
                    a = set(self.default_platforms)
                    b = set(tc.platform_whitelist)
                    c = a.intersection(b)
                    if c:
                        # NOTE: the lambda parameter shadows the outer 'tc'
                        # testcase variable; it is actually an instance here.
                        aa = list(filter(lambda tc: tc.platform.name in c, instance_list))
                        self.add_instances(aa)
                    else:
                        # Whitelist has no default platform: build the first
                        # configuration only.
                        self.add_instances(instance_list[:1])
                else:
                    instances = list(filter(lambda tc: tc.platform.default, instance_list))
                    self.add_instances(instances)

                for instance in list(filter(lambda inst: not inst.platform.default, instance_list)):
                    discards[instance] = "Not a default test platform"

            else:
                self.add_instances(instance_list)

        for _, case in self.instances.items():
            case.create_overlay(case.platform, self.enable_asan, self.enable_coverage, self.coverage_platform)

        self.discards = discards
        self.selected_platforms = set(p.platform.name for p in self.instances.values())

        return discards
2791
2792 def add_instances(self, instance_list):
2793 for instance in instance_list:
2794 self.instances[instance.name] = instance
2795
2796 def add_tasks_to_queue(self, test_only=False):
2797 for instance in self.instances.values():
2798 if test_only:
2799 if instance.run:
2800 pipeline.put({"op": "run", "test": instance, "status": "built"})
2801 else:
2802 if instance.status not in ['passed', 'skipped']:
2803 instance.status = None
2804 pipeline.put({"op": "cmake", "test": instance})
2805
2806 return "DONE FEEDING"
2807
    def execute(self):
        """Run all selected instances through the cmake/build/run pipeline
        using a bounded thread pool, then fill in size metrics.

        A feeder future enqueues initial work items; the loop drains the
        global ``pipeline`` queue, submitting one ProjectBuilder.process()
        future per message until no futures remain.
        """
        def calc_one_elf_size(instance):
            # Compute RAM/ROM footprint for one built (non-native) instance.
            if instance.status not in ["failed", "skipped"]:
                if instance.platform.type != "native":
                    size_calc = instance.calculate_sizes()
                    instance.metrics["ram_size"] = size_calc.get_ram_size()
                    instance.metrics["rom_size"] = size_calc.get_rom_size()
                    instance.metrics["unrecognized"] = size_calc.unrecognized_sections()
                else:
                    instance.metrics["ram_size"] = 0
                    instance.metrics["rom_size"] = 0
                    instance.metrics["unrecognized"] = []

                instance.metrics["handler_time"] = instance.handler.duration if instance.handler else 0

        logger.info("Adding tasks to the queue...")
        # We can use a with statement to ensure threads are cleaned up promptly
        with BoundedExecutor(bound=self.jobs, max_workers=self.jobs) as executor:

            # start a future for a thread which sends work in through the queue
            future_to_test = {
                executor.submit(self.add_tasks_to_queue, self.test_only): 'FEEDER DONE'}

            while future_to_test:
                # check for status of the futures which are currently working
                done, pending = concurrent.futures.wait(future_to_test, timeout=1,
                                                        return_when=concurrent.futures.FIRST_COMPLETED)

                # if there is incoming work, start a new future
                while not pipeline.empty():
                    # fetch the next work item from the queue
                    message = pipeline.get()
                    test = message['test']

                    pb = ProjectBuilder(self,
                                        test,
                                        lsan=self.enable_lsan,
                                        asan=self.enable_asan,
                                        coverage=self.enable_coverage,
                                        extra_args=self.extra_args,
                                        device_testing=self.device_testing,
                                        cmake_only=self.cmake_only,
                                        cleanup=self.cleanup,
                                        valgrind=self.enable_valgrind,
                                        inline_logs=self.inline_logs,
                                        generator=self.generator,
                                        generator_cmd=self.generator_cmd,
                                        verbose=self.verbose
                                        )
                    future_to_test[executor.submit(pb.process, message)] = test.name

                # process any completed futures
                for future in done:
                    test = future_to_test[future]
                    try:
                        data = future.result()
                    except Exception as exc:
                        logger.error('%r generated an exception: %s' % (test, exc))
                        sys.exit('%r generated an exception: %s' % (test, exc))

                    else:
                        if data:
                            logger.debug(data)

                    # remove the now completed future
                    del future_to_test[future]

                for future in pending:
                    test = future_to_test[future]

                    try:
                        future.result(timeout=180)
                    except concurrent.futures.TimeoutError:
                        logger.warning("{} stuck?".format(test))

        if self.enable_size_report and not self.cmake_only:
            # Parallelize size calculation
            executor = concurrent.futures.ThreadPoolExecutor(self.jobs)
            futures = [executor.submit(calc_one_elf_size, instance)
                       for instance in self.instances.values()]
            concurrent.futures.wait(futures)
        else:
            # Size report disabled: still populate the metric keys so
            # downstream reporting can rely on them existing.
            for instance in self.instances.values():
                instance.metrics["ram_size"] = 0
                instance.metrics["rom_size"] = 0
                instance.metrics["handler_time"] = instance.handler.duration if instance.handler else 0
                instance.metrics["unrecognized"] = []
2895
2896 def discard_report(self, filename):
2897
2898 try:
2899 if self.discards is None:
2900 raise SanityRuntimeError("apply_filters() hasn't been run!")
2901 except Exception as e:
2902 logger.error(str(e))
2903 sys.exit(2)
2904
2905 with open(filename, "wt") as csvfile:
2906 fieldnames = ["test", "arch", "platform", "reason"]
2907 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
2908 cw.writeheader()
2909 for instance, reason in sorted(self.discards.items()):
2910 rowdict = {"test": instance.testcase.name,
2911 "arch": instance.platform.arch,
2912 "platform": instance.platform.name,
2913 "reason": reason}
2914 cw.writerow(rowdict)
2915
Anas Nashif6915adf2020-04-22 09:39:42 -04002916 def target_report(self, outdir, suffix, append=False):
Anas Nashifce2b4182020-03-24 14:40:28 -04002917 platforms = {inst.platform.name for _, inst in self.instances.items()}
2918 for platform in platforms:
Anas Nashif6915adf2020-04-22 09:39:42 -04002919 if suffix:
2920 filename = os.path.join(outdir,"{}_{}.xml".format(platform, suffix))
2921 else:
2922 filename = os.path.join(outdir,"{}.xml".format(platform))
Anas Nashif90415502020-04-11 22:15:04 -04002923 self.xunit_report(filename, platform, full_report=True, append=append)
Anas Nashifce2b4182020-03-24 14:40:28 -04002924
Anas Nashif90415502020-04-11 22:15:04 -04002925
2926 @staticmethod
2927 def process_log(log_file):
2928 filtered_string = ""
2929 if os.path.exists(log_file):
2930 with open(log_file, "rb") as f:
2931 log = f.read().decode("utf-8")
2932 filtered_string = ''.join(filter(lambda x: x in string.printable, log))
2933
2934 return filtered_string
2935
    def xunit_report(self, filename, platform=None, full_report=False, append=False):
        """Write an xunit-style XML report for the run to *filename*.

        platform: when given, only instances for that platform are reported.
        full_report: report each individual sub-case result (PASS/FAIL/
            BLOCK/SKIP per case) instead of one entry per test instance.
        append: update an existing report file in place, replacing only the
            testcases that were re-run.
        """
        # First pass: aggregate counters for the <testsuite> attributes.
        fails = 0
        passes = 0
        errors = 0
        skips = 0
        duration = 0

        for _, instance in self.instances.items():
            if platform and instance.platform.name != platform:
                continue

            handler_time = instance.metrics.get('handler_time', 0)
            duration += handler_time
            if full_report:
                # Count per-case results; BLOCK maps to "error".
                for k in instance.results.keys():
                    if instance.results[k] == 'PASS':
                        passes += 1
                    elif instance.results[k] == 'BLOCK':
                        errors += 1
                    elif instance.results[k] == 'SKIP':
                        skips += 1
                    else:
                        fails += 1
            else:
                # Per-instance accounting: build/crash problems count as
                # errors, other failures as plain failures.
                if instance.status in ["failed", "timeout"]:
                    if instance.reason in ['build_error', 'handler_crash']:
                        errors += 1
                    else:
                        fails += 1
                elif instance.status == 'skipped':
                    skips += 1
                else:
                    passes += 1

        run = "Sanitycheck"
        eleTestsuite = None

        # When we re-run the tests, we re-use the results and update only with
        # the newly run tests.
        if os.path.exists(filename) and append:
            tree = ET.parse(filename)
            eleTestsuites = tree.getroot()
            eleTestsuite = tree.findall('testsuite')[0]
            eleTestsuite.attrib['failures'] = "%d" % fails
            eleTestsuite.attrib['errors'] = "%d" % errors
            eleTestsuite.attrib['skip'] = "%d" % skips

        else:
            eleTestsuites = ET.Element('testsuites')
            eleTestsuite = ET.SubElement(eleTestsuites, 'testsuite',
                                         name=run, time="%f" % duration,
                                         tests="%d" % (errors + passes + fails + skips),
                                         failures="%d" % fails,
                                         errors="%d" % (errors), skip="%s" % (skips))

        # Second pass: emit one <testcase> element per case (full_report)
        # or per instance.
        for _, instance in self.instances.items():
            if platform and instance.platform.name != platform:
                continue

            if full_report:
                tname = os.path.basename(instance.testcase.name)
            else:
                tname = instance.testcase.name
            # remove testcases that are being re-run from exiting reports
            if append:
                for tc in eleTestsuite.findall('testcase'):
                    if tc.get('classname') == "%s:%s" % (instance.platform.name, tname):
                        eleTestsuite.remove(tc)

            handler_time = instance.metrics.get('handler_time', 0)

            if full_report:
                for k in instance.results.keys():
                    eleTestcase = ET.SubElement(
                        eleTestsuite, 'testcase',
                        classname="%s:%s" % (instance.platform.name, tname),
                        name="%s" % (k), time="%f" % handler_time)
                    if instance.results[k] in ['FAIL', 'BLOCK']:
                        if instance.results[k] == 'FAIL':
                            el = ET.SubElement(
                                eleTestcase,
                                'failure',
                                type="failure",
                                message="failed")
                        else:
                            el = ET.SubElement(
                                eleTestcase,
                                'error',
                                type="failure",
                                message="failed")
                        # Attach the (printable-filtered) handler log as the
                        # failure text.
                        p = os.path.join(self.outdir, instance.platform.name, instance.testcase.name)
                        log_file = os.path.join(p, "handler.log")
                        el.text = self.process_log(log_file)

                    elif instance.results[k] == 'SKIP':
                        el = ET.SubElement(
                            eleTestcase,
                            'skipped',
                            type="skipped",
                            message="Skipped")
            else:
                eleTestcase = ET.SubElement(eleTestsuite, 'testcase',
                    classname="%s:%s" % (instance.platform.name, instance.testcase.name),
                    name="%s" % (instance.testcase.name),
                    time="%f" % handler_time)
                if instance.status in ["failed", "timeout"]:
                    failure = ET.SubElement(
                        eleTestcase,
                        'failure',
                        type="failure",
                        message=instance.reason)
                    # Prefer the handler log unless the failure happened at
                    # build time (then only build.log is meaningful).
                    p = ("%s/%s/%s" % (self.outdir, instance.platform.name, instance.testcase.name))
                    bl = os.path.join(p, "build.log")
                    hl = os.path.join(p, "handler.log")
                    log_file = bl
                    if instance.reason != 'Build error':
                        if os.path.exists(hl):
                            log_file = hl
                        else:
                            log_file = bl

                    failure.text = self.process_log(log_file)

                elif instance.status == "skipped":
                    ET.SubElement(eleTestcase, 'skipped', type="skipped", message="Skipped")


        result = ET.tostring(eleTestsuites)
        with open(filename, 'wb') as report:
            report.write(result)
3066
Anas Nashif90415502020-04-11 22:15:04 -04003067
Anas Nashifce2b4182020-03-24 14:40:28 -04003068 def csv_report(self, filename):
3069 with open(filename, "wt") as csvfile:
3070 fieldnames = ["test", "arch", "platform", "status",
3071 "extra_args", "handler", "handler_time", "ram_size",
3072 "rom_size"]
3073 cw = csv.DictWriter(csvfile, fieldnames, lineterminator=os.linesep)
3074 cw.writeheader()
3075 for instance in self.instances.values():
3076 rowdict = {"test": instance.testcase.name,
3077 "arch": instance.platform.arch,
3078 "platform": instance.platform.name,
3079 "extra_args": " ".join(instance.testcase.extra_args),
3080 "handler": instance.platform.simulation}
3081
3082 rowdict["status"] = instance.status
3083 if instance.status not in ["failed", "timeout"]:
3084 if instance.handler:
3085 rowdict["handler_time"] = instance.metrics.get("handler_time", 0)
3086 ram_size = instance.metrics.get("ram_size", 0)
3087 rom_size = instance.metrics.get("rom_size", 0)
3088 rowdict["ram_size"] = ram_size
3089 rowdict["rom_size"] = rom_size
3090 cw.writerow(rowdict)
3091
3092 def get_testcase(self, identifier):
3093 results = []
3094 for _, tc in self.testcases.items():
3095 for case in tc.cases:
3096 if case == identifier:
3097 results.append(tc)
3098 return results
3099
3100
class CoverageTool:
    """ Base class for every supported coverage tool
    """

    def __init__(self):
        # Both attributes are filled in by the caller before generate()
        # is invoked: the gcov binary to use and the source tree root.
        self.gcov_tool = None
        self.base_dir = None

    @staticmethod
    def factory(tool):
        """Instantiate the coverage back-end named by *tool*.

        Returns an Lcov or Gcovr instance, or None (after logging an
        error) for an unknown tool name.
        """
        if tool == 'lcov':
            t = Lcov()
        elif tool == 'gcovr':
            # Bug fix: this branch previously instantiated Lcov(), so
            # selecting gcovr silently fell back to the lcov back-end.
            t = Gcovr()
        else:
            logger.error("Unsupported coverage tool specified: {}".format(tool))
            return None

        return t

    @staticmethod
    def retrieve_gcov_data(intput_file):
        """Extract the gcov hex dumps embedded in a handler log.

        Returns a dict with:
          'complete': True when the GCOV_COVERAGE_DUMP_END marker was seen,
                      or when no dump was present at all;
          'data':     mapping of gcda file name -> hex dump payload.
        """
        logger.debug("Working on %s" % intput_file)
        extracted_coverage_info = {}
        capture_data = False
        capture_complete = False
        with open(intput_file, 'r') as fp:
            for line in fp.readlines():
                if re.search("GCOV_COVERAGE_DUMP_START", line):
                    capture_data = True
                    continue
                if re.search("GCOV_COVERAGE_DUMP_END", line):
                    capture_complete = True
                    break
                # Loop until the coverage data is found.
                if not capture_data:
                    continue
                # Payload lines look like "*<filename><<hexdump>\n".
                if line.startswith("*"):
                    sp = line.split("<")
                    if len(sp) > 1:
                        # Remove the leading delimiter "*"
                        file_name = sp[0][1:]
                        # Remove the trailing new line char
                        hex_dump = sp[1][:-1]
                    else:
                        continue
                else:
                    continue
                extracted_coverage_info.update({file_name: hex_dump})
            # A log with no dump at all is considered "complete" (nothing
            # to capture), not an error.
            if not capture_data:
                capture_complete = True
        return {'complete': capture_complete, 'data': extracted_coverage_info}

    @staticmethod
    def create_gcda_files(extracted_coverage_info):
        """Write each extracted hex dump out as a binary .gcda file."""
        logger.debug("Generating gcda files")
        for filename, hexdump_val in extracted_coverage_info.items():
            # if kobject_hash is given for coverage gcovr fails
            # hence skipping it problem only in gcovr v4.1
            if "kobject_hash" in filename:
                filename = (filename[:-4]) + "gcno"
                try:
                    os.remove(filename)
                except Exception:
                    pass
                continue

            with open(filename, 'wb') as fp:
                fp.write(bytes.fromhex(hexdump_val))

    def generate(self, outdir):
        """Collect coverage from every handler.log under *outdir*, write
        .gcda files, and render a report via the tool-specific
        _generate() hook."""
        for filename in glob.glob("%s/**/handler.log" % outdir, recursive=True):
            gcov_data = self.__class__.retrieve_gcov_data(filename)
            capture_complete = gcov_data['complete']
            extracted_coverage_info = gcov_data['data']
            if capture_complete:
                self.__class__.create_gcda_files(extracted_coverage_info)
                logger.debug("Gcov data captured: {}".format(filename))
            else:
                logger.error("Gcov data capture incomplete: {}".format(filename))

        with open(os.path.join(outdir, "coverage.log"), "a") as coveragelog:
            ret = self._generate(outdir, coveragelog)
            if ret == 0:
                logger.info("HTML report generated: {}".format(
                    os.path.join(outdir, "coverage", "index.html")))
3187
3188
class Lcov(CoverageTool):
    """lcov/genhtml back-end for coverage report generation."""

    def __init__(self):
        super().__init__()
        self.ignores = []

    def add_ignore_file(self, pattern):
        # lcov filters are glob patterns; match the name anywhere in the path.
        self.ignores.append('*' + pattern + '*')

    def add_ignore_directory(self, pattern):
        self.ignores.append(pattern + '/*')

    def _generate(self, outdir, coveragelog):
        """Run lcov capture/extract/remove passes, then genhtml.

        Returns genhtml's exit status (0 on success).
        """
        coveragefile = os.path.join(outdir, "coverage.info")
        ztestfile = os.path.join(outdir, "ztest.info")
        lcov_base = ["lcov", "--gcov-tool", self.gcov_tool]
        branch_rc = ["--rc", "lcov_branch_coverage=1"]

        subprocess.call(lcov_base +
                        ["--capture", "--directory", outdir] + branch_rc +
                        ["--output-file", coveragefile], stdout=coveragelog)
        # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
        subprocess.call(lcov_base +
                        ["--extract", coveragefile,
                         os.path.join(self.base_dir, "tests", "ztest", "*"),
                         "--output-file", ztestfile] + branch_rc,
                        stdout=coveragelog)

        if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
            subprocess.call(lcov_base +
                            ["--remove", ztestfile,
                             os.path.join(self.base_dir, "tests/ztest/test/*"),
                             "--output-file", ztestfile] + branch_rc,
                            stdout=coveragelog)
            files = [coveragefile, ztestfile]
        else:
            files = [coveragefile]

        for pattern in self.ignores:
            subprocess.call(lcov_base +
                            ["--remove", coveragefile, pattern,
                             "--output-file", coveragefile] + branch_rc,
                            stdout=coveragelog)

        # The --ignore-errors source option is added to avoid it exiting due to
        # samples/application_development/external_lib/
        return subprocess.call(["genhtml", "--legend", "--branch-coverage",
                                "--ignore-errors", "source",
                                "-output-directory",
                                os.path.join(outdir, "coverage")] + files,
                               stdout=coveragelog)
3240
3241
class Gcovr(CoverageTool):
    """gcovr back-end for coverage report generation."""

    def __init__(self):
        super().__init__()
        self.ignores = []

    def add_ignore_file(self, pattern):
        # gcovr exclusions are regular expressions.
        self.ignores.append('.*' + pattern + '.*')

    def add_ignore_directory(self, pattern):
        self.ignores.append(pattern + '/.*')

    @staticmethod
    def _interleave_list(prefix, elements):
        """Return [prefix, e0, prefix, e1, ...] for each item in *elements*.

        Used to repeat a command-line flag (e.g. "-e") before each value.
        Fix: the second parameter was previously named "list", shadowing
        the builtin.
        """
        tuple_list = [(prefix, item) for item in elements]
        return [item for sublist in tuple_list for item in sublist]

    def _generate(self, outdir, coveragelog):
        """Run gcovr JSON capture passes, then render an HTML report.

        Returns gcovr's exit status from the HTML generation step.
        """
        coveragefile = os.path.join(outdir, "coverage.json")
        ztestfile = os.path.join(outdir, "ztest.json")

        excludes = Gcovr._interleave_list("-e", self.ignores)

        # We want to remove tests/* and tests/ztest/test/* but save tests/ztest
        subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
                         self.gcov_tool, "-e", "tests/*"] + excludes +
                        ["--json", "-o", coveragefile, outdir],
                        stdout=coveragelog)

        subprocess.call(["gcovr", "-r", self.base_dir, "--gcov-executable",
                         self.gcov_tool, "-f", "tests/ztest", "-e",
                         "tests/ztest/test/*", "--json", "-o", ztestfile,
                         outdir], stdout=coveragelog)

        if os.path.exists(ztestfile) and os.path.getsize(ztestfile) > 0:
            files = [coveragefile, ztestfile]
        else:
            files = [coveragefile]

        subdir = os.path.join(outdir, "coverage")
        os.makedirs(subdir, exist_ok=True)

        tracefiles = self._interleave_list("--add-tracefile", files)

        return subprocess.call(["gcovr", "-r", self.base_dir, "--html",
                                "--html-details"] + tracefiles +
                               ["-o", os.path.join(subdir, "index.html")],
                               stdout=coveragelog)
class HardwareMap:
    """Tracks hardware (boards) connected to the host for device testing."""

    schema_path = os.path.join(ZEPHYR_BASE, "scripts", "sanity_chk", "hwmap-schema.yaml")

    # Only devices from these USB manufacturers are considered when scanning.
    manufacturer = [
        'ARM',
        'SEGGER',
        'MBED',
        'STMicroelectronics',
        'Atmel Corp.',
        'Texas Instruments',
        'Silicon Labs',
        'NXP Semiconductors',
        'Microchip Technology Inc.',
        'FTDI',
        'Digilent'
    ]

    # Maps a flash/debug runner to the USB product strings (exact names or
    # regexes) of the probes it supports.
    runner_mapping = {
        'pyocd': [
            'DAPLink CMSIS-DAP',
            'MBED CMSIS-DAP'
        ],
        'jlink': [
            'J-Link',
            'J-Link OB'
        ],
        'openocd': [
            'STM32 STLink', '^XDS110.*'
        ],
        'dediprog': [
            'TTL232R-3V3',
            'MCP2200 USB Serial Port Emulator'
        ]
    }

    def __init__(self):
        self.detected = []            # devices found by scan_hw()
        self.connected_hardware = []  # devices from a map file / command line

    def load_device_from_cmdline(self, serial, platform):
        """Register a single device specified on the command line."""
        device = {
            "serial": serial,
            "platform": platform,
            "counter": 0,
            "available": True,
            "connected": True
        }
        self.connected_hardware.append(device)

    def load_hardware_map(self, map_file):
        """Load and schema-validate a hardware map YAML file."""
        hwm_schema = scl.yaml_load(self.schema_path)
        self.connected_hardware = scl.yaml_load_verify(map_file, hwm_schema)
        for i in self.connected_hardware:
            i['counter'] = 0

    def scan_hw(self, persistent=False):
        """Detect connected serial devices and populate self.detected.

        With persistent=True on Linux, stable /dev/serial/by-id symlinks
        are recorded instead of volatile /dev/ttyACMx node names.
        """
        from serial.tools import list_ports

        if persistent and platform.system() == 'Linux':
            # On Linux, /dev/serial/by-id provides symlinks to
            # '/dev/ttyACMx' nodes using names which are unique as
            # long as manufacturers fill out USB metadata nicely.
            #
            # This creates a map from '/dev/ttyACMx' device nodes
            # to '/dev/serial/by-id/usb-...' symlinks. The symlinks
            # go into the hardware map because they stay the same
            # even when the user unplugs / replugs the device.
            #
            # Some inexpensive USB/serial adapters don't result
            # in unique names here, though, so use of this feature
            # requires explicitly setting persistent=True.
            by_id = Path('/dev/serial/by-id')

            def readlink(link):
                return str((by_id / link).resolve())

            persistent_map = {readlink(link): str(link)
                              for link in by_id.iterdir()}
        else:
            persistent_map = {}

        serial_devices = list_ports.comports()
        logger.info("Scanning connected hardware...")
        for d in serial_devices:
            if d.manufacturer in self.manufacturer:

                # TI XDS110 can have multiple serial devices for a single board
                # assume endpoint 0 is the serial, skip all others
                if d.manufacturer == 'Texas Instruments' and not d.location.endswith('0'):
                    continue
                s_dev = {}
                s_dev['platform'] = "unknown"
                s_dev['id'] = d.serial_number
                s_dev['serial'] = persistent_map.get(d.device, d.device)
                s_dev['product'] = d.product
                s_dev['runner'] = 'unknown'
                for runner, _ in self.runner_mapping.items():
                    products = self.runner_mapping.get(runner)
                    if d.product in products:
                        s_dev['runner'] = runner
                        # NOTE(review): this 'continue' skips the regex pass
                        # for the matched runner but keeps scanning the
                        # remaining runners -- confirm that is intended.
                        continue
                    # Try regex matching
                    for p in products:
                        if re.match(p, d.product):
                            s_dev['runner'] = runner

                s_dev['available'] = True
                s_dev['connected'] = True
                self.detected.append(s_dev)
            else:
                logger.warning("Unsupported device (%s): %s" % (d.manufacturer, d))

    def write_map(self, hwm_file):
        """Merge self.detected into *hwm_file* (creating it if missing).

        Existing entries are marked disconnected first, then re-connected
        when a detected device matches by id and product; unmatched
        detected devices are appended as new entries.
        """
        # use existing map
        if os.path.exists(hwm_file):
            with open(hwm_file, 'r') as yaml_file:
                hwm = yaml.load(yaml_file, Loader=yaml.FullLoader)
                # disconnect everything
                for h in hwm:
                    h['connected'] = False
                    h['serial'] = None

                for d in self.detected:
                    for h in hwm:
                        if d['id'] == h['id'] and d['product'] == h['product']:
                            h['connected'] = True
                            h['serial'] = d['serial']
                            d['match'] = True

                new = list(filter(lambda n: not n.get('match', False), self.detected))
                hwm = hwm + new

                logger.info("Registered devices:")
                self.dump(hwm)

            with open(hwm_file, 'w') as yaml_file:
                yaml.dump(hwm, yaml_file, default_flow_style=False)

        else:
            # create new file
            with open(hwm_file, 'w') as yaml_file:
                yaml.dump(self.detected, yaml_file, default_flow_style=False)
            logger.info("Detected devices:")
            self.dump(self.detected)

    @staticmethod
    def dump(hwmap=None, filtered=None, header=None, connected_only=False):
        """Print a github-style table of a hardware map.

        Fix: the defaults were previously mutable lists ([]), a classic
        Python pitfall; None sentinels are used instead. Behavior for all
        existing callers is unchanged.
        """
        hwmap = hwmap if hwmap is not None else []
        filtered = filtered if filtered is not None else []
        print("")
        table = []
        if not header:
            header = ["Platform", "ID", "Serial device"]
        for p in sorted(hwmap, key=lambda i: i['platform']):
            platform = p.get('platform')
            connected = p.get('connected', False)
            if filtered and platform not in filtered:
                continue

            if not connected_only or connected:
                table.append([platform, p.get('id', None), p.get('serial')])

        print(tabulate(table, headers=header, tablefmt="github"))
3451
3452
def size_report(sc):
    """Log a per-section size table for *sc*, followed by ROM/RAM totals.

    *sc* must expose .filename, .sections (dicts with "name", "virt_addr",
    "load_addr", "size" and "type" keys), .rom_size and .ram_size.
    """
    logger.info(sc.filename)
    logger.info("SECTION NAME VMA LMA SIZE HEX SZ TYPE")
    # Iterate sections directly instead of indexing via range(len(...)).
    for section in sc.sections:
        logger.info("%-17s 0x%08x 0x%08x %8d 0x%05x %-7s" %
                    (section["name"], section["virt_addr"], section["load_addr"],
                     section["size"], section["size"], section["type"]))

    logger.info("Totals: %d bytes (ROM), %d bytes (RAM)" %
                (sc.rom_size, sc.ram_size))
    logger.info("")
3466
3467
3468
def export_tests(filename, tests):
    """Export dotted test names into *filename* as CSV rows.

    Each name of the form "<section>.<subsection>..." becomes a row of
    (section, subsection, title, reference); names without a dot are
    logged as not exportable.
    """
    columns = ['section', 'subsection', 'title', 'reference']
    with open(filename, "wt") as csvfile:
        writer = csv.DictWriter(csvfile, columns, lineterminator=os.linesep)
        for test in tests:
            parts = test.split(".")
            if len(parts) > 1:
                subsection = " ".join(parts[1].split("_")).title()
                writer.writerow({
                    "section": parts[0].capitalize(),
                    "subsection": subsection,
                    "title": test,
                    "reference": test
                })
            else:
                logger.info("{} can't be exported".format(test))