sanitycheck: Add a harness that can run pytest scripts

1. Add a Pytest(Harness) class to harness.py that can run a pytest
script.
2. Use running_dir to store the current test directory, which can be
used by the pytest script (see the sketches below this list).
3. Document the usage of this feature in the Zephyr docs.
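
Pytest discovers the "--cmdopt" option through a conftest.py placed
next to the test scripts. A minimal sketch of that pytest-side
plumbing (file and fixture names follow pytest conventions; only
"--cmdopt" itself is fixed by pytest_run() in this patch):

    # conftest.py -- register --cmdopt and expose it as a fixture
    import pytest

    def pytest_addoption(parser):
        parser.addoption("--cmdopt")

    @pytest.fixture()
    def cmdopt(request):
        return request.config.getoption("--cmdopt")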

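A test script under pytest_root can then consume the twister build
directory through that fixture; a hypothetical example (the test name
and assertion are illustrative only):

    # test_dir.py -- consumes the cmdopt fixture from conftest.py
    import os

    def test_running_dir(cmdopt):
        # cmdopt holds the running_dir twister passed via --cmdopt
        assert os.path.isdir(cmdopt)
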
Signed-off-by: YouhuaX Zhu <youhuax.zhu@intel.com>
diff --git a/scripts/pylib/twister/harness.py b/scripts/pylib/twister/harness.py
index 60debe3..2269988 100644
--- a/scripts/pylib/twister/harness.py
+++ b/scripts/pylib/twister/harness.py
@@ -1,6 +1,9 @@
 # SPDX-License-Identifier: Apache-2.0
 import re
+import os
+import subprocess
 from collections import OrderedDict
+import xml.etree.ElementTree as ET
 
 result_re = re.compile(".*(PASS|FAIL|SKIP) - (test_)?(.*) in")
 
@@ -28,6 +31,7 @@
         self.recording = []
         self.fieldnames = []
         self.ztest = False
+        self.is_pytest = False
 
     def configure(self, instance):
         config = instance.testcase.harness_config
@@ -121,6 +125,92 @@
         else:
             self.tests[self.id] = "FAIL"
 
+class Pytest(Harness):
+    def configure(self, instance):
+        super().configure(instance)
+        self.running_dir = instance.build_dir
+        self.source_dir = instance.testcase.source_dir
+        self.pytest_root = 'pytest'
+        self.is_pytest = True
+        config = instance.testcase.harness_config
+
+        if config:
+            self.pytest_root = config.get('pytest_root', 'pytest')
+
+    def handle(self, line):
+        ''' Test cases that use pytest take their results from the pytest
+            tool invoked in pytest_run(), so all this handler does is
+            report a PASS to avoid a timeout; nothing is written into
+            handler.log.
+        '''
+        self.state = "passed"
+        self.tests[self.id] = "PASS"
+
+    def pytest_run(self, log_file):
+        ''' To keep the pytest artifacts in self.running_dir, that directory
+            is passed via "--cmdopt"; on the pytest side, add a matching
+            command line option and provide its value through a fixture.
+            If the pytest harness reports a failure, twister directs the
+            user to handler.log, so this method writes the result there.
+        '''
+        cmd = [
+            'pytest',
+            '-s',
+            os.path.join(self.source_dir, self.pytest_root),
+            '--cmdopt',
+            self.running_dir,
+            '--junit-xml',
+            os.path.join(self.running_dir, 'report.xml'),
+            '-q'
+        ]
+
+        log = open(log_file, "a")
+        outs = b''   # bytes, so the decode below is safe on failure
+        errs = b''
+
+        with subprocess.Popen(cmd,
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE) as proc:
+            try:
+                outs, errs = proc.communicate()
+                tree = ET.parse(os.path.join(self.running_dir, "report.xml"))
+                root = tree.getroot()
+                for child in root:
+                    if child.tag == 'testsuite':
+                        if child.attrib['failures'] != '0':
+                            self.state = "failed"
+                        elif child.attrib['skipped'] != '0':
+                            self.state = "skipped"
+                        elif child.attrib['errors'] != '0':
+                            self.state = "errors"
+                        else:
+                            self.state = "passed"
+            except subprocess.TimeoutExpired:
+                proc.kill()
+                self.state = "failed"
+            except ET.ParseError:
+                self.state = "failed"
+            except IOError:
+                log.write("Can't access report.xml\n")
+                self.state = "failed"
+
+        if self.state == "passed":
+            self.tests[self.id] = "PASS"
+            log.write("Pytest cases passed\n")
+        elif self.state == "skipped":
+            self.tests[self.id] = "SKIP"
+            log.write("Pytest cases skipped\n")
+            log.write("Please refer to report.xml for details\n")
+        else:
+            self.tests[self.id] = "FAIL"
+            log.write("Pytest cases failed\n")
+
+        log.write("\nOutput from pytest:\n")
+        log.write(outs.decode('UTF-8'))
+        log.write(errs.decode('UTF-8'))
+        log.close()
+
+
 class Test(Harness):
     RUN_PASSED = "PROJECT EXECUTION SUCCESSFUL"
     RUN_FAILED = "PROJECT EXECUTION FAILED"