everywhere: fix typos
Fix a lot of typos
Signed-off-by: Nazar Kazakov <nazar.kazakov.work@gmail.com>
diff --git a/scripts/ci/check_compliance.py b/scripts/ci/check_compliance.py
index 9d2ed13..42687ae 100755
--- a/scripts/ci/check_compliance.py
+++ b/scripts/ci/check_compliance.py
@@ -219,7 +219,7 @@
class KconfigCheck(ComplianceTest):
"""
Checks is we are introducing any new warnings/errors with Kconfig,
- for example using undefiend Kconfig variables.
+ for example using undefined Kconfig variables.
"""
name = "Kconfig"
doc = "See https://docs.zephyrproject.org/latest/guides/kconfig/index.html for more details."
@@ -586,7 +586,7 @@
class KconfigBasicCheck(KconfigCheck, ComplianceTest):
"""
Checks is we are introducing any new warnings/errors with Kconfig,
- for example using undefiend Kconfig variables.
+ for example using undefined Kconfig variables.
This runs the basic Kconfig test, which is checking only for undefined
references inside the Kconfig tree.
"""
diff --git a/scripts/coccinelle/const_config_info.cocci b/scripts/coccinelle/const_config_info.cocci
index 718051f..c780223 100644
--- a/scripts/coccinelle/const_config_info.cocci
+++ b/scripts/coccinelle/const_config_info.cocci
@@ -1,5 +1,5 @@
// Copyright (c) 2020 Nordic Semiconductor ASA
-// SPDX-License-Identifer: Apache-2.0
+// SPDX-License-Identifier: Apache-2.0
// Enforce preservation of const qualifier on config_info casts
//
diff --git a/scripts/coccinelle/find_dev_usage.cocci b/scripts/coccinelle/find_dev_usage.cocci
index fa96987..6c67af2 100644
--- a/scripts/coccinelle/find_dev_usage.cocci
+++ b/scripts/coccinelle/find_dev_usage.cocci
@@ -1,5 +1,5 @@
// Copyright (c) 2020 Intel Corporation
-// SPDX-License-Identifer: Apache-2.0
+// SPDX-License-Identifier: Apache-2.0
// Uses a python database (a dict) to find where const struct device
// variable are being used in zephyr functions and, if it's being in place
diff --git a/scripts/coccinelle/find_functions.cocci b/scripts/coccinelle/find_functions.cocci
index ed29d87..d8afb6d 100644
--- a/scripts/coccinelle/find_functions.cocci
+++ b/scripts/coccinelle/find_functions.cocci
@@ -1,5 +1,5 @@
// Copyright (c) 2020 Intel Corporation
-// SPDX-License-Identifer: Apache-2.0
+// SPDX-License-Identifier: Apache-2.0
// In patch mode, patch all device instance to const (if not already).
@@ -99,7 +99,7 @@
}
)
-// Insert function implentations and inlines
+// Insert function implementations and inlines
@script:python
depends on report
@
diff --git a/scripts/coccinelle/int_ms_to_timeout.cocci b/scripts/coccinelle/int_ms_to_timeout.cocci
index 2cfaaeb..01563e7 100644
--- a/scripts/coccinelle/int_ms_to_timeout.cocci
+++ b/scripts/coccinelle/int_ms_to_timeout.cocci
@@ -1,5 +1,5 @@
// Copyright (c) 2019-2020 Nordic Semiconductor ASA
-// SPDX-License-Identifer: Apache-2.0
+// SPDX-License-Identifier: Apache-2.0
// Convert legacy integer timeouts to timeout API
//
diff --git a/scripts/coccinelle/ms_timeout.cocci b/scripts/coccinelle/ms_timeout.cocci
index 0b925f7..a01eafe 100644
--- a/scripts/coccinelle/ms_timeout.cocci
+++ b/scripts/coccinelle/ms_timeout.cocci
@@ -1,5 +1,5 @@
// Copyright (c) 2019-2020 Nordic Semiconductor ASA
-// SPDX-License-Identifer: Apache-2.0
+// SPDX-License-Identifier: Apache-2.0
// Replace use of K_NO_WAIT and K_FOREVER in API that requires
// timeouts be specified as integral milliseconds.
@@ -17,7 +17,7 @@
// ** Handle millisecond timeout as the last parameter
-// Match identifer passed as timeout
+// Match identifier passed as timeout
@match_fn_l1@
identifier fn =~ "(?x)^
(dmic_read
@@ -80,7 +80,7 @@
// ** Handle millisecond timeout as second from last parameter
-// Match identifer passed as timeout
+// Match identifier passed as timeout
@match_fn_l2@
identifier fn =~ "(?x)^
(http_client_req
@@ -138,7 +138,7 @@
// ** Handle millisecond timeout as third from last parameter
-// Match identifer passed as timeout
+// Match identifier passed as timeout
@match_fn_l3@
identifier fn =~ "(?x)^
(can_send
diff --git a/scripts/coredump/coredump_gdbserver.py b/scripts/coredump/coredump_gdbserver.py
index e9ba93a..a1f0503 100755
--- a/scripts/coredump/coredump_gdbserver.py
+++ b/scripts/coredump/coredump_gdbserver.py
@@ -60,7 +60,7 @@
# know what is going on
logger.setLevel(logging.INFO)
- # Setup logging for "gdbstuc"
+ # Setup logging for "gdbstub"
logger = logging.getLogger("gdbstub")
if args.debug:
logger.setLevel(logging.DEBUG)
diff --git a/scripts/coredump/coredump_parser/elf_parser.py b/scripts/coredump/coredump_parser/elf_parser.py
index 013b618..743f9ef 100644
--- a/scripts/coredump/coredump_parser/elf_parser.py
+++ b/scripts/coredump/coredump_parser/elf_parser.py
@@ -26,7 +26,7 @@
Class to parse ELF file for memory content in various sections.
There are read-only sections (e.g. text and rodata) where
the memory content does not need to be dumped via coredump
- and can be retrived from the ELF file.
+ and can be retrieved from the ELF file.
"""
def __init__(self, elffile):
diff --git a/scripts/dts/gen_dts_cmake.py b/scripts/dts/gen_dts_cmake.py
index 8afc598..f3be6a0 100755
--- a/scripts/dts/gen_dts_cmake.py
+++ b/scripts/dts/gen_dts_cmake.py
@@ -28,7 +28,7 @@
The build system includes this generated file early on, so
devicetree values can be used at CMake processing time.
-Accss is not done directly, but with Zephyr CMake extension APIs,
+Access is not done directly, but with Zephyr CMake extension APIs,
like this:
# sets 'compat' to "vnd,soc" in CMake
diff --git a/scripts/footprint/compare_footprint b/scripts/footprint/compare_footprint
index 313a004..69403c0 100755
--- a/scripts/footprint/compare_footprint
+++ b/scripts/footprint/compare_footprint
@@ -144,7 +144,7 @@
os.path.basename(os.environ.get('ZEPHYR_BASE')))
if os.path.exists(tmp_location):
shutil.rmtree(tmp_location)
- logging.debug("clonning into %s" % tmp_location)
+ logging.debug("cloning into %s" % tmp_location)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=tempfile.gettempdir(), shell=True)
diff --git a/scripts/footprint/fpdiff.py b/scripts/footprint/fpdiff.py
index 3c4172b..194d438 100755
--- a/scripts/footprint/fpdiff.py
+++ b/scripts/footprint/fpdiff.py
@@ -8,7 +8,7 @@
# end up with a json file in the build directory that can be used as input
# for this script.
-# The output shows which symbols insreased and which decreased in size and
+# The output shows which symbols increased and which decreased in size and
# also tracked added/remove symbols as well.
# Example:
diff --git a/scripts/gen_kobject_list.py b/scripts/gen_kobject_list.py
index d93f5b4..8368799 100755
--- a/scripts/gen_kobject_list.py
+++ b/scripts/gen_kobject_list.py
@@ -119,7 +119,7 @@
return "K_OBJ_%s" % name.upper()
subsystems = [
- # Editing the list is deprecated, add the __subsystem sentinal to your driver
+ # Editing the list is deprecated, add the __subsystem sentinel to your driver
# api declaration instead. e.x.
#
# __subsystem struct my_driver_api {
diff --git a/scripts/gen_kobject_placeholders.py b/scripts/gen_kobject_placeholders.py
index f5d1e87..3cbad0a 100755
--- a/scripts/gen_kobject_placeholders.py
+++ b/scripts/gen_kobject_placeholders.py
@@ -115,7 +115,7 @@
parser.add_argument("--outdir", required=True,
help="Output directory (<build_dir>/include/generated)")
parser.add_argument("--datapct", required=True,
- help="Multipler to the size of reserved space for DATA region")
+ help="Multiplier to the size of reserved space for DATA region")
parser.add_argument("--rodata", required=True,
help="Extra bytes to reserve for RODATA region")
parser.add_argument("-v", "--verbose", action="store_true",
diff --git a/scripts/gen_relocate_app.py b/scripts/gen_relocate_app.py
index 90424cd..626cbc7 100644
--- a/scripts/gen_relocate_app.py
+++ b/scripts/gen_relocate_app.py
@@ -42,7 +42,7 @@
import warnings
from elftools.elf.elffile import ELFFile
-# This script will create linker comands for text,rodata data, bss section relocation
+# This script will create linker commands for text, rodata, data, bss section relocation
PRINT_TEMPLATE = """
KEEP(*({0}))
@@ -344,7 +344,7 @@
code_generation["extern"] += EXTERN_LINKER_VAR_DECLARATION.format(
memory_type.lower(), mtype)
- # add for all the bss data that needs to be zeored on boot up
+ # add for all the bss data that needs to be zeroed on boot up
if full_list_of_sections["bss"] and generate_section["bss"] and memory_type != "SRAM":
code_generation["zero_code"] += MEMSET_TEMPLATE.format(memory_type.lower())
code_generation["extern"] += EXTERN_LINKER_VAR_DECLARATION.format(
@@ -450,7 +450,7 @@
rel_dict = create_dict_wrt_mem()
complete_list_of_sections = {}
- # Create/or trucate file contents if it already exists
+ # Create/or truncate file contents if it already exists
# raw = open(linker_file, "w")
# for each memory_type, create text/rodata/data/bss sections for all obj files
diff --git a/scripts/gen_syscalls.py b/scripts/gen_syscalls.py
index 2abf054..1d25041 100755
--- a/scripts/gen_syscalls.py
+++ b/scripts/gen_syscalls.py
@@ -116,7 +116,7 @@
uintptr_t arg4, uintptr_t arg5, uintptr_t arg6, void *ssf);
"""
-# defines a macro wrapper which supercedes the syscall when used
+# defines a macro wrapper which supersedes the syscall when used
# and provides tracing enter/exit hooks while allowing per compilation unit
# enable/disable of syscall tracing. Used for returning functions
# Note that the last argument to the exit macro is the return value.
@@ -135,7 +135,7 @@
#endif
"""
-# defines a macro wrapper which supercedes the syscall when used
+# defines a macro wrapper which supersedes the syscall when used
# and provides tracing enter/exit hooks while allowing per compilation unit
# enable/disable of syscall tracing. Used for non-returning (void) functions
syscall_tracer_void_template = """
diff --git a/scripts/get_maintainer.py b/scripts/get_maintainer.py
index 0c27e41..02e30da 100755
--- a/scripts/get_maintainer.py
+++ b/scripts/get_maintainer.py
@@ -106,7 +106,7 @@
"maintainer",
metavar="MAINTAINER",
nargs="?",
- help="List all areas maintained by maintaier.")
+ help="List all areas maintained by maintainer.")
areas_parser.set_defaults(cmd_fn=Maintainers._areas_cmd)
diff --git a/scripts/kconfig/kconfigfunctions.py b/scripts/kconfig/kconfigfunctions.py
index aca59e0..19cdc49 100644
--- a/scripts/kconfig/kconfigfunctions.py
+++ b/scripts/kconfig/kconfigfunctions.py
@@ -197,7 +197,7 @@
"""
This function takes a 'chosen' property and treats that property as a path
to an EDT node. If it finds an EDT node, it will look to see if that
- nodnode has a register at the given 'index' and return the address value of
+ node has a register at the given 'index' and return the address value of
that reg, if not we return 0.
The function will divide the value based on 'unit':
diff --git a/scripts/logging/dictionary/dictionary_parser/utils.py b/scripts/logging/dictionary/dictionary_parser/utils.py
index 62ac64d..f94a809 100644
--- a/scripts/logging/dictionary/dictionary_parser/utils.py
+++ b/scripts/logging/dictionary/dictionary_parser/utils.py
@@ -5,7 +5,7 @@
# SPDX-License-Identifier: Apache-2.0
"""
-Utilities for Dictionary-based Logingg Parser
+Utilities for Dictionary-based Logging Parser
"""
import binascii
diff --git a/scripts/parse_syscalls.py b/scripts/parse_syscalls.py
index e00474f..9994efd 100644
--- a/scripts/parse_syscalls.py
+++ b/scripts/parse_syscalls.py
@@ -68,7 +68,7 @@
files.sort()
for fn in files:
- # toolchain/common.h has the definitions of these tagswhich we
+ # toolchain/common.h has the definitions of these tags which we
# don't want to trip over
path = os.path.join(root, fn)
if (not (path.endswith(".h") or path.endswith(".c")) or
diff --git a/scripts/pylib/twister/scl.py b/scripts/pylib/twister/scl.py
index 7c1a430..2518e4b 100644
--- a/scripts/pylib/twister/scl.py
+++ b/scripts/pylib/twister/scl.py
@@ -27,12 +27,12 @@
"""
Safely load a YAML document
- Follows recomendations from
+ Follows recommendations from
https://security.openstack.org/guidelines/dg_avoid-dangerous-input-parsing-libraries.html.
:param str filename: filename to load
:raises yaml.scanner: On YAML scan issues
- :raises: any other exception on file access erors
+ :raises: any other exception on file access errors
:return: dictionary representing the YAML document
"""
try:
@@ -46,7 +46,7 @@
e.note, cmark.name, cmark.line, cmark.column, e.context)
raise
-# If pykwalify is installed, then the validate functionw ill work --
+# If pykwalify is installed, then the validate function will work --
# otherwise, it is a stub and we'd warn about it.
try:
import pykwalify.core
@@ -67,7 +67,7 @@
def yaml_load_verify(filename, schema):
"""
Safely load a testcase/sample yaml document and validate it
- against the YAML schema, returing in case of success the YAML data.
+ against the YAML schema, returning in case of success the YAML data.
:param str filename: name of the file to load and process
:param dict schema: loaded YAML schema (can load with :func:`yaml_load`)
diff --git a/scripts/pylib/twister/twisterlib.py b/scripts/pylib/twister/twisterlib.py
index 67ef1c9..01f4731 100755
--- a/scripts/pylib/twister/twisterlib.py
+++ b/scripts/pylib/twister/twisterlib.py
@@ -964,7 +964,7 @@
The guest virtual time in QEMU icount mode isn't host time and
it's maintained by counting guest instructions, so we use QEMU
- process exection time to mostly simulate the time of guest OS.
+ process execution time to mostly simulate the time of guest OS.
"""
proc = psutil.Process(pid)
cpu_time = proc.cpu_times()
@@ -1747,7 +1747,7 @@
br"^\s*void\s+test_main\(void\)",
re.MULTILINE)
stc_regex = re.compile(
- br"""^\s* # empy space at the beginning is ok
+ br"""^\s* # empty space at the beginning is ok
# catch the case where it is declared in the same sentence, e.g:
#
# ztest_test_suite(mutex_complex, ztest_user_unit_test(TESTNAME));
@@ -1758,7 +1758,7 @@
)?
# Catch ztest[_user]_unit_test-[_setup_teardown](TESTNAME)
ztest_(?:1cpu_)?(?:user_)?unit_test(?:_setup_teardown)?
- # Consume the argument that becomes the extra testcse
+ # Consume the argument that becomes the extra testcase
\(\s*(?P<stc_name>[a-zA-Z0-9_]+)
# _setup_teardown() variant has two extra arguments that we ignore
(?:\s*,\s*[a-zA-Z0-9_]+\s*,\s*[a-zA-Z0-9_]+)?
@@ -3486,7 +3486,7 @@
self.filtered_platforms = set(p.platform.name for p in self.instances.values()
if p.status != "skipped" )
- # Remove from discards configururations that must not be discarded (e.g. integration_platforms when --integration was used)
+ # Remove from discards configurations that must not be discarded (e.g. integration_platforms when --integration was used)
for instance in remove_from_discards:
del self.discards[instance]
diff --git a/scripts/tests/twister/README.md b/scripts/tests/twister/README.md
index c19d70d..9e9f783 100644
--- a/scripts/tests/twister/README.md
+++ b/scripts/tests/twister/README.md
@@ -54,4 +54,4 @@
- test_twister.py : Contains basic testcases for environment variables, verifying testcase & platform schema's.
- test_testsuite_class.py : Contains testcases for Testsuite class (except reporting functionality) in twisterlib.py.
- test_testinstance.py : Contains testcases for Testinstance and Testcase class.
-- test_reporting_testsuite.py : Contains testcases for reporting fucntionality of Testsuite class of twister.
+- test_reporting_testsuite.py : Contains testcases for reporting functionality of Testsuite class of twister.
diff --git a/scripts/tests/twister/test_reporting_testsuite.py b/scripts/tests/twister/test_reporting_testsuite.py
index cea2c44..f15b90d 100644
--- a/scripts/tests/twister/test_reporting_testsuite.py
+++ b/scripts/tests/twister/test_reporting_testsuite.py
@@ -23,7 +23,7 @@
Test 1: Check if apply_filters function has been run before running
discard_report
Test 2: Test if the generated report is not empty
- Test 3: Test if the gerenrated report contains the expected columns"""
+ Test 3: Test if the generated report contains the expected columns"""
class_testsuite.platforms = platforms_list
class_testsuite.platform_names = [p.name for p in platforms_list]
class_testsuite.testcases = all_testcases_dict
@@ -43,7 +43,7 @@
def test_csv_report(class_testsuite, instances_fixture, tmpdir):
""" Testing csv_report function of Testsuite class in twister
- Test 1: Assert the csv_report isnt empty after execution of csv_report function
+ Test 1: Assert the csv_report isn't empty after execution of csv_report function
Test 2: Assert on the columns and values of the generated csv_report"""
class_testsuite.instances = instances_fixture
filename = tmpdir.mkdir("test_csv").join("twister_csv_report.csv")
diff --git a/scripts/tests/twister/test_testinstance.py b/scripts/tests/twister/test_testinstance.py
index 613bc40..7475bae 100644
--- a/scripts/tests/twister/test_testinstance.py
+++ b/scripts/tests/twister/test_testinstance.py
@@ -30,7 +30,7 @@
def test_check_build_or_run(class_testsuite, monkeypatch, all_testcases_dict, platforms_list, build_only, slow, harness, platform_type, platform_sim, device_testing, fixture, expected):
"""" Test to check the conditions for build_only and run scenarios
Scenario 1: Test when different parameters are passed, build_only and run are set correctly
- Sceanrio 2: Test if build_only is enabled when the OS is Windows"""
+ Scenario 2: Test if build_only is enabled when the OS is Windows"""
class_testsuite.testcases = all_testcases_dict
testcase = class_testsuite.testcases.get('scripts/tests/twister/test_data/testcases/tests/test_a/test_a.check_1')
diff --git a/scripts/twister b/scripts/twister
index 937d826..69e7c70 100755
--- a/scripts/twister
+++ b/scripts/twister
@@ -539,7 +539,7 @@
parser.add_argument(
"-j", "--jobs", type=int,
help="Number of jobs for building, defaults to number of CPU threads, "
- "overcommited by factor 2 when --build-only.")
+ "overcommitted by factor 2 when --build-only.")
parser.add_argument(
"--json-report", action="store_true",
@@ -1112,7 +1112,7 @@
suite.selected_platforms = set(p.platform.name for p in suite.instances.values())
elif options.test_only:
# Get list of connected hardware and filter tests to only be run on connected hardware
- # in cases where no platform was specified when runn the tests.
+ # in cases where no platform was specified when running the tests.
# If the platform does not exist in the hardware map, just skip it.
connected_list = []
if not options.platform:
diff --git a/scripts/utils/pinctrl_nrf_migrate.py b/scripts/utils/pinctrl_nrf_migrate.py
index 1618522..6c6438d 100644
--- a/scripts/utils/pinctrl_nrf_migrate.py
+++ b/scripts/utils/pinctrl_nrf_migrate.py
@@ -296,7 +296,7 @@
Args:
device: Device name.
- indent: Intentation.
+ indent: Indentation.
Returns:
State entries to be appended to the device.
diff --git a/scripts/west_commands/runners/canopen_program.py b/scripts/west_commands/runners/canopen_program.py
index e63b518..34c4a36 100644
--- a/scripts/west_commands/runners/canopen_program.py
+++ b/scripts/west_commands/runners/canopen_program.py
@@ -318,7 +318,7 @@
array.add_member(member)
objdict.add_object(array)
- array = canopen.objectdictionary.Array('Program sofware ID', 0x1f56)
+ array = canopen.objectdictionary.Array('Program software ID', 0x1f56)
member = canopen.objectdictionary.Variable('', 0x1f56, subindex=1)
member.data_type = canopen.objectdictionary.UNSIGNED32
array.add_member(member)
diff --git a/scripts/west_commands/runners/jlink.py b/scripts/west_commands/runners/jlink.py
index 4215afe..923eebb 100644
--- a/scripts/west_commands/runners/jlink.py
+++ b/scripts/west_commands/runners/jlink.py
@@ -259,7 +259,7 @@
if self.erase:
lines.append('erase') # Erase all flash sectors
- # Get the build artifact to flash, prefering .hex over .bin
+ # Get the build artifact to flash, preferring .hex over .bin
if self.hex_name is not None and os.path.isfile(self.hex_name):
flash_file = self.hex_name
flash_cmd = f'loadfile {self.hex_name}'
diff --git a/scripts/west_commands/runners/mdb.py b/scripts/west_commands/runners/mdb.py
index 4107487..7cf4049 100644
--- a/scripts/west_commands/runners/mdb.py
+++ b/scripts/west_commands/runners/mdb.py
@@ -192,7 +192,7 @@
def do_add_parser(cls, parser):
parser.add_argument('--jtag', default='digilent',
help='''choose the jtag interface for hardware
- targets, e.g. --jtat=digilent for digilent
+ targets, e.g. --jtag=digilent for digilent
jtag adapter''')
parser.add_argument('--cores', default=1,
help='''choose the number of cores that target has,
diff --git a/scripts/west_commands/zspdx/cmakefileapijson.py b/scripts/west_commands/zspdx/cmakefileapijson.py
index c4357e0..921a092 100644
--- a/scripts/west_commands/zspdx/cmakefileapijson.py
+++ b/scripts/west_commands/zspdx/cmakefileapijson.py
@@ -102,7 +102,7 @@
cfgdir.build = dir_dict.get("build", "")
cfgdir.parentIndex = dir_dict.get("parentIndex", -1)
cfgdir.childIndexes = dir_dict.get("childIndexes", [])
- cfgdir.projectIndex = dir_dict.get("projecttIndex", -1)
+ cfgdir.projectIndex = dir_dict.get("projectIndex", -1)
cfgdir.targetIndexes = dir_dict.get("targetIndexes", [])
minCMakeVer_dict = dir_dict.get("minimumCMakeVersion", {})
cfgdir.minimumCMakeVersion = minCMakeVer_dict.get("string", "")