scripts: dts: write new gen_defines.py
This is joint work with Kumar Gala (see signed-off-by).
This supports a new devicetree macro syntax that is coming. It's not really
worth mixing up the old and the new generation scripts into one file,
because:
- we aim to remove support for the old macros at some point, so it
will be cleaner to start fresh with a new script based on the old one
that only generates the new syntax
- it will avoid regressions to leave the existing code alone while
we're moving users to the new names
Keep the existing script by moving it to gen_legacy_defines.py and
changing a few comments and strings around. It's responsible for
generating:
- devicetree.conf: only needed by deprecated kconfigfunctions
- devicetree_legacy_unfixed.h: "old" devicetree_unfixed.h macros
Put a new gen_defines.py in its place. It generates:
- zephyr.dts
- devicetree_unfixed.h in the new syntax
Include devicetree_legacy_unfixed.h from devicetree.h so no DT users
are affected by this change.
Signed-off-by: Martí Bolívar <marti.bolivar@nordicsemi.no>
Signed-off-by: Kumar Gala <kumar.gala@linaro.org>
diff --git a/scripts/dts/gen_defines.py b/scripts/dts/gen_defines.py
new file mode 100755
index 0000000..ac46b18
--- /dev/null
+++ b/scripts/dts/gen_defines.py
@@ -0,0 +1,628 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2019 - 2020 Nordic Semiconductor ASA
+# Copyright (c) 2019 Linaro Limited
+# SPDX-License-Identifier: BSD-3-Clause
+
+# This script uses edtlib to generate a header file from a devicetree
+# (.dts) file. Information from binding files in YAML format is used
+# as well.
+#
+# Bindings are files that describe devicetree nodes. Devicetree nodes are
+# usually mapped to bindings via their 'compatible = "..."' property.
+#
+# See Zephyr's Devicetree user guide for details.
+#
+# Note: Do not access private (_-prefixed) identifiers from edtlib here (and
+# also note that edtlib is not meant to expose the dtlib API directly).
+# Instead, think of what API you need, and add it as a public documented API in
+# edtlib. This will keep this script simple.
+
+import argparse
+import os
+import pathlib
+import re
+import sys
+
+import edtlib
+
def main():
    # Script entry point.
    #
    # Parses the command line, builds the edtlib.EDT devicetree model,
    # dumps the merged DTS source (a debugging aid), then generates the
    # header: per-node macros in dependency order, followed by tree-wide
    # /chosen and instance-count macros.

    # The out_*() helpers below write to this file; it is kept in a
    # global to avoid threading the file object through every call.
    global header_file

    args = parse_args()

    try:
        edt = edtlib.EDT(args.dts, args.bindings_dirs,
                         # Suppress this warning if it's suppressed in dtc
                         warn_reg_unit_address_mismatch=
                         "-Wno-simple_bus_reg" not in args.dtc_flags)
    except edtlib.EDTError as e:
        sys.exit(f"devicetree error: {e}")

    # Save merged DTS source, as a debugging aid
    with open(args.dts_out, "w", encoding="utf-8") as f:
        print(edt.dts_source, file=f)

    with open(args.header_out, "w", encoding="utf-8") as header_file:
        write_top_comment(edt)

        # Dependency order lets a node's macros refer to macros already
        # generated for the nodes it depends on.
        for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
            # z_path_id is the node's path-based identifier, e.g.
            # "N_S_soc_S_uart_40001000" for /soc/uart@40001000. It is
            # the stem of every DT_... macro generated for the node.
            node.z_path_id = "N_" + "_".join(
                f"S_{str2ident(name)}" for name in node.path[1:].split("/"))
            write_node_comment(node)

            if not node.enabled:
                out_comment("No macros: node is disabled")
                continue
            if not node.matching_compat:
                out_comment("No macros: node has no matching binding")
                continue

            write_idents_and_existence(node)
            write_bus(node)
            write_special_props(node)
            write_vanilla_props(node)

        write_chosen(edt)
        write_inst_num(edt)
+
+
def parse_args():
    # Parses and returns command-line arguments as an argparse.Namespace
    # with attributes 'dts', 'dtc_flags', 'bindings_dirs', 'header_out',
    # and 'dts_out'.

    parser = argparse.ArgumentParser()
    parser.add_argument("--dts", required=True, help="DTS file")
    # Default to "" rather than None: main() runs a substring membership
    # test on this value ("-Wno-simple_bus_reg" not in args.dtc_flags),
    # which would raise TypeError if the option were omitted and the
    # attribute were left as None.
    parser.add_argument("--dtc-flags", default="",
                        help="'dtc' devicetree compiler flags, some of which "
                             "might be respected here")
    parser.add_argument("--bindings-dirs", nargs='+', required=True,
                        help="directory with bindings in YAML format, "
                             "we allow multiple")
    parser.add_argument("--header-out", required=True,
                        help="path to write header to")
    parser.add_argument("--dts-out", required=True,
                        help="path to write merged DTS source code to (e.g. "
                             "as a debugging aid)")

    return parser.parse_args()
+
+
def write_top_comment(edt):
    # Writes the overview comment at the top of the generated header:
    # the DTS input file, the binding directories, and every node in
    # dependency order (ordinal and path).

    lines = [
        "Generated by gen_defines.py",
        "",
        "DTS input file:",
        f"  {edt.dts_path}",
        "",
        "Directories with bindings:",
        f"  {', '.join(map(relativize, edt.bindings_dirs))}",
        "",
        "Nodes in dependency order (ordinal and path):",
    ]

    for scc in edt.scc_order():
        if len(scc) > 1:
            # A strongly-connected component with more than one node
            # means the devicetree has a dependency cycle.
            err("cycle in devicetree involving "
                + ", ".join(node.path for node in scc))
        lines.append(f"  {scc[0].dep_ordinal:<3} {scc[0].path}")

    lines += [
        "",
        "Definitions derived from these nodes in dependency order are next,",
        "followed by /chosen nodes.",
    ]

    out_comment("\n".join(lines) + "\n", blank_before=False)
+
+
def write_node_comment(node):
    # Writes a comment block describing 'node': its path, matching
    # binding (if any), path identifier, dependency ordinal, the nodes
    # it requires and supports, and the binding description.

    parts = [f"Devicetree node:\n  {node.path}\n"]

    if node.matching_compat:
        parts.append(
            f"\nBinding (compatible = {node.matching_compat}):\n"
            f"  {relativize(node.binding_path)}\n")

    parts.append(f"\nNode's path identifier in this file: {node.z_path_id}\n")
    parts.append(f"\nDependency Ordinal: {node.dep_ordinal}\n")

    if node.depends_on:
        parts.append("\nRequires:\n")
        for dep in node.depends_on:
            parts.append(f"  {dep.dep_ordinal:<3} {dep.path}\n")

    if node.required_by:
        parts.append("\nSupports:\n")
        for sup in node.required_by:
            parts.append(f"  {sup.dep_ordinal:<3} {sup.path}\n")

    if node.description:
        # Indent the binding description by two spaces
        indented = "\n".join(
            "  " + line for line in node.description.splitlines())
        parts.append(f"\nDescription:\n{indented}\n")

    out_comment("".join(parts))
+
+
def relativize(path):
    # Returns 'path' expressed relative to $ZEPHYR_BASE, prefixed with a
    # "$ZEPHYR_BASE/" hint, when it lies inside that tree. Otherwise (or
    # when ZEPHYR_BASE is unset) returns 'path' unchanged.

    zephyr_base = os.environ.get("ZEPHYR_BASE")
    if zephyr_base is None:
        return path

    try:
        relative = pathlib.Path(path).relative_to(zephyr_base)
    except ValueError:
        # 'path' is not under ZEPHYR_BASE
        return path

    return str("$ZEPHYR_BASE" / relative)
+
+
def write_idents_and_existence(node):
    # Writes the node's existence flag plus its alternate identifiers
    # (aliases, per-compatible instance numbers, node labels), each of
    # which expands to the node's path-based identifier.

    # Aliases
    alt_ids = [f"N_ALIAS_{str2ident(alias)}" for alias in node.aliases]
    # Instances
    for compat in node.compats:
        if not node.enabled:
            continue
        inst = node.edt.compat2enabled[compat].index(node)
        alt_ids.append(f"N_INST_{inst}_{str2ident(compat)}")
    # Node labels
    alt_ids += [f"N_NODELABEL_{str2ident(label)}" for label in node.labels]

    out_comment("Existence and alternate IDs:")
    out_dt_define(node.z_path_id + "_EXISTS", 1)

    if alt_ids:
        # Pad to the longest macro name so the values line up
        width = max(len("DT_" + ident) for ident in alt_ids)
        for ident in alt_ids:
            out_dt_define(ident, "DT_" + node.z_path_id, width=width)
+
+
def write_bus(node):
    # Writes macros describing the bus the node sits on, if any: a flag
    # for the bus type plus a link to the bus controller node.

    controller = node.bus_node
    if not controller:
        return

    if not controller.label:
        err(f"missing 'label' property on bus node {controller!r}")

    out_comment(
        f"Bus info (controller: '{controller.path}', type: '{node.on_bus}')")
    out_dt_define(f"{node.z_path_id}_BUS_{str2ident(node.on_bus)}", 1)
    out_dt_define(f"{node.z_path_id}_BUS", f"DT_{controller.z_path_id}")
+
+
def write_special_props(node):
    # Writes macros for properties that need special handling beyond
    # what write_vanilla_props() can produce from their raw values.

    out_comment("Special property macros:")

    # Properties with dedicated semantics in the devicetree specification
    write_regs(node)
    write_interrupts(node)
    write_compatibles(node)
+
+
def write_regs(node):
    # Writes address/size macros for the node's register blocks. edtlib
    # has already applied #address-cells and #size-cells, so the values
    # here are fully resolved.

    by_index = []  # (macro, value) pairs keyed by register index
    by_name = []   # (macro, value) pairs keyed by register name
    path_id = node.z_path_id

    if node.regs is not None:
        by_index.append((f"{path_id}_REG_NUM", len(node.regs)))

        for i, reg in enumerate(node.regs):
            if reg.addr is not None:
                idx_macro = f"{path_id}_REG_IDX_{i}_VAL_ADDRESS"
                by_index.append(
                    (idx_macro, f"{reg.addr} /* {hex(reg.addr)} */"))
                if reg.name:
                    by_name.append(
                        (f"{path_id}_REG_NAME_{reg.name}_VAL_ADDRESS",
                         f"DT_{idx_macro}"))

            if reg.size is not None:
                idx_macro = f"{path_id}_REG_IDX_{i}_VAL_SIZE"
                by_index.append(
                    (idx_macro, f"{reg.size} /* {hex(reg.size)} */"))
                if reg.name:
                    by_name.append(
                        (f"{path_id}_REG_NAME_{reg.name}_VAL_SIZE",
                         f"DT_{idx_macro}"))

    # Index-based macros first, then the name-based aliases
    for macro, val in by_index + by_name:
        out_dt_define(macro, val)
+
def write_interrupts(node):
    # Writes macros for the node's 'interrupts' property: a count of
    # interrupt specifiers plus, for each specifier, one macro per cell
    # (by index, and also by name when the specifier is named).
    #
    # interrupts property: we have some hard-coded logic for interrupt
    # mapping here.
    #
    # TODO: can we push map_arm_gic_irq_type() and
    # encode_zephyr_multi_level_irq() out of Python and into C with
    # macro magic in devicetree.h?

    def map_arm_gic_irq_type(irq, irq_num):
        # Maps ARM GIC IRQ (type)+(index) combo to linear IRQ number
        if "type" not in irq.data:
            err(f"Expected binding for {irq.controller!r} to have 'type' in "
                "interrupt-cells")
        irq_type = irq.data["type"]

        if irq_type == 0: # GIC_SPI
            # SPI interrupt IDs start at 32 in the GIC
            return irq_num + 32
        if irq_type == 1: # GIC_PPI
            # PPI interrupt IDs start at 16 in the GIC
            return irq_num + 16
        err(f"Invalid interrupt type specified for {irq!r}")

    def encode_zephyr_multi_level_irq(irq, irq_num):
        # See doc/reference/kernel/other/interrupts.rst for details
        # on how this encoding works

        irq_ctrl = irq.controller
        # Look for interrupt controller parent until we have none.
        # Each nesting level shifts the accumulated number up by 8 bits
        # and ORs in the parent controller's own IRQ number.
        while irq_ctrl.interrupts:
            irq_num = (irq_num + 1) << 8
            if "irq" not in irq_ctrl.interrupts[0].data:
                err(f"Expected binding for {irq_ctrl!r} to have 'irq' in "
                    "interrupt-cells")
            irq_num |= irq_ctrl.interrupts[0].data["irq"]
            irq_ctrl = irq_ctrl.interrupts[0].controller
        return irq_num

    idx_vals = []   # (macro, value) pairs for index-based macros
    name_vals = []  # (macro, value) pairs for name-based macros
    path_id = node.z_path_id

    if node.interrupts is not None:
        idx_vals.append((f"{path_id}_IRQ_NUM", len(node.interrupts)))

    # NOTE(review): this loop is outside the 'if' above, so it assumes
    # node.interrupts is never actually None (presumably edtlib always
    # returns a list) — confirm against edtlib.
    for i, irq in enumerate(node.interrupts):
        for cell_name, cell_value in irq.data.items():
            name = str2ident(cell_name)

            if cell_name == "irq":
                # The "irq" cell may need controller-specific remapping
                # before it is usable as a Zephyr IRQ number
                if "arm,gic" in irq.controller.compats:
                    cell_value = map_arm_gic_irq_type(irq, cell_value)
                cell_value = encode_zephyr_multi_level_irq(irq, cell_value)

            idx_macro = f"{path_id}_IRQ_IDX_{i}_VAL_{name}"
            idx_vals.append((idx_macro, cell_value))
            idx_vals.append((idx_macro + "_EXISTS", 1))
            if irq.name:
                name_macro = \
                    f"{path_id}_IRQ_NAME_{str2ident(irq.name)}_VAL_{name}"
                name_vals.append((name_macro, f"DT_{idx_macro}"))
                name_vals.append((name_macro + "_EXISTS", 1))

    for macro, val in idx_vals:
        out_dt_define(macro, val)
    for macro, val in name_vals:
        out_dt_define(macro, val)
+
+
def write_compatibles(node):
    # Writes one COMPAT_MATCHES flag per compatible the node declares.
    # Whether edtlib / Zephyr's binding language recognizes a compatible
    # is irrelevant here; what the node itself claims is what matters.

    for compatible in node.compats:
        macro = f"{node.z_path_id}_COMPAT_MATCHES_{str2ident(compatible)}"
        out_dt_define(macro, 1)
+
+
def write_vanilla_props(node):
    # Writes macros for every property in the "properties" section of
    # the node's binding.
    #
    # Special properties (regs, interrupts, ...) get macros here too.
    # That is harmless: these macros live in a different namespace than
    # the ones written by write_special_props(), so nothing conflicts,
    # and skipping them would require never-ending special-case code.
    # Special cases aren't special enough to break the rules.

    macro2val = {}
    for prop_name, prop in node.props.items():
        macro = f"{node.z_path_id}_P_{str2ident(prop_name)}"

        # DT_N_<node-id>_P_<prop-id>
        val = prop2value(prop)
        if val is not None:
            macro2val[macro] = val

        # DT_N_<node-id>_P_<prop-id>_ENUM_IDX
        if prop.enum_index is not None:
            macro2val[macro + "_ENUM_IDX"] = prop.enum_index

        if "phandle" in prop.type:
            macro2val.update(phandle_macros(prop, macro))
        elif "array" in prop.type:
            # DT_N_<node-id>_P_<prop-id>_IDX_<i>
            for i, element in enumerate(prop.val):
                if isinstance(element, str):
                    element = quote_str(element)
                macro2val[f"{macro}_IDX_{i}"] = element

        # DT_N_<node-id>_P_<prop-id>_LEN
        plen = prop_len(prop)
        if plen is not None:
            macro2val[macro + "_LEN"] = plen

        macro2val[f"{macro}_EXISTS"] = 1

    if not macro2val:
        out_comment("(No generic property macros)")
        return

    out_comment("Generic property macros:")
    for macro, val in macro2val.items():
        out_dt_define(macro, val)
+
+
def prop2value(prop):
    # Returns a single well-defined C rvalue representing 'prop', or
    # None when the property type has no such representation.

    prop_type = prop.type

    if prop_type == "string":
        return quote_str(prop.val)
    if prop_type == "int":
        return prop.val
    if prop_type == "boolean":
        return 1 if prop.val else 0
    if prop_type in ("array", "uint8-array"):
        return list2init(f"{elem} /* {hex(elem)} */" for elem in prop.val)
    if prop_type == "string-array":
        return list2init(quote_str(elem) for elem in prop.val)

    # phandle, phandles, phandle-array, path, compound: nothing
    return None
+
+
def prop_len(prop):
    # Returns the length to use for the property's _LEN macro, or None
    # when no _LEN macro should be generated.
    #
    # "reg" and "interrupts" are deliberately excluded: although they
    # have array type, their array lengths are semantically meaningless
    # once #address-cells/#size-cells (for "reg") and #interrupt-cells
    # (for "interrupts") are taken into account. Dedicated macros cover
    # the number of register blocks / interrupt specifiers; excluding
    # them here makes DT_PROP_LEN(node_id, ...) fail fast with a build
    # error at the devicetree.h layer, steering users to those macros.

    if prop.type == "phandle":
        # A lone phandle behaves like a phandles of length 1
        return 1

    array_types = ("array", "uint8-array", "string-array",
                   "phandles", "phandle-array")
    if prop.type in array_types and prop.name not in ("reg", "interrupts"):
        return len(prop.val)

    return None
+
+
def phandle_macros(prop, macro):
    # Returns a dict of macros for a phandle, phandles, or phandle-array
    # property. 'macro' is the N_<node-id>_P_<prop-id> prefix.
    #
    # These are special because their values cannot be serialized
    # without label properties, which Zephyr is trying to stop needing
    # (labels are great for humans but cost code size and boot time).
    # The slightly odd names let devicetree.h share the same macros
    # across all three property types.

    result = {}

    if prop.type == "phandle":
        # A phandle is treated as a phandles with fixed length 1
        result[f"{macro}_IDX_0_PH"] = f"DT_{prop.val.z_path_id}"
    elif prop.type == "phandles":
        for i, target in enumerate(prop.val):
            result[f"{macro}_IDX_{i}_PH"] = f"DT_{target.z_path_id}"
    elif prop.type == "phandle-array":
        for i, entry in enumerate(prop.val):
            result.update(controller_and_data_macros(entry, i, macro))

    return result
+
+
def controller_and_data_macros(entry, i, macro):
    # Helper for phandle_macros(): returns the macros for one
    # ControllerAndData in a phandle-array — the controller link plus
    # the specifier cells, by index and (when the entry is named) also
    # by name.

    result = {}
    prefix = f"{macro}_IDX_{i}"
    controller_ref = f"DT_{entry.controller.z_path_id}"

    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_PH
    result[f"{prefix}_PH"] = controller_ref
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_VAL_<VAL>
    for cell, val in entry.data.items():
        cell_ident = str2ident(cell)
        result[f"{prefix}_VAL_{cell_ident}"] = val
        result[f"{prefix}_VAL_{cell_ident}_EXISTS"] = 1

    if not entry.name:
        return result

    name = str2ident(entry.name)
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_NAME
    result[f"{prefix}_NAME"] = quote_str(entry.name)
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_PH
    result[f"{macro}_NAME_{name}_PH"] = controller_ref
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_VAL_<VAL>
    for cell in entry.data:
        cell_ident = str2ident(cell)
        result[f"{macro}_NAME_{name}_VAL_{cell_ident}"] = \
            f"DT_{prefix}_VAL_{cell_ident}"
        result[f"{macro}_NAME_{name}_VAL_{cell_ident}_EXISTS"] = 1

    return result
+
+
def write_chosen(edt):
    # Writes macros for the devicetree's /chosen nodes: one macro
    # resolving each chosen name to the chosen node's identifier, plus
    # an _EXISTS flag for each.

    out_comment("Chosen nodes\n")

    chosen = {}
    for name, node in edt.chosen_nodes.items():
        chosen[f"DT_CHOSEN_{str2ident(name)}"] = f"DT_{node.z_path_id}"
        chosen[f"DT_CHOSEN_{str2ident(name)}_EXISTS"] = 1

    # A devicetree may have no /chosen nodes at all, in which case
    # max() would raise ValueError on the empty sequence.
    if not chosen:
        return

    max_len = max(map(len, chosen))
    for macro, value in chosen.items():
        out_define(macro, value, width=max_len)
+
+
def write_inst_num(edt):
    # Writes one DT_N_INST_<compat>_NUM macro per compatible declared by
    # at least one enabled node with a matching binding, giving the
    # number of enabled instances of that compatible.

    out_comment("Number of instances\n")

    # Collect compatibles from enabled, bound nodes in dependency order,
    # keeping first-seen order with no duplicates
    compats = []
    for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
        if not node.enabled or not node.matching_compat:
            continue
        for compat in node.compats:
            if compat not in compats:
                compats.append(compat)

    for compat in compats:
        out_define(f"DT_N_INST_{str2ident(compat)}_NUM",
                   len(edt.compat2enabled[compat]))
+
+
def str2ident(s):
    # Lowercases 's' and replaces the characters that commonly appear in
    # devicetree names but are invalid in identifiers (- , . @ / +) with
    # underscores.

    return s.lower().translate(str.maketrans("-,.@/+", "______"))
+
+
def list2init(l):
    # Renders iterable 'l' of strings as a C array initializer,
    # e.g. ["1", "2"] -> "{1, 2}".

    return f"{{{', '.join(l)}}}"
+
+
def out_dt_define(macro, val, width=None, deprecation_msg=None):
    # Writes "#define DT_<macro> <val>" to the header file and returns
    # the full generated macro name ("DT_" + macro).
    #
    # If 'width' is given, the macro name is left-justified to that many
    # characters with the value immediately after; otherwise whitespace
    # between name and value is chosen automatically. If a
    # 'deprecation_msg' string is given, uses of the macro generate a
    # warning via __WARN(<deprecation_msg>).

    full_macro = f"DT_{macro}"
    out_define(full_macro, val, width=width, deprecation_msg=deprecation_msg)
    return full_macro
+
+
def out_define(macro, val, width=None, deprecation_msg=None):
    # Helper for out_dt_define(). Writes "#define <macro> <val>" to the
    # header file, optionally left-justifying the macro name to 'width'
    # characters and appending a __WARN(...) deprecation marker after
    # the name.

    if deprecation_msg:
        warn = f' __WARN("{deprecation_msg}")'
    else:
        warn = ""

    name = macro.ljust(width) if width else macro
    print(f"#define {name}{warn} {val}", file=header_file)
+
+
def out_comment(s, blank_before=True):
    # Writes 's' to the header file as a C comment. 's' may span
    # multiple lines. blank_before=True prints an empty line first.

    if blank_before:
        print(file=header_file)

    if "\n" not in s:
        # Single-line comments come out as: /* foo bar */
        print("/* " + s + " */", file=header_file)
        return

    # Multi-line comments come out as:
    #
    #   /*
    #    * first line
    #    * second line
    #    *
    #    * empty line before this line
    #    */
    body = []
    for line in s.splitlines():
        # No trailing space after '*' on empty lines: trailing
        # whitespace turns red in Vim if space error checking is on,
        # which is annoying.
        body.append(" * " + line if line.strip() else " *")
    print("\n".join(["/*"] + body + [" */"]), file=header_file)
+
+
def escape(s):
    # Returns 's' with backslashes and double quotes backslash-escaped,
    # suitable for embedding in a C string literal.

    # Escape backslashes first so the quote escapes added afterwards
    # are not themselves re-escaped
    escaped = s.replace("\\", "\\\\")
    return escaped.replace('"', '\\"')
+
+
def quote_str(s):
    # Returns 's' as a double-quoted C string literal, with embedded
    # quotes and backslashes escaped.

    return '"' + escape(s) + '"'
+
+
def err(s):
    # Fatal-error helper used throughout the generation code: raises an
    # exception with message 's'. (Callers rely on the plain Exception
    # type, so it must not be narrowed here.)
    raise Exception(s)
+
+
+if __name__ == "__main__":
+ main()