Martí Bolívar | dc85edd | 2020-02-28 15:26:52 -0800 | [diff] [blame] | 1 | #!/usr/bin/env python3 |
| 2 | |
| 3 | # Copyright (c) 2019 - 2020 Nordic Semiconductor ASA |
| 4 | # Copyright (c) 2019 Linaro Limited |
| 5 | # SPDX-License-Identifier: BSD-3-Clause |
| 6 | |
| 7 | # This script uses edtlib to generate a header file from a devicetree |
| 8 | # (.dts) file. Information from binding files in YAML format is used |
| 9 | # as well. |
| 10 | # |
| 11 | # Bindings are files that describe devicetree nodes. Devicetree nodes are |
| 12 | # usually mapped to bindings via their 'compatible = "..."' property. |
| 13 | # |
| 14 | # See Zephyr's Devicetree user guide for details. |
| 15 | # |
| 16 | # Note: Do not access private (_-prefixed) identifiers from edtlib here (and |
| 17 | # also note that edtlib is not meant to expose the dtlib API directly). |
| 18 | # Instead, think of what API you need, and add it as a public documented API in |
| 19 | # edtlib. This will keep this script simple. |
| 20 | |
| 21 | import argparse |
| 22 | import os |
| 23 | import pathlib |
| 24 | import re |
| 25 | import sys |
| 26 | |
| 27 | import edtlib |
| 28 | |
def main():
    # Entry point: parse the devicetree with edtlib, then generate the
    # header file and the merged DTS debug output named on the command line.

    # out_define()/out_comment() below write to this file object; it is
    # module-global so the many small output helpers don't need it passed
    # around explicitly.
    global header_file

    args = parse_args()

    try:
        edt = edtlib.EDT(args.dts, args.bindings_dirs,
                         # Suppress this warning if it's suppressed in dtc
                         # NOTE(review): assumes args.dtc_flags is a string;
                         # if --dtc-flags is omitted it is None and this
                         # 'in' test raises TypeError — confirm callers
                         # always pass --dtc-flags
                         warn_reg_unit_address_mismatch=
                         "-Wno-simple_bus_reg" not in args.dtc_flags)
    except edtlib.EDTError as e:
        sys.exit(f"devicetree error: {e}")

    # Save merged DTS source, as a debugging aid
    with open(args.dts_out, "w", encoding="utf-8") as f:
        print(edt.dts_source, file=f)

    with open(args.header_out, "w", encoding="utf-8") as header_file:
        write_top_comment(edt)

        # Visit nodes in dependency order so macros for a node appear after
        # those of the nodes it depends on.
        for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
            # z_path_id is the node's path-based macro identifier component,
            # derived from the path components after the root "/"
            node.z_path_id = "N_" + "_".join(
                f"S_{str2ident(name)}" for name in node.path[1:].split("/"))
            write_node_comment(node)

            # Disabled nodes and nodes without a matching binding get only
            # a comment, no macros
            if not node.enabled:
                out_comment("No macros: node is disabled")
                continue
            if not node.matching_compat:
                out_comment("No macros: node has no matching binding")
                continue

            write_idents_and_existence(node)
            write_bus(node)
            write_special_props(node)
            write_vanilla_props(node)

        # Tree-wide macros (chosen nodes, instance counts)
        write_chosen(edt)
        write_inst_num(edt)
| 68 | |
| 69 | |
def parse_args():
    # Returns parsed command-line arguments as an argparse.Namespace with
    # 'dts', 'dtc_flags', 'bindings_dirs', 'header_out', and 'dts_out'
    # attributes.

    parser = argparse.ArgumentParser()
    parser.add_argument("--dts", required=True, help="DTS file")
    # Default to "" rather than None: main() runs a substring test
    # ('"-Wno-simple_bus_reg" not in args.dtc_flags') on this value,
    # which would raise TypeError if the option were omitted and the
    # attribute stayed None
    parser.add_argument("--dtc-flags", default="",
                        help="'dtc' devicetree compiler flags, some of which "
                             "might be respected here")
    parser.add_argument("--bindings-dirs", nargs='+', required=True,
                        help="directory with bindings in YAML format, "
                             "we allow multiple")
    parser.add_argument("--header-out", required=True,
                        help="path to write header to")
    parser.add_argument("--dts-out", required=True,
                        help="path to write merged DTS source code to (e.g. "
                             "as a debugging aid)")

    return parser.parse_args()
| 88 | |
| 89 | |
def write_top_comment(edt):
    # Writes an overview comment with misc. info at the top of the header and
    # configuration file.
    #
    # Raises (via err()) if the devicetree contains a dependency cycle.

    s = f"""\
Generated by gen_defines.py

DTS input file:
  {edt.dts_path}

Directories with bindings:
  {", ".join(map(relativize, edt.bindings_dirs))}

Nodes in dependency order (ordinal and path):
"""

    for scc in edt.scc_order():
        # A strongly-connected component with more than one node is a
        # dependency cycle, which the devicetree must not contain
        if len(scc) > 1:
            err("cycle in devicetree involving "
                + ", ".join(node.path for node in scc))
        s += f"  {scc[0].dep_ordinal:<3} {scc[0].path}\n"

    s += """
Definitions derived from these nodes in dependency order are next,
followed by /chosen nodes.
"""

    out_comment(s, blank_before=False)
| 118 | |
| 119 | |
def write_node_comment(node):
    # Writes a comment describing 'node' to the header and configuration
    # file: path, binding (if any), path identifier, dependency ordinal,
    # dependency relationships, and the binding's description text.

    s = f"""\
Devicetree node:
  {node.path}
"""

    if node.matching_compat:
        s += f"""
Binding (compatible = {node.matching_compat}):
  {relativize(node.binding_path)}
"""
    s += f"""
Node's path identifier in this file: {node.z_path_id}
"""

    s += f"\nDependency Ordinal: {node.dep_ordinal}\n"

    if node.depends_on:
        s += "\nRequires:\n"
        for dep in node.depends_on:
            s += f"  {dep.dep_ordinal:<3} {dep.path}\n"

    if node.required_by:
        s += "\nSupports:\n"
        for req in node.required_by:
            s += f"  {req.dep_ordinal:<3} {req.path}\n"

    if node.description:
        # Indent description by two spaces
        s += "\nDescription:\n" + \
            "\n".join("  " + line for line in
                      node.description.splitlines()) + \
            "\n"

    out_comment(s)
| 157 | |
| 158 | |
def relativize(path):
    # If 'path' lies within $ZEPHYR_BASE, returns it rewritten as
    # "$ZEPHYR_BASE/<relative part>". Otherwise (including when the
    # ZEPHYR_BASE environment variable is unset), returns 'path' unchanged.

    zephyr_base = os.environ.get("ZEPHYR_BASE")
    if zephyr_base is None:
        return path

    try:
        relative = pathlib.Path(path).relative_to(zephyr_base)
    except ValueError:
        # Not within ZEPHYR_BASE
        return path

    return str("$ZEPHYR_BASE" / relative)
| 173 | |
| 174 | |
def write_idents_and_existence(node):
    # Writes macros related to the node's aliases, labels, etc.,
    # as well as existence flags.

    # Aliases
    idents = [f"N_ALIAS_{str2ident(alias)}" for alias in node.aliases]

    # Instances. The enabled check is loop-invariant, so test it once up
    # front instead of once per compatible. (main() only calls us for
    # enabled nodes, but keep the guard for safety.)
    if node.enabled:
        for compat in node.compats:
            instance_no = node.edt.compat2enabled[compat].index(node)
            idents.append(f"N_INST_{instance_no}_{str2ident(compat)}")

    # Node labels
    idents.extend(f"N_NODELABEL_{str2ident(label)}" for label in node.labels)

    out_comment("Existence and alternate IDs:")
    out_dt_define(node.z_path_id + "_EXISTS", 1)

    # Only determine maxlen if we have any idents
    if idents:
        maxlen = max(len("DT_" + ident) for ident in idents)
        for ident in idents:
            out_dt_define(ident, "DT_" + node.z_path_id, width=maxlen)
| 198 | |
| 199 | |
def write_bus(node):
    # Writes macros describing the node's bus controller, if it has one.
    # Does nothing for nodes that are not on a bus.

    controller = node.bus_node
    if not controller:
        return

    if not controller.label:
        err(f"missing 'label' property on bus node {controller!r}")

    out_comment(f"Bus info (controller: '{controller.path}', type: '{node.on_bus}')")
    out_dt_define(f"{node.z_path_id}_BUS_{str2ident(node.on_bus)}", 1)
    out_dt_define(f"{node.z_path_id}_BUS", f"DT_{controller.z_path_id}")
| 213 | |
| 214 | |
def write_special_props(node):
    # Writes required macros for special case properties, when the
    # data cannot otherwise be obtained from write_vanilla_props()
    # results.

    out_comment("Special property macros:")

    # Properties that are special to the devicetree specification, each
    # with a dedicated generator; order determines output order
    for write_prop in (write_regs, write_interrupts, write_compatibles):
        write_prop(node)
| 226 | |
| 227 | |
def write_regs(node):
    # reg property: edtlib knows the right #address-cells and
    # #size-cells, and can therefore pack the register base addresses
    # and sizes correctly. Index-based macros are written first, then
    # the name-based aliases pointing at them.

    if node.regs is None:
        return

    path_id = node.z_path_id
    by_index = [(f"{path_id}_REG_NUM", len(node.regs))]
    by_name = []

    for i, reg in enumerate(node.regs):
        # Address and size get identical treatment, just different suffixes
        for value, suffix in ((reg.addr, "ADDRESS"), (reg.size, "SIZE")):
            if value is None:
                continue
            idx_macro = f"{path_id}_REG_IDX_{i}_VAL_{suffix}"
            by_index.append((idx_macro, f"{value} /* {hex(value)} */"))
            if reg.name:
                by_name.append((f"{path_id}_REG_NAME_{reg.name}_VAL_{suffix}",
                                f"DT_{idx_macro}"))

    for macro, val in by_index + by_name:
        out_dt_define(macro, val)
| 261 | |
def write_interrupts(node):
    # interrupts property: we have some hard-coded logic for interrupt
    # mapping here.
    #
    # TODO: can we push map_arm_gic_irq_type() and
    # encode_zephyr_multi_level_irq() out of Python and into C with
    # macro magic in devicetree.h?

    def map_arm_gic_irq_type(irq, irq_num):
        # Maps ARM GIC IRQ (type)+(index) combo to linear IRQ number
        if "type" not in irq.data:
            err(f"Expected binding for {irq.controller!r} to have 'type' in "
                "interrupt-cells")
        irq_type = irq.data["type"]

        if irq_type == 0:  # GIC_SPI
            return irq_num + 32
        if irq_type == 1:  # GIC_PPI
            return irq_num + 16
        err(f"Invalid interrupt type specified for {irq!r}")

    def encode_zephyr_multi_level_irq(irq, irq_num):
        # See doc/reference/kernel/other/interrupts.rst for details
        # on how this encoding works

        irq_ctrl = irq.controller
        # Look for interrupt controller parent until we have none;
        # each extra level shifts the accumulated number left by 8 bits
        # and ORs in the parent's own 'irq' cell
        while irq_ctrl.interrupts:
            irq_num = (irq_num + 1) << 8
            if "irq" not in irq_ctrl.interrupts[0].data:
                err(f"Expected binding for {irq_ctrl!r} to have 'irq' in "
                    "interrupt-cells")
            irq_num |= irq_ctrl.interrupts[0].data["irq"]
            irq_ctrl = irq_ctrl.interrupts[0].controller
        return irq_num

    # Index-based macros are written first, then name-based aliases
    idx_vals = []
    name_vals = []
    path_id = node.z_path_id

    if node.interrupts is not None:
        idx_vals.append((f"{path_id}_IRQ_NUM", len(node.interrupts)))

        for i, irq in enumerate(node.interrupts):
            for cell_name, cell_value in irq.data.items():
                name = str2ident(cell_name)

                # Only the 'irq' cell gets the GIC/multi-level remapping
                if cell_name == "irq":
                    if "arm,gic" in irq.controller.compats:
                        cell_value = map_arm_gic_irq_type(irq, cell_value)
                    cell_value = encode_zephyr_multi_level_irq(irq, cell_value)

                idx_macro = f"{path_id}_IRQ_IDX_{i}_VAL_{name}"
                idx_vals.append((idx_macro, cell_value))
                idx_vals.append((idx_macro + "_EXISTS", 1))
                if irq.name:
                    name_macro = \
                        f"{path_id}_IRQ_NAME_{str2ident(irq.name)}_VAL_{name}"
                    name_vals.append((name_macro, f"DT_{idx_macro}"))
                    name_vals.append((name_macro + "_EXISTS", 1))

    for macro, val in idx_vals:
        out_dt_define(macro, val)
    for macro, val in name_vals:
        out_dt_define(macro, val)
| 327 | |
| 328 | |
def write_compatibles(node):
    # Writes a macro for each of the node's compatibles. We don't care
    # about whether edtlib / Zephyr's binding language recognizes
    # them. The compatibles the node provides are what is important.

    path_id = node.z_path_id
    for compat in node.compats:
        ident = str2ident(compat)
        out_dt_define(f"{path_id}_COMPAT_MATCHES_{ident}", 1)
| 337 | |
| 338 | |
def write_vanilla_props(node):
    # Writes macros for any and all properties defined in the
    # "properties" section of the binding for the node.
    #
    # This does generate macros for special properties as well, like
    # regs, etc. Just let that be rather than bothering to add
    # never-ending amounts of special case code here to skip special
    # properties. This function's macros can't conflict with
    # write_special_props() macros, because they're in different
    # namespaces. Special cases aren't special enough to break the rules.

    defines = {}
    for prop_name, prop in node.props.items():
        ident = f"{node.z_path_id}_P_{str2ident(prop_name)}"

        value = prop2value(prop)
        if value is not None:
            # DT_N_<node-id>_P_<prop-id>
            defines[ident] = value

        if prop.enum_index is not None:
            # DT_N_<node-id>_P_<prop-id>_ENUM_IDX
            defines[ident + "_ENUM_IDX"] = prop.enum_index

        if "phandle" in prop.type:
            defines.update(phandle_macros(prop, ident))
        elif "array" in prop.type:
            # DT_N_<node-id>_P_<prop-id>_IDX_<i>
            for i, element in enumerate(prop.val):
                defines[f"{ident}_IDX_{i}"] = (
                    quote_str(element) if isinstance(element, str)
                    else element)

        length = prop_len(prop)
        if length is not None:
            # DT_N_<node-id>_P_<prop-id>_LEN
            defines[ident + "_LEN"] = length

        defines[f"{ident}_EXISTS"] = 1

    if not defines:
        out_comment("(No generic property macros)")
        return

    out_comment("Generic property macros:")
    for macro, val in defines.items():
        out_dt_define(macro, val)
| 385 | |
| 386 | |
def prop2value(prop):
    # Gets the macro value for property 'prop', if there is
    # a single well-defined C rvalue that it can be represented as.
    # Returns None if there isn't one.

    ptype = prop.type

    if ptype == "string":
        return quote_str(prop.val)
    if ptype == "int":
        return prop.val
    if ptype == "boolean":
        return 1 if prop.val else 0
    if ptype in ("array", "uint8-array"):
        return list2init(f"{val} /* {hex(val)} */" for val in prop.val)
    if ptype == "string-array":
        return list2init(quote_str(val) for val in prop.val)

    # phandle, phandles, phandle-array, path, compound: nothing
    return None
| 409 | |
| 410 | |
def prop_len(prop):
    # Returns the property's length if and only if we should generate
    # a _LEN macro for the property. Otherwise, returns None.
    #
    # This deliberately excludes reg and interrupts.
    # While they have array type, their lengths as arrays are
    # basically nonsense semantically due to #address-cells and
    # #size-cells for "reg" and #interrupt-cells for "interrupts".
    #
    # We have special purpose macros for the number of register blocks
    # / interrupt specifiers. Excluding them from this list means
    # DT_PROP_LEN(node_id, ...) fails fast at the devicetree.h layer
    # with a build error. This forces users to switch to the right
    # macros.

    if prop.type == "phandle":
        # A phandle is treated as having length 1
        return 1

    countable_types = ("array", "uint8-array", "string-array",
                       "phandles", "phandle-array")
    if prop.type in countable_types and prop.name not in ("reg", "interrupts"):
        return len(prop.val)

    return None
| 435 | |
| 436 | |
def phandle_macros(prop, macro):
    # Returns a dict of macros for phandle or phandles property 'prop'.
    #
    # The 'macro' argument is the N_<node-id>_P_<prop-id> bit.
    #
    # These are currently special because we can't serialize their
    # values without using label properties, which we're trying to get
    # away from needing in Zephyr. (Label properties are great for
    # humans, but have drawbacks for code size and boot time.)
    #
    # The names look a bit weird to make it easier for devicetree.h
    # to use the same macros for phandle, phandles, and phandle-array.

    result = {}
    ptype = prop.type

    if ptype == "phandle":
        # A single phandle gets the same shape as a phandles of length 1
        result[f"{macro}_IDX_0_PH"] = f"DT_{prop.val.z_path_id}"
    elif ptype == "phandles":
        for i, target in enumerate(prop.val):
            result[f"{macro}_IDX_{i}_PH"] = f"DT_{target.z_path_id}"
    elif ptype == "phandle-array":
        for i, entry in enumerate(prop.val):
            result.update(controller_and_data_macros(entry, i, macro))

    return result
| 463 | |
| 464 | |
def controller_and_data_macros(entry, i, macro):
    # Helper procedure used by phandle_macros().
    #
    # Writes the "controller" (i.e. the phandle target's identifier) and
    # associated data macros for one ControllerAndData entry, both by
    # index and — when the entry is named — by name.

    ret = {}
    controller_ref = f"DT_{entry.controller.z_path_id}"

    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_PH
    ret[f"{macro}_IDX_{i}_PH"] = controller_ref

    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_VAL_<VAL>
    for cell, val in entry.data.items():
        ident = str2ident(cell)
        ret[f"{macro}_IDX_{i}_VAL_{ident}"] = val
        ret[f"{macro}_IDX_{i}_VAL_{ident}_EXISTS"] = 1

    if not entry.name:
        return ret

    name = str2ident(entry.name)
    # DT_N_<node-id>_P_<prop-id>_IDX_<i>_NAME
    ret[f"{macro}_IDX_{i}_NAME"] = quote_str(entry.name)
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_PH
    ret[f"{macro}_NAME_{name}_PH"] = controller_ref
    # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_VAL_<VAL>: aliases to the
    # index-based macros above
    for cell, val in entry.data.items():
        ident = str2ident(cell)
        ret[f"{macro}_NAME_{name}_VAL_{ident}"] = \
            f"DT_{macro}_IDX_{i}_VAL_{ident}"
        ret[f"{macro}_NAME_{name}_VAL_{ident}_EXISTS"] = 1

    return ret
| 498 | |
| 499 | |
def write_chosen(edt):
    # Tree-wide information such as chosen nodes is printed here.

    out_comment("Chosen nodes\n")

    macros = {}
    for name, node in edt.chosen_nodes.items():
        ident = f"DT_CHOSEN_{str2ident(name)}"
        macros[ident] = f"DT_{node.z_path_id}"
        macros[ident + "_EXISTS"] = 1

    # Align values on the longest macro name; default=0 handles the
    # no-chosen-nodes case
    width = max(map(len, macros), default=0)
    for macro, value in macros.items():
        out_define(macro, value, width=width)
| 511 | |
| 512 | |
def write_inst_num(edt):
    # Tree-wide information such as number of instances is printed here:
    # one DT_N_INST_<compat>_NUM macro per compatible that appears on an
    # enabled node with a matching binding.

    out_comment("Number of instances\n")

    # Walk the nodes to build which compats we need to generate for.
    # A dict is used as an ordered set: it preserves first-seen order
    # like the original list did, but membership insertion is O(1)
    # instead of an O(n) 'in' scan per compatible.
    compats = {}
    for node in sorted(edt.nodes, key=lambda node: node.dep_ordinal):
        if not node.enabled:
            continue
        if not node.matching_compat:
            continue
        for compat in node.compats:
            compats[compat] = None

    for compat in compats:
        num_inst = len(edt.compat2enabled[compat])
        out_define(f"DT_N_INST_{str2ident(compat)}_NUM", num_inst)
| 532 | |
| 533 | |
def str2ident(s):
    # Converts 's' to a form suitable for (part of) an identifier:
    # lowercased, with each of the characters - , . @ / + replaced by '_'

    return s.lower().translate(str.maketrans("-,.@/+", "______"))
| 538 | |
| 539 | |
def list2init(l):
    # Converts 'l', a Python list (or iterable) of strings, to a C array
    # initializer like "{a, b, c}"

    return f"{{{', '.join(l)}}}"
| 544 | |
| 545 | |
def out_dt_define(macro, val, width=None, deprecation_msg=None):
    # Writes "#define DT_<macro> <val>" to the header file
    #
    # The macro will be left-justified to 'width' characters if that
    # is specified, and the value will follow immediately after in
    # that case. Otherwise, this function decides how to add
    # whitespace between 'macro' and 'val'.
    #
    # If a 'deprecation_msg' string is passed, the generated identifiers will
    # generate a warning if used, via __WARN(<deprecation_msg>)).
    #
    # Returns the full generated macro for 'macro', with leading "DT_".

    full_macro = "DT_" + macro
    out_define(full_macro, val, width=width, deprecation_msg=deprecation_msg)
    return full_macro
| 561 | |
| 562 | |
def out_define(macro, val, width=None, deprecation_msg=None):
    # Helper for out_dt_define(). Outputs "#define <macro> <val>" to the
    # header file, adds a deprecation message if given, and left-justifies
    # the macro name to 'width' characters when requested.

    if deprecation_msg:
        warn = fr' __WARN("{deprecation_msg}")'
    else:
        warn = ""

    name = macro.ljust(width) if width else macro
    print(f"#define {name}{warn} {val}", file=header_file)
| 576 | |
| 577 | |
def out_comment(s, blank_before=True):
    # Writes 's' as a comment to the header and configuration file. 's' is
    # allowed to have multiple lines. blank_before=True adds a blank line
    # before the comment.

    if blank_before:
        print(file=header_file)

    if "\n" not in s:
        # Single-line comments come out as
        #
        #   /* foo bar */
        print("/* " + s + " */", file=header_file)
        return

    # Multi-line comments come out as
    #
    #   /*
    #    * first line
    #    * second line
    #    *
    #    * empty line before this line
    #    */
    out_lines = ["/*"]
    for line in s.splitlines():
        # Avoid an extra space after '*' for empty lines. They turn red in
        # Vim if space error checking is on, which is annoying.
        out_lines.append(" * " + line if line.strip() else " *")
    out_lines.append(" */")
    print("\n".join(out_lines), file=header_file)
| 607 | |
| 608 | |
def escape(s):
    # Backslash-escapes any double quotes and backslashes in 's'

    # A single-pass character translation: each original backslash or
    # double quote is mapped independently, so escapes introduced for one
    # character are never re-escaped by the other
    return s.translate(str.maketrans({"\\": "\\\\", '"': '\\"'}))
| 614 | |
| 615 | |
def quote_str(s):
    # Puts double quotes around 's' and escapes any double quotes and
    # backslashes within it, producing a valid C string literal

    return '"' + escape(s) + '"'
| 621 | |
| 622 | |
def err(s):
    # Raises an exception with message 's'; used for all fatal
    # devicetree-content errors in this script

    raise Exception(s)
| 625 | |
| 626 | |
| 627 | if __name__ == "__main__": |
| 628 | main() |