blob: d0f8df6cd2abad33c97b171d6a1269d2291f7c60 [file] [log] [blame]
Marcus Shawcroft459e8502017-02-13 21:43:10 +00001#!/usr/bin/env python
Andy Grossbb063162017-01-29 23:53:17 -06002import sys
3from os import walk
4import os
5import re
6import yaml
7import pprint
8
9from devicetree import parse_file
10
# globals: populated once per run while walking the parsed dts tree
compatibles = {}   # node path -> 'compatible' value(s); filled by get_all_compatibles()
phandles = {}      # phandle -> node path; filled by get_phandles()
aliases = {}       # alias target -> alias name; filled by get_aliases()
chosen = {}        # /chosen properties; filled by get_chosen()
reduced = {}       # flattened node path -> node (children stripped); filled by compress_nodes()
17
def convert_string_to_label(s):
    """Return *s* with ',' and '-' transmuted to '_' (C identifier friendly)."""
    # Single expression instead of statement-per-replace with stray semicolons.
    return s.replace("-", "_").replace(",", "_")
23
def get_all_compatibles(d, name, comp_dict):
    """Recursively collect 'compatible' values into comp_dict, keyed by node path.

    Subtrees whose 'status' property is "disabled" are skipped entirely.
    Returns comp_dict for caller convenience.
    """
    if 'props' in d:
        if d['props'].get('status') == "disabled":
            return comp_dict
        compatible = d['props'].get('compatible')
        if compatible is not None:
            comp_dict[name] = compatible

    # Build the path prefix for children ('/' root needs no extra separator).
    prefix = name if name == '/' else name + '/'

    if isinstance(d, dict) and d['children']:
        for child_name, child in d['children'].items():
            get_all_compatibles(child, prefix + child_name, comp_dict)

    return comp_dict
44
def get_aliases(root):
    """Record /aliases properties into the global ``aliases`` map.

    The dts stores alias-name -> target; the global map is inverted
    (target -> alias-name).
    """
    if 'children' in root and 'aliases' in root['children']:
        for alias_name, target in root['children']['aliases']['props'].items():
            aliases[target] = alias_name
52
def get_compat(node):
    """Return the node's 'compatible' value, or None if absent.

    A list-valued 'compatible' collapses to its first (most specific) entry.
    """
    if 'props' not in node:
        return None

    compatible = node['props'].get('compatible')
    if isinstance(compatible, list):
        return compatible[0]
    return compatible
64
def get_chosen(root):
    """Copy all /chosen properties into the global ``chosen`` map."""
    if 'children' in root and 'chosen' in root['children']:
        chosen.update(root['children']['chosen']['props'])
73
def get_phandles(root, name, handles):
    """Walk the tree recording phandle -> node path in the global ``phandles``.

    Subtrees whose 'status' is "disabled" are skipped.  The ``handles``
    argument is threaded through the recursion but never read; it is kept
    for interface compatibility.
    """
    if 'props' in root:
        if root['props'].get('status') == "disabled":
            return
        handle = root['props'].get('phandle')
        if handle is not None:
            phandles[handle] = name

    prefix = name if name == '/' else name + '/'

    if isinstance(root, dict) and root['children']:
        for child_name, child in root['children'].items():
            get_phandles(child, prefix + child_name, handles)
95
class Loader(yaml.Loader):
    """yaml.Loader with '!include'/'!import' tags that splice in other YAML files.

    Included paths are resolved relative to the including file, with a
    fallback to '<two dirs up>/common/yaml'.
    """

    def __init__(self, stream):
        # Remember the including file's real path so relative includes resolve.
        self._root = os.path.realpath(stream.name)
        super(Loader, self).__init__(stream)
        Loader.add_constructor('!include', Loader.include)
        Loader.add_constructor('!import', Loader.include)

    def include(self, node):
        """Constructor for !include: accepts a scalar, sequence, or mapping."""
        if isinstance(node, yaml.ScalarNode):
            return self.extractFile(self.construct_scalar(node))

        elif isinstance(node, yaml.SequenceNode):
            result = []
            for filename in self.construct_sequence(node):
                result += self.extractFile(filename)
            return result

        elif isinstance(node, yaml.MappingNode):
            result = {}
            # BUG FIX: dict.iteritems() does not exist on Python 3 (the rest
            # of this script already uses py3 print()); use items().
            for k, v in self.construct_mapping(node).items():
                result[k] = self.extractFile(v)
            return result

        else:
            print("Error:: unrecognised node type in !include statement")
            raise yaml.constructor.ConstructorError

    def extractFile(self, filename):
        """Load *filename* next to the including file, else from ../../common/yaml."""
        filepath = os.path.join(os.path.dirname(self._root), filename)
        if not os.path.isfile(filepath):
            # we need to look in common directory
            # take path and back up 2 directories and tack on '/common/yaml'
            filepath = os.path.dirname(self._root).split('/')
            filepath = '/'.join(filepath[:-2])
            filepath = os.path.join(filepath + '/common/yaml', filename)
        with open(filepath, 'r') as f:
            return yaml.load(f, Loader)
133
def insert_defs(node_address, defs, new_defs, new_aliases):
    """Merge new_defs and new_aliases into defs[node_address].

    Aliases live under the reserved 'aliases' key of each node's dict and
    are merged rather than replaced.
    """
    if node_address not in defs:
        # First definitions for this node: store new_defs itself, with the
        # alias dict attached under its reserved key.
        new_defs['aliases'] = new_aliases
        defs[node_address] = new_defs
        return

    node_defs = defs[node_address]
    if 'aliases' in node_defs:
        node_defs['aliases'].update(new_aliases)
    else:
        node_defs['aliases'] = new_aliases
    node_defs.update(new_defs)
147
def find_node_by_path(nodes, path):
    """Descend the nested node dict following absolute path '/a/b/c'."""
    node = nodes
    for component in path[1:].split('/'):
        node = node['children'][component]
    return node
154
def compress_nodes(nodes, path):
    """Flatten the tree into the global ``reduced`` map: path -> node copy.

    Each stored entry is a shallow copy with 'children' removed; disabled
    subtrees are skipped entirely.
    """
    if 'props' in nodes and nodes['props'].get('status') == "disabled":
        return

    if isinstance(nodes, dict):
        entry = dict(nodes)
        entry.pop('children', None)
        reduced[path] = entry

        prefix = path if path == '/' else path + '/'
        if nodes['children']:
            for child_name, child in nodes['children'].items():
                compress_nodes(child, prefix + child_name)
172
def find_parent_irq_node(node_address):
    """Return the reduced node that is the interrupt parent of *node_address*.

    Walks every path component from the root down to the node itself and
    remembers the last 'interrupt-parent' seen, so the nearest enclosing
    setting wins.

    NOTE(review): if no node along the path defines 'interrupt-parent', the
    final lookup raises NameError on the unbound local -- callers apparently
    rely on one always being present; confirm before hardening.
    """
    address = ''

    for comp in node_address.split('/')[1:]:
        address += '/' + comp
        if 'interrupt-parent' in reduced[address]['props']:
            interrupt_parent = reduced[address]['props'].get('interrupt-parent')

    # phandle -> node path -> reduced node
    return reduced[phandles[interrupt_parent]]
182
def extract_interrupts(node_address, yaml, y_key, names, defs, def_label):
    """Generate #define entries for a node's interrupt specifiers.

    node_address: path key into the global ``reduced`` map
    yaml: compat -> parsed YAML binding description
    y_key: 'interrupts' or 'interrupts-extended'
    names: interrupt names, consumed one per specifier (may be exhausted)
    defs / def_label: output dict and label prefix for the generated defines
    """
    node = reduced[node_address]

    # Normalise the property value to a flat list (a single cell comes back
    # as a scalar, which list() would reject).
    try:
        props = list(node['props'].get(y_key))
    except:
        props = [node['props'].get(y_key)]

    irq_parent = find_parent_irq_node(node_address)

    l_base = def_label.split('/')
    index = 0

    # Each pass consumes one full interrupt specifier from ``props``.
    while props:
        prop_def = {}
        prop_alias = {}
        l_idx = [str(index)]

        if y_key == 'interrupts-extended':
            # First cell of each extended specifier is the controller phandle.
            cell_parent = reduced[phandles[props.pop(0)]]
            name = []
        else:
            try:
                name = [names.pop(0).upper()]
            except:
                name = []

            cell_parent = irq_parent

        cell_yaml = yaml[get_compat(cell_parent)]
        # NOTE(review): the [] default would make .upper() raise if the
        # binding lacks 'cell_string' -- presumably always present; verify.
        l_cell_prefix = [yaml[get_compat(irq_parent)].get('cell_string', []).upper()]

        # One define per cell, named per the controller binding's '#cells'.
        for i in range(cell_parent['props']['#interrupt-cells']):
            l_cell_name = [cell_yaml['#cells'][i].upper()]
            if l_cell_name == l_cell_prefix:
                l_cell_name = []

            l_fqn = '_'.join(l_base + l_cell_prefix + l_idx)
            prop_def[l_fqn] = props.pop(0)
            if len(name):
                prop_alias['_'.join(l_base + name + l_cell_prefix)] = l_fqn

        index += 1
        insert_defs(node_address, defs, prop_def, prop_alias)

    return
229
def extract_reg_prop(node_address, names, defs, def_label, div):
    """Generate BASE_ADDRESS/SIZE defines for a node's 'reg' property.

    node_address: path key into the global ``reduced`` map
    names: optional list of region names, consumed one per reg entry
           (may be None)
    defs: output dict (node_address -> {define: value})
    def_label: prefix for the generated define names
    div: divisor applied to each size (e.g. 1024 to emit KiB)
    """
    node = reduced[node_address]

    props = list(reduced[node_address]['props']['reg'])

    # #address-cells / #size-cells are inherited: walk from the root down to
    # the node, letting the closest ancestor's value win.
    address_cells = reduced['/']['props'].get('#address-cells')
    size_cells = reduced['/']['props'].get('#size-cells')
    address = ''
    for comp in node_address.split('/')[1:]:
        address += '/' + comp
        address_cells = reduced[address]['props'].get('#address-cells', address_cells)
        size_cells = reduced[address]['props'].get('#size-cells', size_cells)

    index = 0
    l_base = def_label.split('/')
    l_addr = ["BASE_ADDRESS"]
    l_size = ["SIZE"]

    while props:
        prop_def = {}
        prop_alias = {}
        addr = 0
        size = 0
        l_idx = [str(index)]

        # Explicit check instead of the old bare 'except' around names.pop():
        # names may be None or already exhausted.
        name = [names.pop(0).upper()] if names else []

        # Combine the cells into single integers, 32 bits per cell.
        for x in range(address_cells):
            addr += props.pop(0) << (32 * x)
        for x in range(size_cells):
            size += props.pop(0) << (32 * x)

        l_addr_fqn = '_'.join(l_base + l_addr + l_idx)
        l_size_fqn = '_'.join(l_base + l_size + l_idx)
        prop_def[l_addr_fqn] = hex(addr)
        # Floor division keeps this exact; the old int(size / div) rounded
        # through a float and could lose precision for sizes above 2**53.
        prop_def[l_size_fqn] = size // div
        if len(name):
            prop_alias['_'.join(l_base + name + l_addr)] = l_addr_fqn
            prop_alias['_'.join(l_base + name + l_size)] = l_size_fqn

        # The first region also gets un-indexed alias defines.
        if index == 0:
            prop_alias['_'.join(l_base + l_addr)] = l_addr_fqn
            prop_alias['_'.join(l_base + l_size)] = l_size_fqn

        insert_defs(node_address, defs, prop_def, prop_alias)

        # increment index for definition creation
        index += 1

    return
283
def extract_cells(node_address, yaml, y_key, names, index, prefix, defs, def_label,
                  props=None):
    """Generate defines for a phandle+cells property (e.g. 'clocks').

    Consumes one specifier (phandle plus its cells) per call and recurses
    for the remainder.  ``index`` numbers the specifier within the property.
    ``props`` is internal recursion state carrying the not-yet-consumed
    cells; external callers omit it (backward-compatible default).
    """
    # BUG FIX: the old code rebuilt the full cell list from ``reduced`` on
    # every recursive call, so a property with more than one specifier
    # recursed forever.  Read it only on the initial call.
    if props is None:
        try:
            props = list(reduced[node_address]['props'].get(y_key))
        except:
            props = [reduced[node_address]['props'].get(y_key)]

    # First cell of each specifier is the phandle of the provider node.
    cell_parent = reduced[phandles[props.pop(0)]]

    try:
        cell_yaml = yaml[get_compat(cell_parent)]
    except:
        raise Exception("Could not find yaml description for " + cell_parent['name'])

    # BUG FIX: keep the name as a (possibly empty) *list* so it concatenates
    # with the other label fragments below; the old code bound a bare string,
    # which made ``l_base + name`` raise TypeError whenever names existed.
    try:
        name = [names.pop(0).upper()]
    except:
        name = []

    l_cell = [str(cell_yaml.get('cell_string', ''))]
    l_base = def_label.split('/')
    l_base += prefix
    l_idx = [str(index)]

    prop_def = {}
    prop_alias = {}

    # The provider's '#...-cells' property says how many cells follow the
    # phandle; the binding's '#cells' list names each one.
    for k in cell_parent['props'].keys():
        if k[0] == '#' and '-cells' in k:
            for i in range(cell_parent['props'].get(k)):
                l_cellname = [str(cell_yaml['#cells'][i]).upper()]
                if l_cell == l_cellname:
                    label = l_base + l_cell + l_idx
                else:
                    label = l_base + l_cell + l_cellname + l_idx
                label_name = l_base + name + l_cellname
                prop_def['_'.join(label)] = props.pop(0)
                if len(name):
                    prop_alias['_'.join(label_name)] = '_'.join(label)

                # The first specifier also gets an un-indexed alias.
                if index == 0:
                    prop_alias['_'.join(label[:-1])] = '_'.join(label)

    insert_defs(node_address, defs, prop_def, prop_alias)

    # recurse if we have anything left
    if len(props):
        extract_cells(node_address, yaml, y_key, names, index + 1, prefix,
                      defs, def_label, props)

    return
333
def extract_pinctrl(node_address, yaml, pinconf, names, index, defs, def_label):
    """Generate pin/function defines for one 'pinctrl-N' property.

    pinconf: the property value -- a phandle, or list of phandles, to pin
    configuration nodes.  For every subnode under each referenced node, one
    define is emitted per entry of its 'pins' list, plus a matching define
    for its 'function' value.

    NOTE(review): ``names`` and ``index`` are accepted but never used here;
    ``target_node`` below is likewise assigned and never read.
    """

    # normalise to a list of phandles
    prop_list = []
    if not isinstance(pinconf,list):
        prop_list.append(pinconf)
    else:
        prop_list = list(pinconf)

    def_prefix = def_label.split('_')
    target_node = node_address

    prop_def = {}
    for p in prop_list:
        pin_node_address = phandles[p]
        pin_entry = reduced[pin_node_address]
        # the pin controller is the parent of the referenced config node
        parent_address = '/'.join(pin_node_address.split('/')[:-1])
        pin_parent = reduced[parent_address]
        cell_yaml = yaml[get_compat(pin_parent)]
        cell_prefix = cell_yaml.get('cell_string', None)
        post_fix = []

        if cell_prefix != None:
            post_fix.append(cell_prefix)

        # scan the flattened tree for nodes nested under the referenced one
        for subnode in reduced.keys():
            if pin_node_address in subnode and pin_node_address != subnode:
                # found a subnode underneath the pinmux handle
                node_label = subnode.split('/')[-2:]
                pin_label = def_prefix + post_fix + subnode.split('/')[-2:]

                for i, pin in enumerate(reduced[subnode]['props']['pins']):
                    # cell_yaml['#cells'][0]/[1] name the pin and function parts
                    key_label = list(pin_label) + [cell_yaml['#cells'][0]] + [str(i)]
                    func_label = key_label[:-2] + [cell_yaml['#cells'][1]] + [str(i)]
                    key_label = convert_string_to_label('_'.join(key_label)).upper()
                    func_label = convert_string_to_label('_'.join(func_label)).upper()

                    prop_def[key_label] = pin
                    prop_def[func_label] = reduced[subnode]['props']['function']

    insert_defs(node_address, defs, prop_def, {})
374
def extract_single(node_address, yaml, prop, key, prefix, defs, def_label):
    """Emit one define for a plain property (one per element for lists).

    The ``yaml`` and ``prefix`` arguments are not used by this function;
    they are kept for interface parity with the other extract_* helpers.
    """
    label = def_label + '_' + convert_string_to_label(key).upper()

    prop_def = {}
    if isinstance(prop, list):
        # indexed define per list element
        for i, element in enumerate(prop):
            prop_def[label + '_' + str(i)] = element
    else:
        prop_def[label] = prop

    defs.setdefault(node_address, {}).update(prop_def)

    return
395
def extract_property(yaml, node_address, y_key, y_val, names, prefix, defs):
    """Dispatch extraction of one device-tree property to the right handler.

    The define label is the node's compatible string plus its unit address
    (the part after '@'), upper-cased and label-sanitised.
    """
    node = reduced[node_address]
    def_label = convert_string_to_label(get_compat(node)).upper()
    def_label += '_' + node_address.split('@')[-1].upper()

    if y_key == 'reg':
        extract_reg_prop(node_address, names, defs, def_label, 1)
    # BUG FIX: 'interupts-extended' was misspelled, so extended interrupt
    # specifiers never reached the interrupt handler.
    elif y_key == 'interrupts' or y_key == 'interrupts-extended':
        extract_interrupts(node_address, yaml, y_key, names, defs, def_label)
    elif 'pinctrl-' in y_key:
        p_index = int(y_key.split('-')[1])
        extract_pinctrl(node_address, yaml, reduced[node_address]['props'][y_key],
                        names[p_index], p_index, defs, def_label)
    elif 'clocks' in y_key:
        extract_cells(node_address, yaml, y_key,
                      names, 0, prefix, defs, def_label)
    else:
        extract_single(node_address, yaml[get_compat(reduced[node_address])],
                       reduced[node_address]['props'][y_key], y_key,
                       prefix, defs, def_label)

    return
419
def extract_node_include_info(reduced, node_address, yaml, defs, structs):
    """Extract defines for one node according to its YAML binding.

    For every property description in the binding that requests generation,
    find the node properties matching its key (regex-anchored) and hand them
    to extract_property().  Nodes with no binding are skipped.
    """
    node = reduced[node_address]
    node_compat = get_compat(node)

    if not node_compat in yaml.keys():
        return {}, {}

    y_node = yaml[node_compat]

    # check to see if we need to process the properties
    for yp in y_node['properties']:
        for k,v in yp.items():
            if 'generation' in v:
                # NOTE(review): label/storage are computed but never used
                # below -- struct generation appears unimplemented.
                if v['generation'] == 'define':
                    label = v.get('define_string')
                    storage = defs
                else:
                    label = v.get('structures_string')
                    storage = structs

                prefix = []
                if v.get('use-name-prefix') != None:
                    prefix = [convert_string_to_label(k.upper())]

                for c in node['props'].keys():
                    # NOTE(review): this 'pass' has no effect -- it looks
                    # like it was meant to be 'continue' to skip *-names
                    # bookkeeping properties; confirm before changing.
                    if c.endswith("-names"):
                        pass

                    if re.match(k + '$', c):

                        # find the matching names list, e.g. 'clock-names'
                        # for 'clocks' or 'pinctrl-names' for 'pinctrl-0'
                        if 'pinctrl-' in c:
                            names = node['props'].get('pinctrl-names', [])
                        else:
                            names = node['props'].get(c[:-1] + '-names', [])
                            if not names:
                                names = node['props'].get(c + '-names', [])

                        if not isinstance(names, list):
                            names = [names]

                        extract_property(yaml, node_address, c, v, names, prefix, defs)

    return
463
def yaml_collapse(yaml_list):
    """Fold each binding's 'inherits' entries into its own 'properties' list.

    An inherited property description is appended only when the binding does
    not already describe a property of the same name; the 'inherits' key is
    removed afterwards.
    """
    collapsed = dict(yaml_list)

    for compat, binding in collapsed.items():
        # names this binding already describes itself
        own_keys = set()
        for entry in binding.get('properties', []):
            own_keys.update(entry)

        if 'inherits' in binding:
            for parent in binding['inherits']:
                for prop_entry in parent['properties']:
                    for prop_key in prop_entry:
                        if prop_key not in own_keys:
                            binding['properties'].append(prop_entry)
            binding.pop('inherits')

    return collapsed
483
484
def print_key_value(k, v, tabstop):
    """Write '#define <k><tabs><v>' to stdout, tab-padding out to *tabstop*."""
    label = "#define " + k

    # number of 8-column tab stops the label text already covers
    if len(label) % 8:
        used = (len(label) + 7) >> 3
    else:
        used = (len(label) >> 3) + 1

    # negative counts collapse to no padding, same as the empty range before
    pad = '\t' * (tabstop - used + 1)
    sys.stdout.write(label + pad + str(v) + "\n")

    return
501
def generate_include_file(defs):
    """Print the generated header (guard, per-node #define blocks) to stdout."""
    compatible = reduced['/']['props']['compatible'][0]

    out = sys.stdout.write
    out("/**************************************************\n")
    out(" * Generated include file for " + compatible)
    out("\n")
    out(" * DO NOT MODIFY\n")
    out(" */\n")
    out("\n")
    out("#ifndef _DEVICE_TREE_BOARD_H" + "\n")
    out("#define _DEVICE_TREE_BOARD_H" + "\n")
    out("\n")

    for node in sorted(defs.keys()):
        out('/* ' + node.split('/')[-1] + ' */')
        out("\n")

        # widest '#define <name>' in this node decides the value column
        maxlength = max(len(s + '#define ') for s in defs[node].keys())
        if maxlength % 8:
            maxtabstop = (maxlength + 7) >> 3
        else:
            maxtabstop = (maxlength >> 3) + 1

        # keep a little breathing room between name and value
        if (maxtabstop * 8 - maxlength) <= 2:
            maxtabstop += 1

        for prop in sorted(defs[node].keys()):
            if prop == 'aliases':
                for entry in sorted(defs[node][prop]):
                    print_key_value(entry, defs[node][prop].get(entry), maxtabstop)
            else:
                print_key_value(prop, defs[node].get(prop), maxtabstop)
        out("\n")

    out("#endif\n")
539
def main(args):
    """Generate a C header of #defines from a dts file and its YAML bindings.

    args[1]: path to the .dts file; args[2]: directory of YAML bindings.
    Returns 1 on usage error; the generated header goes to stdout.
    """
    # args[2] is required too -- the old '< 2' check let a missing YAML path
    # crash later with IndexError instead of printing usage.
    if len(args) < 3:
        print('Usage: %s filename.dts path_to_yaml' % args[0])
        return 1

    try:
        with open(args[1], "r") as fd:
            d = parse_file(fd)
    except OSError:
        # Only a missing/unreadable file gets this message; parse errors now
        # propagate instead of being mislabelled by the old bare except.
        raise Exception("Input file " + os.path.abspath(args[1]) + " does not exist.")

    # compress list to nodes w/ paths, add interrupt parent
    compress_nodes(d['/'], '/')

    # build up useful lists
    compatibles = get_all_compatibles(d['/'], '/', {})
    get_phandles(d['/'], '/', {})
    get_aliases(d['/'])
    get_chosen(d['/'])

    # find unique set of compatibles across all active nodes
    s = set()
    for k, v in compatibles.items():
        if isinstance(v, list):
            for item in v:
                s.add(item)
        else:
            s.add(v)

    # scan YAML files and find the ones we are interested in
    yaml_files = []
    for (dirpath, dirnames, filenames) in walk(args[2]):
        # raw string: '\.' and '\Z' are regex escapes, not string escapes
        yaml_files.extend([f for f in filenames if re.match(r'.*\.yaml\Z', f)])
        yaml_files = [dirpath + '/' + t for t in yaml_files]
        break

    yaml_list = {}
    file_load_list = set()
    for file in yaml_files:
        for line in open(file, 'r'):
            if re.search(r'^\s+constraint:*', line):
                c = line.split(':')[1].strip()
                c = c.strip('"')
                if c in s:
                    if not file in file_load_list:
                        file_load_list.add(file)
                        with open(file, 'r') as yf:
                            yaml_list[c] = yaml.load(yf, Loader)

    if yaml_list == {}:
        raise Exception("Missing YAML information. Check YAML sources")

    # collapse the yaml inherited information
    yaml_list = yaml_collapse(yaml_list)

    # load zephyr specific nodes
    flash = {}
    console = {}
    sram = {}
    if 'zephyr,flash' in chosen:
        flash = reduced[chosen['zephyr,flash']]
    if 'zephyr,console' in chosen:
        console = reduced[chosen['zephyr,console']]
    if 'zephyr,sram' in chosen:
        sram = reduced[chosen['zephyr,sram']]

    defs = {}
    structs = {}
    for k, v in reduced.items():
        node_compat = get_compat(v)
        if node_compat is not None and node_compat in yaml_list:
            extract_node_include_info(reduced, k, yaml_list, defs, structs)

    if defs == {}:
        raise Exception("No information parsed from dts file.")

    if flash:
        extract_reg_prop(chosen['zephyr,flash'], None, defs, "CONFIG_FLASH", 1024)
    else:
        # We will add address and size of 0 for systems with no flash controller
        # This is what they already do in the Kconfig options anyway
        defs['dummy-flash'] = { 'CONFIG_FLASH_BASE_ADDRESS': 0, 'CONFIG_FLASH_SIZE': 0 }

    if sram:
        extract_reg_prop(chosen['zephyr,sram'], None, defs, "CONFIG_SRAM", 1024)

    # generate include file
    generate_include_file(defs)
628
if __name__ == '__main__':
    # invoked as a script: exit with main()'s return code
    sys.exit(main(sys.argv))