#!/usr/bin/python
#
# Copyright (c) 2016, Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Based on a script by:
# Chereau, Fabien <fabien.chereau@intel.com>
import argparse
import json
import operator
import os
import re
import subprocess
import sys
from optparse import OptionParser
28
class bcolors:
    """ANSI terminal escape sequences used to colorize print_tree() output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    # ENDC resets all attributes back to the terminal default.
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
38
39
# Command-line handling.  argparse is used instead of the deprecated
# optparse module (argparse is already imported by this file); the parsed
# namespace exposes the same attribute names the rest of the script reads:
# depth, outdir, binary, ram, rom.
parser = argparse.ArgumentParser(
    description="Report RAM/ROM size statistics for a Zephyr kernel binary")
parser.add_argument("-d", "--depth", dest="depth", type=int,
                    help="How deep should we go into the tree", metavar="DEPTH")
parser.add_argument("-o", "--outdir", dest="outdir",
                    help="read files from directory OUT", metavar="OUT")
parser.add_argument("-k", "--kernel-name", dest="binary", default="zephyr",
                    help="kernel binary name")
parser.add_argument("-r", "--ram",
                    action="store_true", dest="ram", default=False,
                    help="print RAM statistics")
parser.add_argument("-F", "--rom",
                    action="store_true", dest="rom", default=False,
                    help="print ROM statistics")

options = parser.parse_args()
55
def load_symbols_and_paths(elf_file, path_to_strip=None):
    """Return a dict mapping symbol_name -> path/to/file/where/it/originates
    for all symbols of *elf_file*, as reported by ``nm``.

    If *path_to_strip* is given, it is stripped from the front of every
    source path; symbols with no source info (or outside that prefix) get
    a ":/"-prefixed pseudo-path instead.
    """
    symbols_paths = {}
    nm_out = subprocess.check_output(
        ["nm", elf_file, "-S", "-l", "--size-sort", "--radix=d"])
    for line in nm_out.split('\n'):
        fields = line.replace('\t', ' ').split(' ')
        # Get rid of trailing empty field
        if len(fields) == 1 and fields[0] == '':
            continue
        assert len(fields) >= 4
        if len(fields) < 5:
            # nm gave no source-file column: use a pseudo-path
            path = ":/" + fields[3]
        else:
            # fields[4] is "path:line"; keep only the path part
            path = fields[4].split(':')[0]
            if path_to_strip is not None:
                if path_to_strip in path:
                    path = path.replace(path_to_strip, "") + '/' + fields[3]
                else:
                    path = ":/" + fields[3]
        symbols_paths[fields[3]] = path
    return symbols_paths
79
def get_section_size(f, section_name):
    """Return the size in bytes of *section_name* parsed from *f*.

    *f* is the text of a readelf-style section listing (the .stat file);
    the size is taken from the 5th field of the matching line and read as
    hexadecimal.  Returns 0 when the section is not present.
    """
    decimal_size = 0
    # re.escape() so that section names containing '.' (e.g. ".bss")
    # match literally instead of '.' acting as a regex wildcard.
    re_res = re.search(r"(.*] " + re.escape(section_name) + r".*)", f,
                       re.MULTILINE)
    if re_res is not None:
        # Replace multiple spaces with one space
        # Skip first characters to avoid having 1 extra random space
        res = ' '.join(re_res.group(1).split())[5:]
        decimal_size = int(res.split()[4], 16)
    return decimal_size
89
def get_footprint_from_bin_and_statfile(bin_file, stat_file, total_flash, total_ram):
    """Compute flash and RAM memory footprint from a .bin and .stat file.

    Returns a dict with keys total_flash, percent_flash, total_ram and
    percent_ram.  Percentages are 0 when the corresponding capacity
    argument (*total_flash* / *total_ram*) is not positive.
    """
    # Read the stat file with a context manager so the handle is closed
    # deterministically instead of being leaked.
    with open(stat_file) as stat_fp:
        f = stat_fp.read()

    # Flash usage is the size of the flashed image on disk
    # (kctext + text + ctors + rodata + kcrodata segments).
    total_used_flash = os.path.getsize(bin_file)

    # RAM usage on target: sum of all RAM-resident sections found in the
    # stat file (sections that are absent contribute 0).
    ram_sections = ("noinit", "bss", "initlevel", "datas", ".data",
                    ".heap", ".stack", ".bss", ".panic_section")
    total_used_ram = sum(get_section_size(f, s) for s in ram_sections)

    total_percent_ram = 0
    total_percent_flash = 0
    if total_ram > 0:
        total_percent_ram = float(total_used_ram) / total_ram * 100
    if total_flash > 0:
        total_percent_flash = float(total_used_flash) / total_flash * 100

    res = {"total_flash": total_used_flash,
           "percent_flash": total_percent_flash,
           "total_ram": total_used_ram,
           "percent_ram": total_percent_ram}
    return res
115
def generate_target_memory_section(out, kernel_name, source_dir, features_json):
    """Build per-path size trees for the kernel image found in *out*.

    Parses ``<out>/<kernel_name>.elf`` with objdump/nm and distributes each
    symbol's size onto a path-like tree rooted at the source directory.
    Returns the pair (ram_nodes, data_nodes): dicts mapping a path prefix
    to the cumulated size (bytes) of all symbols under it, for RAM-resident
    and flash-loaded sections respectively.

    *features_json* is an optional JSON file describing feature include /
    exclude folder filters; NOTE(review): the loaded data and the feature
    helpers below are never used on the return path of this function —
    they look like leftovers of an HTML/pie-chart report mode.
    """
    features_path_data = None
    try:
        # NOTE(review): bare except silently ignores a missing/invalid
        # features_json (callers pass None); it would also hide real errors.
        features_path_data = json.loads(open(features_json, 'r').read())
    except:
        pass

    bin_file_abs = os.path.join(out, kernel_name+'.bin')
    elf_file_abs = os.path.join(out, kernel_name+'.elf')

    # First deal with size on flash. These are the symbols flagged as LOAD in objdump output
    size_out = subprocess.check_output(["objdump", "-hw", elf_file_abs])
    loaded_section_total = 0
    loaded_section_names = []
    loaded_section_names_sizes = {}
    ram_section_total = 0
    ram_section_names = []
    ram_section_names_sizes = {}
    for line in size_out.split('\n'):
        # objdump -hw section line: fields are [idx, name, size(hex), ...]
        if "LOAD" in line:
            loaded_section_total = loaded_section_total + int(line.split()[2], 16)
            loaded_section_names.append(line.split()[1])
            loaded_section_names_sizes[line.split()[1]] = int(line.split()[2], 16)
        # RAM sections: allocated, writable (not READONLY/rodata), not code
        if "ALLOC" in line and "READONLY" not in line and "rodata" not in line and "CODE" not in line:
            ram_section_total = ram_section_total + int(line.split()[2], 16)
            ram_section_names.append(line.split()[1])
            ram_section_names_sizes[line.split()[1]] = int(line.split()[2], 16)

    # Actual .bin size, which doesn't not always match section sizes
    bin_size = os.stat(bin_file_abs).st_size

    # Get the path associated to each symbol
    symbols_paths = load_symbols_and_paths(elf_file_abs, source_dir)

    # A set of helper function for building a simple tree with a path-like
    # hierarchy.
    def _insert_one_elem(tree, path, size):
        # Add `size` to every prefix of `path`, creating nodes as needed,
        # so each tree node holds the cumulated size of its subtree.
        splitted_path = path.split('/')
        cur = None
        for p in splitted_path:
            if cur == None:
                cur = p
            else:
                cur = cur + '/' + p
            if cur in tree:
                tree[cur] += size
            else:
                tree[cur] = size

    def _parent_for_node(e):
        # Parent is the path minus its last component; top-level nodes
        # hang off the synthetic "root", which itself has no parent.
        parent = "root" if len(e.split('/')) == 1 else e.rsplit('/', 1)[0]
        if e == "root":
            parent = None
        return parent

    def _childs_for_node(tree, node):
        # All direct children of `node` (O(n) scan of the whole tree).
        res = []
        for e in tree:
            if _parent_for_node(e) == node:
                res += [e]
        return res

    def _siblings_for_node(tree, node):
        # Includes `node` itself.
        return _childs_for_node(tree, _parent_for_node(node))

    def _max_sibling_size(tree, node):
        siblings = _siblings_for_node(tree, node)
        return max([tree[e] for e in siblings])


    # Extract the list of symbols a second time but this time using the objdump tool
    # which provides more info as nm
    symbols_out = subprocess.check_output(["objdump", "-tw", elf_file_abs])
    flash_symbols_total = 0
    data_nodes = {}
    data_nodes['root'] = 0

    ram_symbols_total = 0
    ram_nodes = {}
    ram_nodes['root'] = 0
    for l in symbols_out.split('\n'):
        # Blank out columns 9-15 (objdump flag characters) so the line
        # splits into a fixed number of fields regardless of the flags.
        line = l[0:9] + "......." + l[16:]
        fields = line.replace('\t', ' ').split(' ')
        # Get rid of trailing empty field
        if len(fields) != 5:
            continue
        # fields: [addr, flags, section, size(hex), name]
        size = int(fields[3], 16)
        if fields[2] in loaded_section_names and size != 0:
            flash_symbols_total += size
            _insert_one_elem(data_nodes, symbols_paths[fields[4]], size)
        if fields[2] in ram_section_names and size != 0:
            ram_symbols_total += size
            _insert_one_elem(ram_nodes, symbols_paths[fields[4]], size)

    # --- Feature-categorization helpers -----------------------------------
    # NOTE(review): none of these are called inside this function; they are
    # presumably meant to be driven by features_json — confirm before removal.
    def _init_features_list_results(features_list):
        for feature in features_list:
            _init_feature_results(feature)

    def _init_feature_results(feature):
        feature["size"] = 0
        # recursive through children
        for child in feature["children"]:
            _init_feature_results(child)

    def _check_all_symbols(symbols_struct, features_list):
        # Returns an HTML fragment listing symbols matched by no feature.
        out = ""
        sorted_nodes = sorted(symbols_struct.items(), key=operator.itemgetter(0))
        named_symbol_filter = re.compile('.*\.[a-zA-Z]+/.*')
        out_symbols_filter = re.compile('^:/')
        for symbpath in sorted_nodes:
            matched = 0
            # The files and folders (not matching regex) are discarded
            # like: folder folder/file.ext
            is_symbol=named_symbol_filter.match(symbpath[0])
            is_generated=out_symbols_filter.match(symbpath[0])
            if is_symbol == None and is_generated == None:
                continue
            # The symbols inside a file are kept: folder/file.ext/symbol
            # and unrecognized paths too (":/")
            for feature in features_list:
                matched = matched + _does_symbol_matches_feature(symbpath[0], symbpath[1], feature)
            # NOTE(review): `matched is 0` relies on CPython small-int
            # interning; `matched == 0` is the correct comparison.
            if matched is 0:
                out += "UNCATEGORIZED: %s %d<br/>" % (symbpath[0], symbpath[1])
        return out

    def _does_symbol_matches_feature(symbol, size, feature):
        # Returns the number of matches (this feature + descendants) and
        # accumulates `size` into each matching feature's "size" entry.
        matched = 0
        # check each include-filter in feature
        for inc_path in feature["folders"]:
            # filter out if the include-filter is not in the symbol string
            if inc_path not in symbol:
                continue
            # if the symbol match the include-filter, check against exclude-filter
            is_excluded = 0
            for exc_path in feature["excludes"]:
                if exc_path in symbol:
                    is_excluded = 1
                    break
            if is_excluded == 0:
                matched = 1
                feature["size"] = feature["size"] + size
                # it can only be matched once per feature (add size once)
                break
        # check children independently of this feature's result
        for child in feature["children"]:
            child_matched = _does_symbol_matches_feature(symbol, size, child)
            matched = matched + child_matched
        return matched



    # Create a simplified tree keeping only the most important contributors
    # This is used for the pie diagram summary
    # NOTE(review): tmp/tmp2/filtered_data_nodes are computed but never
    # returned — dead code unless a report mode elsewhere consumes them.
    min_parent_size = bin_size/25
    min_sibling_size = bin_size/35
    tmp = {}
    for e in data_nodes:
        if _parent_for_node(e) == None:
            continue
        if data_nodes[_parent_for_node(e)] < min_parent_size:
            continue
        if _max_sibling_size(data_nodes, e) < min_sibling_size:
            continue
        tmp[e] = data_nodes[e]

    # Keep only final nodes
    tmp2 = {}
    for e in tmp:
        if len(_childs_for_node(tmp, e)) == 0:
            tmp2[e] = tmp[e]

    # Group nodes too small in an "other" section
    filtered_data_nodes = {}
    for e in tmp2:
        if tmp[e] < min_sibling_size:
            k = _parent_for_node(e) + "/(other)"
            if k in filtered_data_nodes:
                filtered_data_nodes[k] += tmp[e]
            else:
                filtered_data_nodes[k] = tmp[e]
        else:
            filtered_data_nodes[e] = tmp[e]


    def _parent_level_3_at_most(node):
        # Climb ancestors until the path has at most 3 components.
        e = _parent_for_node(node)
        while e.count('/')>2:
            e = _parent_for_node(e)
        return e

    return ram_nodes, data_nodes
307
308
def print_tree(data, total, depth):
    """Print an indented, colorized size breakdown of *data* (path -> bytes)
    and return the percentage of *total* covered by top-level entries.

    Entries with more than *depth* path components are skipped when *depth*
    is truthy.  Paths that do not exist under $ZEPHYR_BASE are printed in
    the warning color.  Raises KeyError if ZEPHYR_BASE is not set.
    """
    base = os.environ['ZEPHYR_BASE']
    totp = 0
    # Single parenthesized argument: print(...) behaves identically as a
    # Python 2 print statement and a Python 3 function call.
    print('{:92s} {:10s} {:8s}'.format(bcolors.FAIL + "Path", "Size", "%" + bcolors.ENDC))
    print('=' * 110)
    for i in sorted(data):
        p = i.split("/")
        if depth and len(p) > depth:
            continue

        percent = 100 * float(data[i]) / float(total)
        percent_c = percent
        if len(p) < 2:
            # Only top-level entries contribute to the coverage total.
            totp += percent

        if len(p) > 1:
            # Nested entry: warn-color paths missing from the source tree.
            if not os.path.exists(os.path.join(base, i)):
                s = bcolors.WARNING + p[-1] + bcolors.ENDC
            else:
                s = bcolors.OKBLUE + p[-1] + bcolors.ENDC
            print('{:80s} {:20d} {:8.2f}%'.format(" " * (len(p) - 1) + s, data[i], percent_c))
        else:
            print('{:80s} {:20d} {:8.2f}%'.format(bcolors.OKBLUE + i + bcolors.ENDC, data[i], percent_c))

    print('=' * 110)
    print('{:92d}'.format(total))
    return totp
336
337
# --- Script entry: locate the build output and print the requested reports.

# Guard against options.outdir being None: os.path.join(None, ...) would
# raise TypeError before the `options.outdir` check below could run.
binary = os.path.join(options.outdir or "", options.binary + ".elf")

if options.outdir and os.path.exists(binary):
    fp = get_footprint_from_bin_and_statfile(
        "%s/%s.bin" % (options.outdir, options.binary),
        "%s/%s.stat" % (options.outdir, options.binary), 0, 0)
    base = os.environ['ZEPHYR_BASE']
    ram, data = generate_target_memory_section(
        options.outdir, options.binary, base + '/', None)
    if options.rom:
        print_tree(data, fp['total_flash'], options.depth)
    if options.ram:
        print_tree(ram, fp['total_ram'], options.depth)
else:
    print("%s does not exist." % (binary))