#!/usr/bin/env python3
# Copyright (c) 2021 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0

6from anytree.importer import DictImporter
7from anytree import PreOrderIter
8from anytree.search import find
9importer = DictImporter()
10from datetime import datetime
11from dateutil.relativedelta import relativedelta
12import os
13import json
14from git import Repo
15from git.exc import BadName
16
17from influxdb import InfluxDBClient
18import glob
19import argparse
20from tabulate import tabulate
21
# Timestamp of this run (naive UTC).
# NOTE(review): not referenced anywhere in the visible file — possibly a
# leftover; confirm before removing.
TODAY = datetime.utcnow()
# NOTE(review): name says "two_mon" but the delta is 4 months, and it is
# not referenced anywhere in the visible file — confirm intent.
two_mon_rel = relativedelta(months=4)

# DSN of the InfluxDB server/database holding footprint measurements.
influx_dsn = 'influxdb://localhost:8086/footprint_tracking'
26
def create_event(data, board, feature, commit, current_time, typ, application):
    """Upload one set of footprint measurements to InfluxDB.

    Each entry of *data* (measurement name -> size value) becomes one point
    in the 'footprint_tracking' database, tagged with the build metadata and
    stamped with *current_time*.

    :param data: dict mapping measurement/component name to its size value.
    :param board: board the sample was built for.
    :param feature: feature set of the sample.
    :param commit: commit identifier the sample was built from.
    :param current_time: timestamp recorded for every point.
    :param typ: footprint kind, 'ram' or 'rom'.
    :param application: application the sample was built from.
    """
    client = InfluxDBClient.from_dsn(influx_dsn)
    # Ensure the target database exists before writing (assumed harmless
    # when it already exists — confirm against the server version in use).
    client.create_database('footprint_tracking')

    # Build the point list from items() directly instead of keys() + lookup.
    footprint_data = [
        {
            "measurement": measurement,
            "tags": {
                "board": board,
                "commit": commit,
                "application": application,
                "type": typ,
                "feature": feature
            },
            "time": current_time,
            "fields": {
                "value": value
            }
        }
        for measurement, value in data.items()
    ]

    client.write_points(footprint_data, time_precision='s', database='footprint_tracking')
48
49
def parse_args():
    """Parse command-line arguments.

    Sets the module-level ``args`` namespace (read by other functions in
    this file) and also returns it, so callers can use the result directly.

    :return: the parsed :class:`argparse.Namespace`.
    """
    global args
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)

    parser.add_argument("-d", "--data", help="Data Directory")
    parser.add_argument("-y", "--dryrun", action="store_true",
                        help="Dry run, do not upload to database")
    parser.add_argument("-z", "--zephyr-base", help="Zephyr tree")
    parser.add_argument("-f", "--file", help="JSON file with footprint data")
    args = parser.parse_args()
    return args
61
62
def parse_file(json_file):
    """Parse a footprint JSON report into a flat {component: size} dict.

    The file's 'symbols' entry is imported as a tree of nodes carrying
    'name' and 'size' attributes (anytree DictImporter format). Sizes of
    nodes found under the synthetic names 'Root'/'Symbols' are recorded
    under the key 'all'; presumably these carry the aggregate total —
    confirm against the report generator.

    :param json_file: path to the JSON report to read.
    :return: dict mapping component name to size (int per the report).
    """

    with open(json_file, "r") as fp:
        contents = json.load(fp)
        root = importer.import_(contents['symbols'])

    # Reports may split symbols into two top-level subtrees: one for the
    # Zephyr tree proper and one for the enclosing workspace.
    zr = find(root, lambda node: node.name == 'ZEPHYR_BASE')
    ws = find(root, lambda node: node.name == 'WORKSPACE')

    data = {}
    if zr and ws:
        trees = [zr, ws]
    else:
        trees = [root]

    # First pass: record components directly under the root, excluding the
    # WORKSPACE/ZEPHYR_BASE subtrees (their children are handled below).
    for node in PreOrderIter(root, maxlevel=2):
        if node.name not in ['WORKSPACE', 'ZEPHYR_BASE']:
            if node.name in ['Root', 'Symbols']:
                data['all'] = node.size
            else:
                data[node.name] = node.size

    # Second pass: record components one level under each selected subtree.
    # Later entries overwrite first-pass entries with the same name.
    # NOTE(review): 'root' is rebound here from a node to a *string* (the
    # subtree's name) and only used to skip the subtree's own node.
    for t in trees:
        root = t.name
        for node in PreOrderIter(t, maxlevel=2):
            if node.name == root:
                continue
            comp = node.name
            if comp in ['Root', 'Symbols']:
                data['all'] = node.size
            else:
                data[comp] = node.size

    return data
97
def process_files(data_dir, zephyr_base, dry_run):
    """Walk *data_dir* and upload every footprint JSON report found there.

    Expected layout: <data_dir>/<commit>/<application>/<feature>/<board>/*.json

    Commits that already have 'kernel' measurements in the database are
    skipped. Each point's timestamp is the commit's committer date, resolved
    against the git checkout at *zephyr_base*.

    :param data_dir: directory containing one subdirectory per commit.
    :param zephyr_base: path to a Zephyr git checkout used to resolve
        commit identifiers to dates.
    :param dry_run: when True, parse and print but do not touch InfluxDB.
    """
    repo = Repo(zephyr_base)

    # 'commit_hash' instead of 'hash' to avoid shadowing the builtin.
    for commit_hash in os.listdir(data_dir):
        if not dry_run:
            client = InfluxDBClient.from_dsn(influx_dsn)
            # NOTE(review): commit_hash comes from local directory names,
            # but it is still interpolated into the query unescaped.
            result = client.query(f"select * from kernel where commit = '{commit_hash}';")
            if result:
                print(f"Skipping {commit_hash}...")
                continue
        print(f"Importing {commit_hash}...")
        # Fix: glob under the data_dir parameter, not the global args.data,
        # so the function behaves the same however it is called.
        for file in glob.glob(f"{data_dir}/{commit_hash}/**/*json", recursive=True):
            # Derive <commit>/<app>/<feature>/<board> from the path relative
            # to data_dir. The previous file.split("/") positional indexing
            # silently broke when data_dir had more than one path component.
            commit, app, feature, board = os.path.relpath(file, data_dir).split(os.sep)[:4]

            json_file = os.path.basename(file)
            # File names containing 'ram' hold RAM data; everything else ROM.
            typ = 'ram' if 'ram' in json_file else 'rom'

            data = parse_file(file)

            try:
                gitcommit = repo.commit(commit)
                current_time = gitcommit.committed_datetime
            except BadName:
                # Descriptive identifiers look like '<tag>-<n>-g<sha>';
                # resolve the sha after '-g'. (If '-g' is absent, find()
                # returns -1 and only the first character is stripped.)
                cidx = commit.find('-g') + 2
                gitcommit = repo.commit(commit[cidx:])
                current_time = gitcommit.committed_datetime

            print(current_time)

            if not dry_run:
                create_event(data, board, feature, commit, current_time, typ, app)
135
def main():
    """Entry point: upload footprint data and/or print a single report.

    With -d and -z, imports everything under the data directory; with -f,
    parses one JSON report and prints a component/size table.
    """
    parse_args()

    if args.data and args.zephyr_base:
        process_files(args.data, args.zephyr_base, args.dryrun)

    if args.file:
        component_sizes = parse_file(args.file)
        rows = [[component, size] for component, size in component_sizes.items()]
        print(tabulate(rows, headers=['Component', 'Size'], tablefmt='orgtbl'))


if __name__ == "__main__":
    main()