#!/usr/bin/env python
#
# Copyright 2019 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a Python script to process various types of trace data streams in SystemView format.
# Trace data can be provided in multiple trace files (one per CPU). After the processing phase
# the script prints a report for every type of trace data stream that was found.
#
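# Example invocation (illustrative only; the script file name, ELF file and trace file names are assumptions):
#
#   ./sysviewtrace_proc.py -p -b my_app.elf file://cpu0.svdat file://cpu1.svdat
#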

import argparse
import sys
import os.path
import signal
import traceback
import logging
import json

import espytrace.apptrace as apptrace
import espytrace.sysview as sysview


def main():
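
    # The --verbose option indexes this list: 0 shows only CRITICAL messages, 4 shows everything up to DEBUG.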
    verbosity_levels = [
        logging.CRITICAL,
        logging.ERROR,
        logging.WARNING,
        logging.INFO,
        logging.DEBUG
    ]

    parser = argparse.ArgumentParser(description='ESP32 SEGGER SystemView Trace Parsing Tool')

    parser.add_argument('trace_sources', help='Trace data sources. Format: [file://]/path/to/file.', nargs='+', type=str)
    parser.add_argument('--elf-file', '-b', help='Path to program ELF file.', type=str, default='')
    parser.add_argument('--tmo', '-w', help='Data wait timeout in sec. -1: infinite, 0: no wait.', type=int, default=0)
    parser.add_argument('--dump-events', '-d', help='Dump all events.', action='store_true')
    parser.add_argument('--print-events', '-p', help='Print events of selected types. By default only reports are printed.', action='store_true')
    parser.add_argument('--include-events', '-i', help='Event types to be included into report.', type=str, choices=['heap', 'log', 'all'], default='all')
    parser.add_argument('--toolchain', '-t', help='Toolchain prefix.', type=str, default='xtensa-esp32-elf-')
    parser.add_argument('--events-map', '-e', help='Events map file.', type=str, default=os.path.join(os.path.dirname(__file__), 'SYSVIEW_FreeRTOS.txt'))
    parser.add_argument('--to-json', '-j', help='Print JSON.', action='store_true', default=False)
    parser.add_argument('--verbose', '-v', help='Verbosity level. Default 1.', choices=range(0, len(verbosity_levels)), type=int, default=1)
    args = parser.parse_args()
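
    # On Ctrl-C, ask the active trace reader to stop so that trace parsing below can exit cleanly.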
    def sig_int_handler(signum, frame):
        reader.cleanup()

    signal.signal(signal.SIGINT, sig_int_handler)

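    # Decide which stream types (heap and/or log) will be parsed and reported, based on --include-events.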
    include_events = {'heap': False, 'log': False}
    if args.include_events == 'all':
        for k in include_events:
            include_events[k] = True
    elif args.include_events == 'heap':
        include_events['heap'] = True
    elif args.include_events == 'log':
        include_events['log'] = True

    logging.basicConfig(level=verbosity_levels[args.verbose], format='[%(levelname)s] %(message)s')

    # parse trace files
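    # Each trace source is handled by its own parser; the source's position on the command line is used as the core ID.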
    parsers = []
    for i, trace_source in enumerate(args.trace_sources):
        try:
            parser = sysview.SysViewMultiTraceDataParser(print_events=False, core_id=i)
            if include_events['heap']:
                parser.add_stream_parser(sysview.SysViewTraceDataParser.STREAMID_HEAP,
                                         sysview.SysViewHeapTraceDataParser(print_events=False, core_id=i))
            if include_events['log']:
                parser.add_stream_parser(sysview.SysViewTraceDataParser.STREAMID_LOG,
                                         sysview.SysViewLogTraceDataParser(print_events=False, core_id=i))
            parsers.append(parser)
        except Exception as e:
            logging.error("Failed to create data parser (%s)!", e)
            traceback.print_exc()
            sys.exit(2)
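        # Create a reader for this trace source; --tmo controls how long to wait for trace data (-1 means wait forever).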
        reader = apptrace.reader_create(trace_source, args.tmo)
        if not reader:
            logging.error("Failed to create trace reader!")
            sys.exit(2)
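        # parse_trace() pulls data from the reader and decodes it with the help of the events map file (--events-map).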
        try:
            logging.info("Parse trace from '%s'...", trace_source)
            sysview.parse_trace(reader, parser, args.events_map)
            logging.info("Parsing completed.")
        except (apptrace.ReaderTimeoutError, apptrace.ReaderShutdownRequest) as e:
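            # A reader timeout or a shutdown request (e.g. Ctrl-C) is the normal way to stop reading a live trace.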
            logging.info("Stop parsing trace. (%s)", e)
        except Exception as e:
            logging.error("Failed to parse trace (%s)!", e)
            parser.cleanup()
            traceback.print_exc()
            sys.exit(2)
        finally:
            reader.cleanup()

    # merge and process traces
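    # A single multi-stream processor combines the per-core parsers; keep_all_events is only needed for --to-json output.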
    try:
        proc = sysview.SysViewMultiStreamTraceDataProcessor(traces=parsers, print_events=args.dump_events, keep_all_events=True if args.to_json else False)
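        # Attach a processor for each selected stream type; the heap processor additionally receives the
        # toolchain prefix and ELF file (assumed here to be used for resolving addresses recorded in heap events).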
        if include_events['heap']:
            proc.add_stream_processor(sysview.SysViewTraceDataParser.STREAMID_HEAP,
                                      sysview.SysViewHeapTraceDataProcessor(args.toolchain, args.elf_file, root_proc=proc, print_heap_events=args.print_events))
        if include_events['log']:
            proc.add_stream_processor(sysview.SysViewTraceDataParser.STREAMID_LOG,
                                      sysview.SysViewLogTraceDataProcessor(root_proc=proc, print_log_events=args.print_events))
    except Exception as e:
        logging.error("Failed to create data processor (%s)!", e)
        traceback.print_exc()
        sys.exit(2)

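    # Merge the per-core event streams and run the attached stream processors over the combined stream.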
    try:
        logging.info("Process events from '%s'...", args.trace_sources)
        proc.merge_and_process()
        logging.info("Processing completed.")
    except Exception as e:
        logging.error("Failed to process trace (%s)!", e)
        traceback.print_exc()
        sys.exit(2)
    finally:
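        # This block always runs: emit either a JSON dump of all events (--to-json) or the textual reports, then clean up.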
        if args.to_json:
            print(json.dumps(proc, cls=sysview.SysViewTraceDataJsonEncoder, indent=4, separators=(',', ': '), sort_keys=True))
        else:
            proc.print_report()
        proc.cleanup()


if __name__ == '__main__':
    main()