tools: Fix the Python coding style

Author: Roland Dobai, 2018-12-04 13:46:48 +01:00
parent d453cce1b3
commit bfa9610f58
54 changed files with 745 additions and 648 deletions

.flake8

@@ -149,6 +149,8 @@ exclude =
     components/expat/expat,
     components/unity/unity,
     examples/build_system/cmake/import_lib/main/lib/tinyxml2
+    # other third-party libraries
+    tools/kconfig_new/kconfiglib.py,
     # autogenerated scripts
     components/protocomm/python/constants_pb2.py,
     components/protocomm/python/sec0_pb2.py,
@@ -159,67 +161,5 @@ exclude =
     examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py,
     # temporary list (should be empty)
     components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py,
-    tools/ci/apply_bot_filter.py,
-    tools/cmake/convert_to_cmake.py,
-    tools/esp_app_trace/apptrace_proc.py,
-    tools/esp_app_trace/logtrace_proc.py,
-    tools/esp_app_trace/pylibelf/__init__.py,
-    tools/esp_app_trace/pylibelf/constants/__init__.py,
-    tools/esp_app_trace/pylibelf/iterators/__init__.py,
-    tools/esp_app_trace/pylibelf/macros/__init__.py,
-    tools/esp_app_trace/pylibelf/types/__init__.py,
-    tools/esp_app_trace/pylibelf/util/__init__.py,
-    tools/esp_app_trace/pylibelf/util/syms/__init__.py,
-    tools/esp_prov/proto/__init__.py,
-    tools/esp_prov/prov/__init__.py,
-    tools/esp_prov/prov/custom_prov.py,
-    tools/esp_prov/prov/wifi_prov.py,
-    tools/esp_prov/security/__init__.py,
-    tools/esp_prov/security/security.py,
-    tools/esp_prov/security/security0.py,
-    tools/esp_prov/security/security1.py,
-    tools/esp_prov/transport/__init__.py,
-    tools/esp_prov/transport/transport.py,
-    tools/esp_prov/transport/transport_ble.py,
-    tools/esp_prov/transport/transport_console.py,
-    tools/esp_prov/transport/transport_softap.py,
-    tools/esp_prov/utils/__init__.py,
-    tools/esp_prov/utils/convenience.py,
-    tools/gen_esp_err_to_name.py,
-    tools/idf.py,
-    tools/idf_size.py,
-    tools/kconfig_new/confgen.py,
-    tools/kconfig_new/confserver.py,
-    tools/kconfig_new/gen_kconfig_doc.py,
-    tools/kconfig_new/kconfiglib.py,
-    tools/kconfig_new/test/test_confserver.py,
-    tools/ldgen/fragments.py,
-    tools/ldgen/generation.py,
-    tools/ldgen/ldgen.py,
-    tools/ldgen/pyparsing.py,
-    tools/ldgen/sdkconfig.py,
-    tools/ldgen/test/test_fragments.py,
-    tools/ldgen/test/test_generation.py,
+    tools/esp_app_trace/pylibelf,
     tools/mass_mfg/mfg_gen.py,
-    tools/test_idf_monitor/run_test_idf_monitor.py,
-    tools/test_idf_size/test_idf_size.py,
-    tools/tiny-test-fw/CIAssignExampleTest.py,
-    tools/tiny-test-fw/CIAssignUnitTest.py,
-    tools/tiny-test-fw/DUT.py,
-    tools/tiny-test-fw/EnvConfig.py,
-    tools/tiny-test-fw/IDF/IDFApp.py,
-    tools/tiny-test-fw/IDF/IDFDUT.py,
-    tools/tiny-test-fw/Runner.py,
-    tools/tiny-test-fw/TinyFW.py,
-    tools/tiny-test-fw/Utility/CaseConfig.py,
-    tools/tiny-test-fw/Utility/LineChart.py,
-    tools/tiny-test-fw/Utility/PowerControl.py,
-    tools/tiny-test-fw/Utility/SearchCases.py,
-    tools/tiny-test-fw/Utility/__init__.py,
-    tools/tiny-test-fw/docs/conf.py,
-    tools/tiny-test-fw/example.py,
-    tools/unit-test-app/idf_ext.py,
-    tools/unit-test-app/tools/CreateSectionTable.py,
-    tools/unit-test-app/tools/UnitTestParser.py,
-    tools/unit-test-app/unit_test.py,
-    tools/windows/eclipse_make.py,
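
The exclude list above shrinks as files are brought up to standard; every path removed from it is linted again on the next run. A minimal sketch of checking one of the re-included files through flake8's documented legacy API (running the plain `flake8` CLI from the repo root does the same thing; `tools/idf.py` is just an example path):

from flake8.api import legacy as flake8_api

# Should pick up the repo's .flake8 config when run from the repository root
style_guide = flake8_api.get_style_guide()
report = style_guide.check_files(["tools/idf.py"])
print(report.total_errors)  # expected to be 0 once this commit's fixes are applied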

tools/ci/apply_bot_filter.py

@@ -30,7 +30,7 @@ def parse_filter(filter_name):
 def process_filter(execute_by_default, filter_name, ci_name):
     execute = execute_by_default
     # bot message is case insensitive (processed with lower case). so we also convert ci_name to lower case.
     ci_name = ci_name.lower()
@@ -55,8 +55,8 @@ if __name__ == "__main__":
     if os.getenv("BOT_NEEDS_TRIGGER_BY_NAME", "0") == "1":
         execute_by_default = False
-    need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) \
-        and process_filter(execute_by_default, "BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
+    need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) and process_filter(execute_by_default,
+                                                                                                             "BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
     if need_to_execute:
         sys.exit(0)
     else:
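
For context, the two process_filter() calls above are ANDed, so a job only runs when both the stage filter and the job filter allow it. A hypothetical, simplified stand-in (the real parsing lives in parse_filter/process_filter above) that shows the combined behaviour:

import os

def process_filter_sketch(execute_by_default, filter_name, ci_name):
    # Run by default when the bot filter variable is unset; otherwise
    # require a case-insensitive match against the comma-separated filter list
    bot_filter = os.getenv(filter_name)
    if bot_filter is None:
        return execute_by_default
    return ci_name.lower() in bot_filter.lower().split(",")

os.environ["BOT_JOB_FILTER"] = "example_test,unit_test"
print(process_filter_sketch(True, "BOT_STAGE_FILTER", "deploy") and
      process_filter_sketch(True, "BOT_JOB_FILTER", "unit_test"))  # True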

tools/cmake/convert_to_cmake.py

@@ -8,10 +8,10 @@ import subprocess
 import re
 import os.path
 import glob
+import sys

-
 debug = False

 def get_make_variables(path, makefile="Makefile", expected_failure=False, variables={}):
     """
     Given the path to a Makefile of some kind, return a dictionary of all variables defined in this Makefile
@@ -20,9 +20,9 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab
     Overrides IDF_PATH= to avoid recursively evaluating the entire project Makefile structure.
     """
-    variable_setters = [ ("%s=%s" % (k,v)) for (k,v) in variables.items() ]
-    cmdline = ["make", "-rpn", "-C", path, "-f", makefile ] + variable_setters
+    variable_setters = [("%s=%s" % (k,v)) for (k,v) in variables.items()]
+    cmdline = ["make", "-rpn", "-C", path, "-f", makefile] + variable_setters

     if debug:
         print("Running %s..." % (" ".join(cmdline)))
@@ -54,15 +54,16 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab
     return result

+
 def get_component_variables(project_path, component_path):
     make_vars = get_make_variables(component_path,
                                    os.path.join(os.environ["IDF_PATH"],
                                                 "make",
                                                 "component_wrapper.mk"),
                                    expected_failure=True,
-                                   variables = {
-                                       "COMPONENT_MAKEFILE" : os.path.join(component_path, "component.mk"),
-                                       "COMPONENT_NAME" : os.path.basename(component_path),
+                                   variables={
+                                       "COMPONENT_MAKEFILE": os.path.join(component_path, "component.mk"),
+                                       "COMPONENT_NAME": os.path.basename(component_path),
                                        "PROJECT_PATH": project_path,
                                    })
@@ -70,7 +71,7 @@ def get_component_variables(project_path, component_path):
         # Convert to sources
         def find_src(obj):
             obj = os.path.splitext(obj)[0]
-            for ext in [ "c", "cpp", "S" ]:
+            for ext in ["c", "cpp", "S"]:
                 if os.path.exists(os.path.join(component_path, obj) + "." + ext):
                     return obj + "." + ext
             print("WARNING: Can't find source file for component %s COMPONENT_OBJS %s" % (component_path, obj))
@@ -86,7 +87,7 @@ def get_component_variables(project_path, component_path):
         component_srcs = list()
         for component_srcdir in make_vars.get("COMPONENT_SRCDIRS", ".").split(" "):
             component_srcdir_path = os.path.abspath(os.path.join(component_path, component_srcdir))

             srcs = list()
             srcs += glob.glob(os.path.join(component_srcdir_path, "*.[cS]"))
             srcs += glob.glob(os.path.join(component_srcdir_path, "*.cpp"))
@@ -96,7 +97,6 @@ def get_component_variables(project_path, component_path):
             component_srcs += srcs
     make_vars["COMPONENT_SRCS"] = " ".join(component_srcs)
-
     return make_vars
@@ -111,7 +111,7 @@ def convert_project(project_path):
         raise RuntimeError("This project already has a CMakeLists.txt file")

     project_vars = get_make_variables(project_path, expected_failure=True)
-    if not "PROJECT_NAME" in project_vars:
+    if "PROJECT_NAME" not in project_vars:
         raise RuntimeError("PROJECT_NAME does not appear to be defined in IDF project Makefile at %s" % project_path)

     component_paths = project_vars["COMPONENT_PATHS"].split(" ")
@@ -143,6 +143,7 @@ include($ENV{IDF_PATH}/tools/cmake/project.cmake)
     print("Converted project %s" % project_cmakelists)

+
 def convert_component(project_path, component_path):
     if debug:
         print("Converting %s..." % (component_path))

tools/esp_app_trace/apptrace_proc.py

@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 #

+from __future__ import print_function
 import argparse
 import struct
 import sys

+
 class bcolors:
     HEADER = '\033[95m'
     OKBLUE = '\033[94m'
@@ -14,13 +16,14 @@ class bcolors:
     ENDC = '\033[0m'
     BOLD = '\033[1m'
     UNDERLINE = '\033[4m'

+
 def main():
     ESP32_TRACE_BLOCK_HDR_SZ = 8
     ESP32_TRACE_BLOCK_TASK_IDX = 0
     ESP32_TRACE_BLOCK_TS_IDX = 1
     ESP32_TRACE_BLOCK_DATA_IDX = 2

     parser = argparse.ArgumentParser(description='ESP32 App Trace Parse Tool')
     parser.add_argument('file', help='Path to app trace file', type=str)
@@ -31,11 +34,11 @@ def main():
     args = parser.parse_args()

-    print "===================================================================="
+    print("====================================================================")

     try:
         ftrc = open(args.file, 'rb')
     except IOError as e:
-        print "Failed to open trace file (%s)!" % e
+        print("Failed to open trace file (%s)!" % e)
         sys.exit(2)

     passed = True
@@ -44,81 +47,84 @@ def main():
     last_ts = None
     tot_discont = 0
     while True:
-        #ftrc.seek(off)
+        # ftrc.seek(off)
         task = None
         ts = 0
         trc_buf = ftrc.read(args.block_len)
         if len(trc_buf) == 0:
-            # print 'EOF'
+            # print('EOF')
             break
         trc_data = struct.unpack('<LL%sB' % (len(trc_buf) - ESP32_TRACE_BLOCK_HDR_SZ), trc_buf)
         if len(trc_data):
-            # print "%x %x, len %d" % (trc_data[0], trc_data[1], len(trc_data) - 2)
-            # print trc_data[2:]
+            # print("%x %x, len %d" % (trc_data[0], trc_data[1], len(trc_data) - 2))
+            # print(trc_data[2:])
             # sys.exit(0)
             task = trc_data[ESP32_TRACE_BLOCK_TASK_IDX]
             ts = trc_data[ESP32_TRACE_BLOCK_TS_IDX]
-            # print ts
+            # print(ts)
             if last_ts and last_ts >= ts:
-                # print "Global TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off)
+                # print("Global TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task,
+                #                                                                      data_stats[task]['stamp'], off))
                 if args.print_details:
-                    print "Global TS discontinuity %x -> %x, task %x at %x" % (last_ts, ts, task, off)
+                    print("Global TS discontinuity %x -> %x, task %x at %x" % (last_ts, ts, task, off))
                 # tot_discont += 1
                 # passed = False
             last_ts = ts
-            if not task in data_stats:
-                print "%x: NEW TASK" % task
-                data_stats[task] = {'stamp' : trc_data[ESP32_TRACE_BLOCK_DATA_IDX], 'last_ts' : ts, 'count' : 1, 'discont_offs' : [], 'inv_stamps_offs' : []}
+            if task not in data_stats:
+                print("%x: NEW TASK" % task)
+                data_stats[task] = {'stamp': trc_data[ESP32_TRACE_BLOCK_DATA_IDX], 'last_ts': ts, 'count': 1, 'discont_offs': [], 'inv_stamps_offs': []}
             else:
                 if data_stats[task]['last_ts'] == ts:
-                    print "Task TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off)
+                    print("Task TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off))
                     data_stats[task]['discont_offs'].append(off)
                     tot_discont += 1
                     passed = False
                 data_stats[task]['last_ts'] = ts
                 data_stats[task]['count'] += 1
             if len(trc_data) > ESP32_TRACE_BLOCK_DATA_IDX:
-                # print "DATA = %x %x %x %x" % (trc_data[-4], trc_data[-3], trc_data[-2], trc_data[-1])
+                # print("DATA = %x %x %x %x" % (trc_data[-4], trc_data[-3], trc_data[-2], trc_data[-1]))
                 if args.print_tasks:
-                    print "Task[%d] %x, ts %08x, stamp %x" % (off/args.block_len, task, ts, trc_data[ESP32_TRACE_BLOCK_DATA_IDX])
+                    print("Task[%d] %x, ts %08x, stamp %x" % (off / args.block_len, task, ts, trc_data[ESP32_TRACE_BLOCK_DATA_IDX]))
             else:
-                print "%x: NO DATA" % task
+                print("%x: NO DATA" % task)
         else:
-            print "Failed to unpack data!"
+            print("Failed to unpack data!")
             sys.exit(2)

         # check data
         for i in range(ESP32_TRACE_BLOCK_DATA_IDX, len(trc_data)):
             if trc_data[i] != data_stats[task]['stamp']:
                 if not args.no_errors:
-                    print "Invalid stamp %x->%x at %x, task %x" % (data_stats[task]['stamp'], trc_data[i], off + ESP32_TRACE_BLOCK_HDR_SZ + i, task)
+                    print("Invalid stamp %x->%x at %x, task %x" % (data_stats[task]['stamp'], trc_data[i], off + ESP32_TRACE_BLOCK_HDR_SZ + i, task))
                 passed = False
                 data_stats[task]['stamp'] = trc_data[i]
                 data_stats[task]['inv_stamps_offs'].append(off)
                 # break
         if len(trc_buf) < args.block_len:
-            print 'Last block (not full)'
+            print('Last block (not full)')
             break
-        if data_stats[task]['stamp'] != None:
+        if data_stats[task]['stamp'] is not None:
             data_stats[task]['stamp'] = (data_stats[task]['stamp'] + 1) & 0xFF
-            # print "stamp=%x" % data_stats[task][ESP32_TRACE_STAMP_IDX]
+            # print("stamp=%x" % data_stats[task][ESP32_TRACE_STAMP_IDX])
         off += args.block_len

     ftrc.close()

-    print "===================================================================="
-    print "Trace size %d bytes, discont %d\n" % (off, tot_discont)
+    print("====================================================================")
+    print("Trace size %d bytes, discont %d\n" % (off, tot_discont))
     for t in data_stats:
-        print "Task %x. Total count %d. Inv stamps %d. TS Discontinuities %d." % (t, data_stats[t]['count'], len(data_stats[t]['inv_stamps_offs']), len(data_stats[t]['discont_offs']))
+        print("Task %x. Total count %d. Inv stamps %d. TS Discontinuities %d." % (t, data_stats[t]['count'],
+              len(data_stats[t]['inv_stamps_offs']), len(data_stats[t]['discont_offs'])))
         if args.print_details:
-            print 'Invalid stamps offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['inv_stamps_offs']))
-            print 'TS Discontinuities offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['discont_offs']))
+            print('Invalid stamps offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['inv_stamps_offs'])))
+            print('TS Discontinuities offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['discont_offs'])))
-        print "\n"
+        print("\n")

     if passed:
-        print "Data - OK"
+        print("Data - OK")
     else:
-        print "Data - FAILED!"
+        print("Data - FAILED!")

+
 if __name__ == '__main__':
     main()
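
The parser above relies on the '<LL%sB' unpack format: two little-endian 32-bit words (the task handle and the timestamp) followed by one byte per remaining data byte. A self-contained sketch with a fabricated 12-byte trace block:

import struct

ESP32_TRACE_BLOCK_HDR_SZ = 8
trc_buf = struct.pack('<LL4B', 0x12345678, 0xBEEF, 7, 7, 7, 7)  # fake block
trc_data = struct.unpack('<LL%sB' % (len(trc_buf) - ESP32_TRACE_BLOCK_HDR_SZ), trc_buf)
print(trc_data)  # (305419896, 48879, 7, 7, 7, 7): task, timestamp, four stamp bytes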

tools/esp_app_trace/logtrace_proc.py

@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 #

+from __future__ import print_function
 import argparse
 import struct
 import sys
@@ -8,7 +9,8 @@ import pylibelf as elf
 import pylibelf.util as elfutil
 import pylibelf.iterators as elfiter
 import pylibelf.constants as elfconst
-from ctypes import *
+import ctypes

+
 class ESPLogTraceParserError(RuntimeError):
     def __init__(self, message):
@@ -44,7 +46,7 @@ def logtrace_parse(fname):
         if len(trc_buf) < ESP32_LOGTRACE_HDR_SZ:
             # print "EOF"
             if len(trc_buf) > 0:
-                print "Unprocessed %d bytes of log record header!" % len(trc_buf)
+                print("Unprocessed %d bytes of log record header!" % len(trc_buf))
                 # data_ok = False
             break
         try:
@@ -58,17 +60,17 @@ def logtrace_parse(fname):
         except IOError as e:
             raise ESPLogTraceParserError("Failed to read log record args (%s)!" % e)
         if len(trc_buf) < args_sz:
-            # print "EOF"
+            # print("EOF")
             if len(trc_buf) > 0:
-                print "Unprocessed %d bytes of log record args!" % len(trc_buf)
+                print("Unprocessed %d bytes of log record args!" % len(trc_buf))
                 # data_ok = False
             break
         try:
             log_args = struct.unpack('<%sL' % nargs, trc_buf)
         except struct.error as e:
             raise ESPLogTraceParserError("Failed to unpack log record args (%s)!" % e)
-        # print log_args
+        # print(log_args)
         recs.append(ESPLogTraceRecord(fmt_addr, list(log_args)))

     ftrc.close()
     # sorted(recs, key=lambda rec: rec.fmt_addr)
@@ -83,9 +85,9 @@ def logtrace_get_str_from_elf(felf, str_addr):
             continue
         if str_addr < hdr.sh_addr or str_addr >= hdr.sh_addr + hdr.sh_size:
             continue
-        # print "Found SECT: %x..%x @ %x" % (hdr.sh_addr, hdr.sh_addr + hdr.sh_size, str_addr - hdr.sh_addr)
+        # print("Found SECT: %x..%x @ %x" % (hdr.sh_addr, hdr.sh_addr + hdr.sh_size, str_addr - hdr.sh_addr))
         sec_data = elfiter.getOnlyData(sect).contents
-        buf = cast(sec_data.d_buf, POINTER(c_char))
+        buf = ctypes.cast(sec_data.d_buf, ctypes.POINTER(ctypes.c_char))
         for i in range(str_addr - hdr.sh_addr, hdr.sh_size):
             if buf[i] == "\0":
                 break
@@ -94,6 +96,7 @@ def logtrace_get_str_from_elf(felf, str_addr):
         return tgt_str
     return None

+
 def logtrace_formated_print(recs, elfname, no_err):
     try:
         felf = elfutil.open_elf(elfname)
@@ -105,30 +108,31 @@ def logtrace_formated_print(recs, elfname, no_err):
         i = 0
         prcnt_idx = 0
         while i < len(lrec.args):
-            prcnt_idx = fmt_str.find('%', prcnt_idx, -2) # TODO: check str ending with %
+            prcnt_idx = fmt_str.find('%', prcnt_idx, -2)  # TODO: check str ending with %
             if prcnt_idx == -1:
                 break
-            prcnt_idx += 1 # goto next char
+            prcnt_idx += 1  # goto next char
             if fmt_str[prcnt_idx] == 's':
                 # find string
                 arg_str = logtrace_get_str_from_elf(felf, lrec.args[i])
                 if arg_str:
                     lrec.args[i] = arg_str
             i += 1
-        # print "\nFmt = {%s}, args = %d/%s" % lrec
+        # print("\nFmt = {%s}, args = %d/%s" % lrec)
         fmt_str = fmt_str.replace('%p', '%x')
-        # print "=====> " + fmt_str % lrec.args
+        # print("=====> " + fmt_str % lrec.args)
         try:
-            print fmt_str % tuple(lrec.args),
-            # print ".",
+            print(fmt_str % tuple(lrec.args), end='')
+            # print(".", end='')
             pass
         except Exception as e:
             if not no_err:
-                print "Print error (%s)" % e
-                print "\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args)
+                print("Print error (%s)" % e)
+                print("\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args))

     elf.elf_end(felf)

+
 def main():
     parser = argparse.ArgumentParser(description='ESP32 Log Trace Parsing Tool')
@@ -141,23 +145,24 @@ def main():
     # parse trace file
     try:
-        print "Parse trace file '%s'..." % args.trace_file
-        lrecs = logtrace_parse(args.trace_file);
-        print "Parsing completed."
+        print("Parse trace file '%s'..." % args.trace_file)
+        lrecs = logtrace_parse(args.trace_file)
+        print("Parsing completed.")
     except ESPLogTraceParserError as e:
-        print "Failed to parse log trace (%s)!" % e
+        print("Failed to parse log trace (%s)!" % e)
         sys.exit(2)
     # print recs
     # get format strings and print info
-    print "===================================================================="
+    print("====================================================================")
     try:
-        logtrace_formated_print(lrecs, args.elf_file, args.no_errors);
+        logtrace_formated_print(lrecs, args.elf_file, args.no_errors)
     except ESPLogTraceParserError as e:
-        print "Failed to print log trace (%s)!" % e
+        print("Failed to print log trace (%s)!" % e)
         sys.exit(2)
-    print "\n====================================================================\n"
-    print "Log records count: %d" % len(lrecs)
+    print("\n====================================================================\n")
+    print("Log records count: %d" % len(lrecs))

+
 if __name__ == '__main__':
     main()
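
Replacing `from ctypes import *` with `import ctypes` keeps names such as cast and POINTER namespaced, which is what flake8's F403/F405 checks push towards. A small Python 3 sketch of the same cast-and-scan pattern used in logtrace_get_str_from_elf(), run on a plain buffer instead of ELF section data:

import ctypes

raw = ctypes.create_string_buffer(b"format %s\0trailing junk")
buf = ctypes.cast(raw, ctypes.POINTER(ctypes.c_char))
tgt_str = b""
i = 0
while buf[i] != b"\0":  # scan up to the NUL terminator
    tgt_str += buf[i]
    i += 1
print(tgt_str.decode())  # format %s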

tools/esp_prov/proto/__init__.py

@@ -29,4 +29,5 @@ wifi_constants_pb2 = imp.load_source("wifi_constants_pb2", idf_path + "/componen
 wifi_config_pb2 = imp.load_source("wifi_config_pb2", idf_path + "/components/wifi_provisioning/python/wifi_config_pb2.py")

 # custom_provisioning component related python files generated from .proto files
-custom_config_pb2 = imp.load_source("custom_config_pb2", idf_path + "/examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py")
+custom_config_pb2 = imp.load_source("custom_config_pb2", idf_path +
+                                    "/examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py")

tools/esp_prov/prov/__init__.py

@@ -13,5 +13,5 @@
 # limitations under the License.
 #

-from .wifi_prov import *
-from .custom_prov import *
+from .wifi_prov import *  # noqa F403
+from .custom_prov import *  # noqa F403

tools/esp_prov/prov/custom_prov.py

@@ -21,9 +21,11 @@ from future.utils import tobytes
 import utils
 import proto

+
 def print_verbose(security_ctx, data):
     if (security_ctx.verbose):
         print("++++ " + data + " ++++")

+
 def custom_config_request(security_ctx, info, version):
     # Form protobuf request packet from custom-config data
@@ -34,6 +36,7 @@ def custom_config_request(security_ctx, info, version):
     print_verbose(security_ctx, "Client -> Device (CustomConfig cmd) " + utils.str_to_hexstr(enc_cmd))
     return enc_cmd

+
 def custom_config_response(security_ctx, response_data):
     # Interpret protobuf response packet
     decrypt = security_ctx.decrypt_data(tobytes(response_data))

tools/esp_prov/prov/wifi_prov.py

@@ -21,9 +21,11 @@ from future.utils import tobytes
 import utils
 import proto

+
 def print_verbose(security_ctx, data):
     if (security_ctx.verbose):
         print("++++ " + data + " ++++")

+
 def config_get_status_request(security_ctx):
     # Form protobuf request packet for GetStatus command
@@ -35,6 +37,7 @@ def config_get_status_request(security_ctx):
     print_verbose(security_ctx, "Client -> Device (Encrypted CmdGetStatus) " + utils.str_to_hexstr(encrypted_cfg))
     return encrypted_cfg

+
 def config_get_status_response(security_ctx, response_data):
     # Interpret protobuf response packet from GetStatus command
     decrypted_message = security_ctx.decrypt_data(tobytes(response_data))
@@ -56,6 +59,7 @@ def config_get_status_response(security_ctx, response_data):
             print("++++ Failure reason: " + "Incorrect SSID ++++")
     return cmd_resp1.resp_get_status.sta_state

+
 def config_set_config_request(security_ctx, ssid, passphrase):
     # Form protobuf request packet for SetConfig command
     cmd = proto.wifi_config_pb2.WiFiConfigPayload()
@@ -66,6 +70,7 @@ def config_set_config_request(security_ctx, ssid, passphrase):
     print_verbose(security_ctx, "Client -> Device (SetConfig cmd) " + utils.str_to_hexstr(enc_cmd))
     return enc_cmd

+
 def config_set_config_response(security_ctx, response_data):
     # Interpret protobuf response packet from SetConfig command
     decrypt = security_ctx.decrypt_data(tobytes(response_data))
@@ -74,6 +79,7 @@ def config_set_config_response(security_ctx, response_data):
     print_verbose(security_ctx, "SetConfig status " + str(cmd_resp4.resp_set_config.status))
     return cmd_resp4.resp_set_config.status

+
 def config_apply_config_request(security_ctx):
     # Form protobuf request packet for ApplyConfig command
     cmd = proto.wifi_config_pb2.WiFiConfigPayload()
@@ -82,6 +88,7 @@ def config_apply_config_request(security_ctx):
     print_verbose(security_ctx, "Client -> Device (ApplyConfig cmd) " + utils.str_to_hexstr(enc_cmd))
     return enc_cmd

+
 def config_apply_config_response(security_ctx, response_data):
     # Interpret protobuf response packet from ApplyConfig command
     decrypt = security_ctx.decrypt_data(tobytes(response_data))

tools/esp_prov/security/__init__.py

@@ -13,5 +13,5 @@
 # limitations under the License.
 #

-from .security0 import *
-from .security1 import *
+from .security0 import *  # noqa: F403, F401
+from .security1 import *  # noqa: F403, F401

tools/esp_prov/security/security.py

@@ -15,7 +15,7 @@
 # Base class for protocomm security

 class Security:
     def __init__(self, security_session):
         self.security_session = security_session

tools/esp_prov/security/security0.py

@@ -19,9 +19,9 @@
 from __future__ import print_function
 from future.utils import tobytes

-import utils
 import proto
-from .security import *
+from .security import Security

+
 class Security0(Security):
     def __init__(self, verbose):

tools/esp_prov/security/security1.py

@@ -21,7 +21,7 @@ from future.utils import tobytes
 import utils
 import proto

-from .security import *
+from .security import Security

 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes
@@ -30,6 +30,7 @@ from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
 import session_pb2

+
 # Enum for state of protocomm_security1 FSM
 class security_state:
     REQUEST1 = 0
@@ -37,6 +38,7 @@ class security_state:
     RESPONSE2 = 2
     FINISHED = 3

+
 def xor(a, b):
     # XOR two inputs of type `bytes`
     ret = bytearray()
@@ -50,6 +52,7 @@ def xor(a, b):
     # Convert bytearray to bytes
     return bytes(ret)

+
 class Security1(Security):
     def __init__(self, pop, verbose):
         # Initialize state of the security1 FSM

tools/esp_prov/transport/__init__.py

@@ -13,6 +13,6 @@
 # limitations under the License.
 #

-from .transport_console import *
-from .transport_softap import *
-from .transport_ble import *
+from .transport_console import *  # noqa: F403, F401
+from .transport_softap import *  # noqa: F403, F401
+from .transport_ble import *  # noqa: F403, F401

tools/esp_prov/transport/transport.py

@@ -17,6 +17,7 @@
 import abc

+
 class Transport():
     @abc.abstractmethod

tools/esp_prov/transport/transport_ble.py

@@ -15,10 +15,11 @@
 from __future__ import print_function

-from .transport import *
+from .transport import Transport

 from . import ble_cli

+
 class Transport_BLE(Transport):
     def __init__(self, devname, service_uuid, nu_lookup):
         # Expect service UUID like '0000ffff-0000-1000-8000-00805f9b34fb'
@@ -32,7 +33,7 @@ class Transport_BLE(Transport):
         self.cli = ble_cli.get_client()

         # Use client to connect to BLE device and bind to service
-        if not self.cli.connect(devname = devname, iface = 'hci0', srv_uuid = service_uuid):
+        if not self.cli.connect(devname=devname, iface='hci0', srv_uuid=service_uuid):
             raise RuntimeError("Failed to initialize transport")

         # Check if expected characteristics are provided by the service
@@ -44,7 +45,7 @@ class Transport_BLE(Transport):
         # Make sure device is disconnected before application gets closed
         try:
             self.disconnect()
-        except:
+        except Exception:
             pass

     def disconnect(self):

tools/esp_prov/transport/transport_console.py

@@ -18,11 +18,12 @@ from builtins import input
 import utils

-from .transport import *
+from .transport import Transport

+
 class Transport_Console(Transport):

-    def send_data(self, path, data, session_id = 0):
+    def send_data(self, path, data, session_id=0):
         print("Client->Device msg :", path, session_id, utils.str_to_hexstr(data))
         try:
             resp = input("Enter device->client msg : ")

tools/esp_prov/transport/transport_softap.py

@@ -18,7 +18,8 @@ from future.utils import tobytes
 import http.client

-from .transport import *
+from .transport import Transport

+
 class Transport_Softap(Transport):
     def __init__(self, url):
@@ -36,4 +37,4 @@ class Transport_Softap(Transport):
             raise RuntimeError("Server responded with error code " + str(response.status))

     def send_data(self, ep_name, data):
-        return self._send_post_request('/'+ ep_name, data)
+        return self._send_post_request('/' + ep_name, data)

tools/esp_prov/utils/__init__.py

@@ -13,4 +13,4 @@
 # limitations under the License.
 #

-from .convenience import *
+from .convenience import *  # noqa: F403, F401

tools/esp_prov/utils/convenience.py

@@ -15,15 +15,17 @@
 # Convenience functions for commonly used data type conversions

+
 def str_to_hexstr(string):
     # Form hexstr by appending ASCII codes (in hex) corresponding to
     # each character in the input string
     return ''.join('{:02x}'.format(ord(c)) for c in string)

+
 def hexstr_to_str(hexstr):
     # Prepend 0 (if needed) to make the hexstr length an even number
-    if len(hexstr)%2 == 1:
+    if len(hexstr) % 2 == 1:
         hexstr = '0' + hexstr
     # Interpret consecutive pairs of hex characters as 8 bit ASCII codes
     # and append characters corresponding to each code to form the string
-    return ''.join(chr(int(hexstr[2*i:2*i+2], 16)) for i in range(len(hexstr)//2))
+    return ''.join(chr(int(hexstr[2 * i: 2 * i + 2], 16)) for i in range(len(hexstr) // 2))
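
A round-trip check of the two helpers above (copied verbatim so the snippet is self-contained):

def str_to_hexstr(string):
    return ''.join('{:02x}'.format(ord(c)) for c in string)

def hexstr_to_str(hexstr):
    if len(hexstr) % 2 == 1:
        hexstr = '0' + hexstr
    return ''.join(chr(int(hexstr[2 * i: 2 * i + 2], 16)) for i in range(len(hexstr) // 2))

assert str_to_hexstr("OK!") == "4f4b21"
assert hexstr_to_str("4f4b21") == "OK!"
assert hexstr_to_str("f") == "\x0f"  # odd-length input gets a leading zero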

tools/gen_esp_err_to_name.py

@@ -40,17 +40,18 @@ import textwrap
 import functools

 # list files here which should not be parsed
-ignore_files = [ 'components/mdns/test_afl_fuzz_host/esp32_compat.h' ]
+ignore_files = ['components/mdns/test_afl_fuzz_host/esp32_compat.h']

 # add directories here which should not be parsed
-ignore_dirs = ( 'examples' )
+ignore_dirs = ('examples')

 # macros from here have higher priorities in case of collisions
-priority_headers = [ 'components/esp32/include/esp_err.h' ]
+priority_headers = ['components/esp32/include/esp_err.h']

-err_dict = collections.defaultdict(list) #identified errors are stored here; mapped by the error code
-rev_err_dict = dict() #map of error string to error code
-unproc_list = list() #errors with unknown codes which depend on other errors
+err_dict = collections.defaultdict(list)  # identified errors are stored here; mapped by the error code
+rev_err_dict = dict()  # map of error string to error code
+unproc_list = list()  # errors with unknown codes which depend on other errors

+
 class ErrItem(object):
     """
@@ -62,13 +63,14 @@ class ErrItem(object):
     - rel_str - (optional) error string which is a base for the error
     - rel_off - (optional) offset in relation to the base error
     """
-    def __init__(self, name, file, include_as = None, comment = "", rel_str = "", rel_off = 0):
+    def __init__(self, name, file, include_as=None, comment="", rel_str="", rel_off=0):
         self.name = name
         self.file = file
         self.include_as = include_as
         self.comment = comment
         self.rel_str = rel_str
         self.rel_off = rel_off
+
     def __str__(self):
         ret = self.name + " from " + self.file
         if (self.rel_str != ""):
@@ -76,6 +78,7 @@ class ErrItem(object):
         if self.comment != "":
             ret += " // " + self.comment
         return ret
+
     def __cmp__(self, other):
         if self.file in priority_headers and other.file not in priority_headers:
             return -1
@@ -99,6 +102,7 @@ class ErrItem(object):
         else:
             return 0

+
 class InputError(RuntimeError):
     """
     Represents and error on the input
@@ -106,6 +110,7 @@ class InputError(RuntimeError):
     def __init__(self, p, e):
         super(InputError, self).__init__(p + ": " + e)

+
 def process(line, idf_path, include_as):
     """
     Process a line of text from file idf_path (relative to IDF project).
@@ -129,18 +134,18 @@ def process(line, idf_path, include_as):
     m = re.search(r'/\*!<(.+?(?=\*/))', todo_str)
     if m:
         comment = m.group(1).strip()
-        todo_str = todo_str[:m.start()].strip() # keep just the part before the comment
+        todo_str = todo_str[:m.start()].strip()  # keep just the part before the comment

     # identify possible parentheses ()
     m = re.search(r'\((.+)\)', todo_str)
     if m:
-        todo_str = m.group(1) #keep what is inside the parentheses
+        todo_str = m.group(1)  # keep what is inside the parentheses

     # identify BASE error code, e.g. from the form BASE + 0x01
     m = re.search(r'\s*(\w+)\s*\+(.+)', todo_str)
     if m:
-        related = m.group(1) # BASE
-        todo_str = m.group(2) # keep and process only what is after "BASE +"
+        related = m.group(1)  # BASE
+        todo_str = m.group(2)  # keep and process only what is after "BASE +"

     # try to match a hexadecimal number
     m = re.search(r'0x([0-9A-Fa-f]+)', todo_str)
@@ -153,8 +158,8 @@ def process(line, idf_path, include_as):
         num = int(m.group(1), 10)
     elif re.match(r'\w+', todo_str):
         # It is possible that there is no number, e.g. #define ERROR BASE
-        related = todo_str # BASE error
-        num = 0 # (BASE + 0)
+        related = todo_str  # BASE error
+        num = 0  # (BASE + 0)
     else:
         raise InputError(idf_path, "Cannot parse line %s" % line)
@@ -168,6 +173,7 @@ def process(line, idf_path, include_as):
     # Store the information available now and compute the error code later
     unproc_list.append(ErrItem(words[1], idf_path, include_as, comment, related, num))

+
 def process_remaining_errors():
     """
     Create errors which could not be processed before because the error code
@@ -180,7 +186,6 @@ def process_remaining_errors():
     for item in unproc_list:
         if item.rel_str in rev_err_dict:
             base_num = rev_err_dict[item.rel_str]
-            base = err_dict[base_num][0]
             num = base_num + item.rel_off
             err_dict[num].append(ErrItem(item.name, item.file, item.include_as, item.comment))
             rev_err_dict[item.name] = num
@@ -189,6 +194,7 @@ def process_remaining_errors():
     del unproc_list[:]

+
 def path_to_include(path):
     """
     Process the path (relative to the IDF project) in a form which can be used
@@ -207,7 +213,8 @@ def path_to_include(path):
         # no include in the path -> use just the filename
         return os.path.basename(path)
     else:
-        return os.sep.join(spl_path[i+1:]) # subdirectories and filename in "include"
+        return os.sep.join(spl_path[i + 1:])  # subdirectories and filename in "include"

+
 def print_warning(error_list, error_code):
     """
@@ -217,6 +224,7 @@ def print_warning(error_list, error_code):
     for e in error_list:
         print(" " + str(e))

+
 def max_string_width():
     max = 0
     for k in err_dict:
@@ -226,6 +234,7 @@ def max_string_width():
             max = x
     return max

+
 def generate_c_output(fin, fout):
     """
     Writes the output to fout based on th error dictionary err_dict and
@@ -247,7 +256,7 @@ def generate_c_output(fin, fout):
     include_list = list(includes)
     include_list.sort()

-    max_width = max_string_width() + 17 + 1 # length of " ERR_TBL_IT()," with spaces is 17
+    max_width = max_string_width() + 17 + 1  # length of " ERR_TBL_IT()," with spaces is 17
     max_decdig = max(len(str(k)) for k in err_dict)

     for line in fin:
@@ -271,7 +280,7 @@ def generate_c_output(fin, fout):
                 fout.write("# ifdef %s\n" % e.name)
                 fout.write(table_line)
                 hexnum_length = 0
-                if k > 0: # negative number and zero should be only ESP_FAIL and ESP_OK
+                if k > 0:  # negative number and zero should be only ESP_FAIL and ESP_OK
                     hexnum = " 0x%x" % k
                     hexnum_length = len(hexnum)
                     fout.write(hexnum)
@@ -280,7 +289,7 @@ def generate_c_output(fin, fout):
                         fout.write(" %s" % e.comment)
                     else:
                         indent = " " * (len(table_line) + hexnum_length + 1)
-                        w = textwrap.wrap(e.comment, width=120, initial_indent = indent, subsequent_indent = indent)
+                        w = textwrap.wrap(e.comment, width=120, initial_indent=indent, subsequent_indent=indent)
                         # this couldn't be done with initial_indent because there is no initial_width option
                         fout.write(" %s" % w[0].strip())
                         for i in range(1, len(w)):
@@ -289,6 +298,7 @@ def generate_c_output(fin, fout):
         else:
             fout.write(line)

+
 def generate_rst_output(fout):
     for k in sorted(err_dict.keys()):
         v = err_dict[k][0]
@@ -301,6 +311,7 @@ def generate_rst_output(fout):
             fout.write(': {}'.format(v.comment))
         fout.write('\n\n')

+
 def main():
     if 'IDF_PATH' in os.environ:
         idf_path = os.environ['IDF_PATH']
@@ -348,5 +359,6 @@ def main():
     with open(args.c_input, 'r', encoding='utf-8') as fin, open(args.c_output, 'w', encoding='utf-8') as fout:
         generate_c_output(fin, fout)

+
 if __name__ == "__main__":
     main()
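
The two regexes in process() above split a definition such as `(MY_BASE + 0x01)` into a base macro and an offset. A standalone sketch, with a made-up macro name:

import re

todo_str = "MY_BASE + 0x01"  # hypothetical content after the parentheses are stripped
m = re.search(r'\s*(\w+)\s*\+(.+)', todo_str)
related = m.group(1)  # 'MY_BASE'
rest = m.group(2)     # ' 0x01'
num = int(re.search(r'0x([0-9A-Fa-f]+)', rest).group(1), 16)
print(related, num)   # MY_BASE 1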

tools/idf.py

@@ -36,18 +36,20 @@ import re
 import shutil
 import json

+
 class FatalError(RuntimeError):
     """
     Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build proces.s
     """
     pass

+
 # Use this Python interpreter for any subprocesses we launch
-PYTHON=sys.executable
+PYTHON = sys.executable

 # note: os.environ changes don't automatically propagate to child processes,
 # you have to pass env=os.environ explicitly anywhere that we create a process
-os.environ["PYTHON"]=sys.executable
+os.environ["PYTHON"] = sys.executable

 # Make flavors, across the various kinds of Windows environments & POSIX...
 if "MSYSTEM" in os.environ:  # MSYS
@@ -60,13 +62,15 @@ else:
     MAKE_CMD = "make"
     MAKE_GENERATOR = "Unix Makefiles"

-GENERATORS = [
-    # ('generator name', 'build command line', 'version command line', 'verbose flag')
-    ("Ninja", [ "ninja" ], [ "ninja", "--version" ], "-v"),
-    (MAKE_GENERATOR, [ MAKE_CMD, "-j", str(multiprocessing.cpu_count()+2) ], [ "make", "--version" ], "VERBOSE=1"),
-]
-GENERATOR_CMDS = dict( (a[0], a[1]) for a in GENERATORS )
-GENERATOR_VERBOSE = dict( (a[0], a[3]) for a in GENERATORS )
+GENERATORS = \
+    [
+        # ('generator name', 'build command line', 'version command line', 'verbose flag')
+        ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
+        (MAKE_GENERATOR, [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)], ["make", "--version"], "VERBOSE=1"),
+    ]
+GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
+GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)

+
 def _run_tool(tool_name, args, cwd):
     def quote_arg(arg):
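
A quick check of what the tidied dict((a[0], a[1]) ...) lines in the hunk above build from the GENERATORS table (values shortened here for illustration):

GENERATORS = [
    ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
    ("Unix Makefiles", ["make", "-j", "4"], ["make", "--version"], "VERBOSE=1"),
]
GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)
print(GENERATOR_CMDS["Ninja"], GENERATOR_VERBOSE["Unix Makefiles"])  # ['ninja'] VERBOSE=1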
@@ -83,6 +87,7 @@ def _run_tool(tool_name, args, cwd):
     except subprocess.CalledProcessError as e:
         raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode))

+
 def check_environment():
     """
     Verify the environment contains the top-level tools we need to operate
@@ -96,7 +101,8 @@ def check_environment():
     if "IDF_PATH" in os.environ:
         set_idf_path = os.path.realpath(os.environ["IDF_PATH"])
         if set_idf_path != detected_idf_path:
-            print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. Using the environment variable directory, but results may be unexpected..."
+            print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. "
+                  "Using the environment variable directory, but results may be unexpected..."
                   % (set_idf_path, detected_idf_path))
     else:
         print("Setting IDF_PATH environment variable: %s" % detected_idf_path)
@@ -105,19 +111,21 @@ def check_environment():
     # check Python dependencies
     print("Checking Python dependencies...")
     try:
-        subprocess.check_call([ os.environ["PYTHON"],
-                                os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py")],
+        subprocess.check_call([os.environ["PYTHON"],
+                               os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py")],
                               env=os.environ)
     except subprocess.CalledProcessError:
         raise SystemExit(1)

+
 def executable_exists(args):
     try:
         subprocess.check_output(args)
         return True
-    except:
+    except Exception:
         return False

+
 def detect_cmake_generator():
     """
     Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
@@ -127,6 +135,7 @@ def detect_cmake_generator():
             return generator
     raise FatalError("To use idf.py, either the 'ninja' or 'GNU make' build tool must be available in the PATH")

+
 def _ensure_build_directory(args, always_run_cmake=False):
     """Check the build directory exists and that cmake has been run there.
@@ -158,15 +167,15 @@ def _ensure_build_directory(args, always_run_cmake=False):
         try:
             cmake_args = ["cmake", "-G", args.generator, "-DPYTHON_DEPS_CHECKED=1"]
             if not args.no_warnings:
-                cmake_args += [ "--warn-uninitialized" ]
+                cmake_args += ["--warn-uninitialized"]
             if args.no_ccache:
-                cmake_args += [ "-DCCACHE_DISABLE=1" ]
+                cmake_args += ["-DCCACHE_DISABLE=1"]
             if args.define_cache_entry:
                 cmake_args += ["-D" + d for d in args.define_cache_entry]
-            cmake_args += [ project_dir]
+            cmake_args += [project_dir]

             _run_tool("cmake", cmake_args, cwd=args.build_dir)
-        except:
+        except Exception:
             # don't allow partially valid CMakeCache.txt files,
             # to keep the "should I run cmake?" logic simple
             if os.path.exists(cache_path):
@@ -183,13 +192,13 @@ def _ensure_build_directory(args, always_run_cmake=False):
         args.generator = generator  # reuse the previously configured generator, if none was given
     if generator != args.generator:
         raise FatalError("Build is configured for generator '%s' not '%s'. Run 'idf.py fullclean' to start again."
-                            % (generator, args.generator))
+                         % (generator, args.generator))

     try:
         home_dir = cache["CMAKE_HOME_DIRECTORY"]
         if os.path.normcase(os.path.realpath(home_dir)) != os.path.normcase(os.path.realpath(project_dir)):
             raise FatalError("Build directory '%s' configured for project '%s' not '%s'. Run 'idf.py fullclean' to start again."
-                                % (build_dir, os.path.realpath(home_dir), os.path.realpath(project_dir)))
+                             % (build_dir, os.path.realpath(home_dir), os.path.realpath(project_dir)))
     except KeyError:
         pass  # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet
@@ -209,9 +218,10 @@ def parse_cmakecache(path):
             # groups are name, type, value
             m = re.match(r"^([^#/:=]+):([^:=]+)=(.+)\n$", line)
             if m:
                 result[m.group(1)] = m.group(3)
     return result

+
 def build_target(target_name, args):
     """
     Execute the target build system to build target 'target_name'
@@ -228,11 +238,11 @@ def build_target(target_name, args):
         # will point to files in another project, if these files are perfect duplicates of each other.)
         #
         # It would be nicer to set these from cmake, but there's no cross-platform way to set build-time environment
-        #os.environ["CCACHE_BASEDIR"] = args.build_dir
-        #os.environ["CCACHE_NO_HASHDIR"] = "1"
+        # os.environ["CCACHE_BASEDIR"] = args.build_dir
+        # os.environ["CCACHE_NO_HASHDIR"] = "1"
         pass
     if args.verbose:
-        generator_cmd += [ GENERATOR_VERBOSE[args.generator] ]
+        generator_cmd += [GENERATOR_VERBOSE[args.generator]]

     _run_tool(generator_cmd[0], generator_cmd + [target_name], args.build_dir)
@@ -241,17 +251,18 @@ def _get_esptool_args(args):
     esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py")
     if args.port is None:
         args.port = get_default_serial_port()
-    result = [ PYTHON, esptool_path ]
-    result += [ "-p", args.port ]
-    result += [ "-b", str(args.baud) ]
+    result = [PYTHON, esptool_path]
+    result += ["-p", args.port]
+    result += ["-b", str(args.baud)]
     with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
         flasher_args = json.load(f)
     extra_esptool_args = flasher_args["extra_esptool_args"]
-    result += [ "--after", extra_esptool_args["after"] ]
+    result += ["--after", extra_esptool_args["after"]]
     return result

+
 def flash(action, args):
     """
     Run esptool to flash the entire project, from an argfile generated by the build system
@@ -263,14 +274,16 @@ def flash(action, args):
         "flash": "flash_project_args",
     }[action]
     esptool_args = _get_esptool_args(args)
-    esptool_args += [ "write_flash", "@"+flasher_args_path ]
+    esptool_args += ["write_flash", "@" + flasher_args_path]
     _run_tool("esptool.py", esptool_args, args.build_dir)

+
 def erase_flash(action, args):
     esptool_args = _get_esptool_args(args)
-    esptool_args += [ "erase_flash" ]
+    esptool_args += ["erase_flash"]
     _run_tool("esptool.py", esptool_args, args.build_dir)

+
 def monitor(action, args):
     """
     Run idf_monitor.py to watch build output
@@ -285,19 +298,21 @@ def monitor(action, args):
     elf_file = os.path.join(args.build_dir, project_desc["app_elf"])
     if not os.path.exists(elf_file):
-        raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', and the binary on the device must match the one in the build directory exactly. Try 'idf.py flash monitor'." % elf_file)
+        raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
+                         "and the binary on the device must match the one in the build directory exactly. "
+                         "Try 'idf.py flash monitor'." % elf_file)
     idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py")
-    monitor_args = [PYTHON, idf_monitor ]
+    monitor_args = [PYTHON, idf_monitor]
     if args.port is not None:
-        monitor_args += [ "-p", args.port ]
-    monitor_args += [ "-b", project_desc["monitor_baud"] ]
-    monitor_args += [ elf_file ]
+        monitor_args += ["-p", args.port]
+    monitor_args += ["-b", project_desc["monitor_baud"]]
+    monitor_args += [elf_file]

-    idf_py = [ PYTHON ] + get_commandline_options()  # commands to re-run idf.py
-    monitor_args += [ "-m", " ".join("'%s'" % a for a in idf_py) ]
+    idf_py = [PYTHON] + get_commandline_options()  # commands to re-run idf.py
+    monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)]

     if "MSYSTEM" in os.environ:
-        monitor_args = [ "winpty" ] + monitor_args
+        monitor_args = ["winpty"] + monitor_args

     _run_tool("idf_monitor", monitor_args, args.project_dir)
@@ -307,9 +322,11 @@ def clean(action, args):
         return
     build_target("clean", args)

+
 def reconfigure(action, args):
     _ensure_build_directory(args, True)

+
 def fullclean(action, args):
     build_dir = args.build_dir
     if not os.path.isdir(build_dir):
@@ -320,8 +337,9 @@ def fullclean(action, args):
         return
     if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")):
-        raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
-    red_flags = [ "CMakeLists.txt", ".git", ".svn" ]
+        raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically "
+                         "delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
+    red_flags = ["CMakeLists.txt", ".git", ".svn"]
     for red in red_flags:
         red = os.path.join(build_dir, red)
         if os.path.exists(red):
@@ -334,6 +352,7 @@ def fullclean(action, args):
         else:
             os.remove(f)

+
 def print_closing_message(args):
     # print a closing message of some kind
# #
@ -362,7 +381,7 @@ def print_closing_message(args):
else: # flashing the whole project else: # flashing the whole project
cmd = " ".join(flasher_args["write_flash_args"]) + " " cmd = " ".join(flasher_args["write_flash_args"]) + " "
flash_items = sorted(((o,f) for (o,f) in flasher_args["flash_files"].items() if len(o) > 0), flash_items = sorted(((o,f) for (o,f) in flasher_args["flash_files"].items() if len(o) > 0),
key = lambda x: int(x[0], 0)) key=lambda x: int(x[0], 0))
for o,f in flash_items: for o,f in flash_items:
cmd += o + " " + flasher_path(f) + " " cmd += o + " " + flasher_path(f) + " "
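Worth noting in the hunk above: the sort key int(x[0], 0) parses each flash offset with automatic base detection, so hex offsets such as "0x1000" and plain decimal offsets order numerically rather than lexically. A standalone sketch of the same idiom (the offsets and file names below are made-up examples, not from a real flasher_args.json):

flash_files = {"0x1000": "bootloader.bin", "0x8000": "partition-table.bin", "0x10000": "app.bin"}
# base 0 makes int() infer the base from the prefix: "0x..." is hex, plain digits are decimal
flash_items = sorted(flash_files.items(), key=lambda x: int(x[0], 0))
print(flash_items)  # ordered by numeric offset: 0x1000, 0x8000, 0x10000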
@@ -384,33 +403,35 @@ def print_closing_message(args):
if "bootloader" in args.actions:
print_flashing_message("Bootloader", "bootloader")
ACTIONS = {
# action name : ( function (or alias), dependencies, order-only dependencies )
-"all" : ( build_target, [], [ "reconfigure", "menuconfig", "clean", "fullclean" ] ),
-"build": ( "all", [], [] ), # build is same as 'all' target
-"clean": ( clean, [], [ "fullclean" ] ),
-"fullclean": ( fullclean, [], [] ),
-"reconfigure": ( reconfigure, [], [ "menuconfig" ] ),
-"menuconfig": ( build_target, [], [] ),
-"defconfig": ( build_target, [], [] ),
-"confserver": ( build_target, [], [] ),
-"size": ( build_target, [ "app" ], [] ),
-"size-components": ( build_target, [ "app" ], [] ),
-"size-files": ( build_target, [ "app" ], [] ),
-"bootloader": ( build_target, [], [] ),
-"bootloader-clean": ( build_target, [], [] ),
-"bootloader-flash": ( flash, [ "bootloader" ], [ "erase_flash"] ),
-"app": ( build_target, [], [ "clean", "fullclean", "reconfigure" ] ),
-"app-flash": ( flash, [ "app" ], [ "erase_flash"]),
-"partition_table": ( build_target, [], [ "reconfigure" ] ),
-"partition_table-flash": ( flash, [ "partition_table" ], [ "erase_flash" ]),
-"flash": ( flash, [ "all" ], [ "erase_flash" ] ),
-"erase_flash": ( erase_flash, [], []),
-"monitor": ( monitor, [], [ "flash", "partition_table-flash", "bootloader-flash", "app-flash" ]),
-"erase_otadata": ( build_target, [], []),
-"read_otadata": ( build_target, [], []),
+"all": (build_target, [], ["reconfigure", "menuconfig", "clean", "fullclean"]),
+"build": ("all", [], []), # build is same as 'all' target
+"clean": (clean, [], ["fullclean"]),
+"fullclean": (fullclean, [], []),
+"reconfigure": (reconfigure, [], ["menuconfig"]),
+"menuconfig": (build_target, [], []),
+"defconfig": (build_target, [], []),
+"confserver": (build_target, [], []),
+"size": (build_target, ["app"], []),
+"size-components": (build_target, ["app"], []),
+"size-files": (build_target, ["app"], []),
+"bootloader": (build_target, [], []),
+"bootloader-clean": (build_target, [], []),
+"bootloader-flash": (flash, ["bootloader"], ["erase_flash"]),
+"app": (build_target, [], ["clean", "fullclean", "reconfigure"]),
+"app-flash": (flash, ["app"], ["erase_flash"]),
+"partition_table": (build_target, [], ["reconfigure"]),
+"partition_table-flash": (flash, ["partition_table"], ["erase_flash"]),
+"flash": (flash, ["all"], ["erase_flash"]),
+"erase_flash": (erase_flash, [], []),
+"monitor": (monitor, [], ["flash", "partition_table-flash", "bootloader-flash", "app-flash"]),
+"erase_otadata": (build_target, [], []),
+"read_otadata": (build_target, [], []),
}
def get_commandline_options():
""" Return all the command line options up to but not including the action """
result = []
@@ -421,6 +442,7 @@ def get_commandline_options():
result.append(a)
return result
def get_default_serial_port():
""" Return a default serial port. esptool can do this (smarter), but it can create
inconsistencies where esptool.py uses one port and idf_monitor uses another.
@@ -431,23 +453,25 @@ def get_default_serial_port():
import serial.tools.list_ports
ports = list(reversed(sorted(
-p.device for p in serial.tools.list_ports.comports() )))
+p.device for p in serial.tools.list_ports.comports())))
try:
-print ("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % ports[0])
+print("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % ports[0])
return ports[0]
except IndexError:
raise RuntimeError("No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")
# Import the actions, arguments extension file
if os.path.exists(os.path.join(os.getcwd(), "idf_ext.py")):
sys.path.append(os.getcwd())
try:
from idf_ext import add_action_extensions, add_argument_extensions
-except ImportError as e:
+except ImportError:
print("Error importing extension file idf_ext.py. Skipping.")
print("Please make sure that it contains implementations (even if they're empty implementations) of")
print("add_action_extensions and add_argument_extensions.")
def main():
if sys.version_info[0] != 2 or sys.version_info[1] != 7:
print("Note: You are using Python %d.%d.%d. Python 3 support is new, please report any problems "
@@ -457,13 +481,13 @@ def main():
# Add actions extensions
try:
add_action_extensions({
"build_target": build_target,
-"reconfigure" : reconfigure,
-"flash" : flash,
-"monitor" : monitor,
-"clean" : clean,
-"fullclean" : fullclean
+"reconfigure": reconfigure,
+"flash": flash,
+"monitor": monitor,
+"clean": clean,
+"fullclean": fullclean
}, ACTIONS)
except NameError:
pass
@@ -478,7 +502,8 @@ def main():
parser.add_argument('-n', '--no-warnings', help="Disable Cmake warnings", action="store_true")
parser.add_argument('-v', '--verbose', help="Verbose build output", action="store_true")
parser.add_argument('-D', '--define-cache-entry', help="Create a cmake cache entry", nargs='+')
-parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.", action="store_true")
+parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.",
+action="store_true")
parser.add_argument('actions', help="Actions (build targets or other operations)", nargs='+',
choices=ACTIONS.keys())
@@ -494,21 +519,23 @@ def main():
# Advanced parameter checks
if args.build_dir is not None and os.path.realpath(args.project_dir) == os.path.realpath(args.build_dir):
-raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping --build-dir option, the default is a 'build' subdirectory inside the project directory.")
+raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping "
+"--build-dir option, the default is a 'build' subdirectory inside the project directory.")
if args.build_dir is None:
args.build_dir = os.path.join(args.project_dir, "build")
args.build_dir = os.path.realpath(args.build_dir)
completed_actions = set()
def execute_action(action, remaining_actions):
-( function, dependencies, order_dependencies ) = ACTIONS[action]
+(function, dependencies, order_dependencies) = ACTIONS[action]
# very simple dependency management, build a set of completed actions and make sure
# all dependencies are in it
for dep in dependencies:
-if not dep in completed_actions:
+if dep not in completed_actions:
execute_action(dep, remaining_actions)
for dep in order_dependencies:
-if dep in remaining_actions and not dep in completed_actions:
+if dep in remaining_actions and dep not in completed_actions:
execute_action(dep, remaining_actions)
if action in completed_actions:
@@ -527,11 +554,10 @@ def main():
print_closing_message(args)
if __name__ == "__main__":
try:
main()
except FatalError as e:
print(e)
sys.exit(2)
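The execute_action scheme touched above is easy to reproduce in isolation: each ACTIONS entry is (function or alias, hard dependencies, order-only dependencies), where hard dependencies always run first and order-only dependencies run first only when the user also requested them. A self-contained sketch with hypothetical actions, not the real idf.py table:

def run(name, actions, requested, done=None):
    if done is None:
        done = set()
    func, deps, order_deps = actions[name]
    for dep in deps:  # hard dependencies always run before the action
        if dep not in done:
            run(dep, actions, requested, done)
    for dep in order_deps:  # order-only dependencies run first, but only if requested
        if dep in requested and dep not in done:
            run(dep, actions, requested, done)
    if name not in done:
        func(name)
        done.add(name)

actions = {
    "erase": (print, [], []),
    "build": (print, [], []),
    "flash": (print, ["build"], ["erase"]),
}
run("flash", actions, requested={"flash", "erase"})  # prints build, erase, flash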
tools/idf_size.py
@@ -22,23 +22,22 @@
#
from __future__ import print_function
from __future__ import unicode_literals
-from builtins import dict
-import argparse, sys, subprocess, re
+import argparse
+import re
import os.path
-import pprint
-import operator
DEFAULT_TOOLCHAIN_PREFIX = "xtensa-esp32-elf-"
CHIP_SIZES = {
-"esp32" : {
-"total_iram" : 0x20000,
-"total_irom" : 0x330000,
-"total_drom" : 0x800000,
+"esp32": {
+"total_iram": 0x20000,
+"total_irom": 0x330000,
+"total_drom": 0x800000,
# total dram is determined from objdump output
}
}
def scan_to_header(f, header_line):
""" Scan forward in a file until you reach 'header_line', then return """
for line in f:
@@ -46,11 +45,13 @@ def scan_to_header(f, header_line):
return
raise RuntimeError("Didn't find line '%s' in file" % header_line)
def load_map_data(map_file):
memory_config = load_memory_config(map_file)
sections = load_sections(map_file)
return memory_config, sections
def load_memory_config(map_file):
""" Memory Configuration section is the total size of each output section """
result = {}
@@ -64,19 +65,21 @@ def load_memory_config(map_file):
else:
return result # we're at the end of the Memory Configuration
section = {
-"name" : m.group("name"),
-"origin" : int(m.group("origin"), 16),
-"length" : int(m.group("length"), 16),
+"name": m.group("name"),
+"origin": int(m.group("origin"), 16),
+"length": int(m.group("length"), 16),
}
if section["name"] != "*default*":
result[section["name"]] = section
raise RuntimeError("End of file while scanning memory configuration?")
def load_sections(map_file):
""" Load section size information from the MAP file.
Returns a dict of 'sections', where each key is a section name and the value
-is a dict with details about this section, including a "sources" key which holds a list of source file line information for each symbol linked into the section.
+is a dict with details about this section, including a "sources" key which holds a list of source file line
+information for each symbol linked into the section.
"""
scan_to_header(map_file, "Linker script and memory map")
sections = {}
@@ -88,10 +91,10 @@ def load_sections(map_file):
m = re.match(RE_SECTION_HEADER, line)
if m is not None: # start of a new section
section = {
-"name" : m.group("name"),
-"address" : int(m.group("address"), 16),
-"size" : int(m.group("size"), 16),
-"sources" : [],
+"name": m.group("name"),
+"address": int(m.group("address"), 16),
+"size": int(m.group("size"), 16),
+"sources": [],
}
sections[section["name"]] = section
continue
@@ -113,14 +116,14 @@ def load_sections(map_file):
archive = "(exe)"
source = {
-"size" : int(m.group("size"), 16),
-"address" : int(m.group("address"), 16),
-"archive" : os.path.basename(archive),
-"object_file" : os.path.basename(m.group("object_file")),
-"sym_name" : sym_name,
+"size": int(m.group("size"), 16),
+"address": int(m.group("address"), 16),
+"archive": os.path.basename(archive),
+"object_file": os.path.basename(m.group("object_file")),
+"sym_name": sym_name,
}
source["file"] = "%s:%s" % (source["archive"], source["object_file"])
-section["sources"] += [ source ]
+section["sources"] += [source]
# In some cases the section name appears on the previous line, back it up in here
RE_SYMBOL_ONLY_LINE = r"^ (?P<sym_name>\S*)$"
@@ -130,6 +133,7 @@ def load_sections(map_file):
return sections
def sizes_by_key(sections, key):
""" Takes a dict of sections (from load_sections) and returns
a dict keyed by 'key' with aggregate output size information.
@@ -147,6 +151,7 @@ def sizes_by_key(sections, key):
archive[section["name"]] += s["size"]
return result
def main():
parser = argparse.ArgumentParser("idf_size - a tool to print IDF elf file sizes")
@@ -183,6 +188,7 @@ def main():
print("Symbols within the archive:", args.archive_details, "(Not all symbols may be reported)")
print_archive_symbols(sections, args.archive_details)
def print_summary(memory_config, sections):
def get_size(section):
try:
@@ -196,7 +202,7 @@ def print_summary(memory_config, sections):
used_data = get_size(".dram0.data")
used_bss = get_size(".dram0.bss")
used_dram = used_data + used_bss
-used_iram = sum( get_size(s) for s in sections if s.startswith(".iram0") )
+used_iram = sum(get_size(s) for s in sections if s.startswith(".iram0"))
flash_code = get_size(".flash.text")
flash_rodata = get_size(".flash.rodata")
total_size = used_data + used_iram + flash_code + flash_rodata
@@ -214,10 +220,10 @@ def print_summary(memory_config, sections):
print(" Flash rodata: %7d bytes" % flash_rodata)
print("Total image size:~%7d bytes (.bin may be padded larger)" % (total_size))
def print_detailed_sizes(sections, key, header):
sizes = sizes_by_key(sections, key)
-sub_heading = None
headings = (header,
"DRAM .data",
"& .bss",
@@ -240,6 +246,7 @@ def print_detailed_sizes(sections, key, header):
def return_total_size(elem):
val = elem[1]
return val["total"]
def return_header(elem):
return elem[0]
s = sorted(list(result.items()), key=return_header)
@@ -255,6 +262,7 @@ def print_detailed_sizes(sections, key, header):
v["flash_rodata"],
v["total"]))
def print_archive_symbols(sections, archive):
interested_sections = [".dram0.data", ".dram0.bss", ".iram0.text", ".iram0.vectors", ".flash.text", ".flash.rodata"]
result = {}
@@ -267,7 +275,7 @@ def print_archive_symbols(sections, archive):
for s in section["sources"]:
if archive != s["archive"]:
continue
-s["sym_name"] = re.sub("(.text.|.literal.|.data.|.bss.|.rodata.)", "", s["sym_name"]);
+s["sym_name"] = re.sub("(.text.|.literal.|.data.|.bss.|.rodata.)", "", s["sym_name"])
result[section_name][s["sym_name"]] = result[section_name].get(s["sym_name"], 0) + s["size"]
for t in interested_sections:
print("\nSymbols from section:", t)
@@ -275,10 +283,10 @@ def print_archive_symbols(sections, archive):
s = sorted(list(result[t].items()), key=lambda k_v: k_v[0])
# do a secondary sort in order to have consistent order (for diff-ing the output)
for key,val in sorted(s, key=lambda k_v: k_v[1], reverse=True):
-print(("%s(%d)"% (key.replace(t + ".", ""), val)), end=' ')
+print(("%s(%d)" % (key.replace(t + ".", ""), val)), end=' ')
section_total += val
print("\nSection total:",section_total)
if __name__ == "__main__":
main()
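For readers unfamiliar with the GNU ld map files idf_size.py consumes, the Memory Configuration parser boils down to matching name/origin/length columns with a named-group regex. A sketch under that assumption (the sample line and regex here are illustrative, not copied from the script):

import re

RE_MEMORY_LINE = re.compile(r"(?P<name>\S+) +0x(?P<origin>[0-9a-f]+) +0x(?P<length>[0-9a-f]+)")
line = "iram0_0_seg 0x0000000040080000 0x0000000000020000"
m = RE_MEMORY_LINE.match(line)
if m:
    # origin and length are hex strings in the map file, so parse with base 16
    print(m.group("name"), int(m.group("origin"), 16), int(m.group("length"), 16))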
tools/kconfig_new/confgen.py
@@ -30,13 +30,13 @@ import json
import gen_kconfig_doc
import kconfiglib
-import pprint
__version__ = "0.1"
-if not "IDF_CMAKE" in os.environ:
+if "IDF_CMAKE" not in os.environ:
os.environ["IDF_CMAKE"] = ""
def main():
parser = argparse.ArgumentParser(description='confgen.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))
@@ -46,7 +46,8 @@ def main():
default=None)
parser.add_argument('--defaults',
-help='Optional project defaults file, used if --config file doesn\'t exist. Multiple files can be specified using multiple --defaults arguments.',
+help='Optional project defaults file, used if --config file doesn\'t exist. '
+'Multiple files can be specified using multiple --defaults arguments.',
nargs='?',
default=[],
action='append')
@@ -70,15 +71,15 @@ def main():
args = parser.parse_args()
for fmt, filename in args.output:
-if not fmt in OUTPUT_FORMATS.keys():
+if fmt not in OUTPUT_FORMATS.keys():
print("Format '%s' not recognised. Known formats: %s" % (fmt, OUTPUT_FORMATS.keys()))
sys.exit(1)
try:
-args.env = [ (name,value) for (name,value) in ( e.split("=",1) for e in args.env) ]
+args.env = [(name,value) for (name,value) in (e.split("=",1) for e in args.env)]
except ValueError:
print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
sys.exit(1)
for name, value in args.env:
os.environ[name] = value
@@ -124,6 +125,7 @@ def write_config(config, filename):
"""
config.write_config(filename, header=CONFIG_HEADING)
def write_header(config, filename):
CONFIG_HEADING = """/*
* Automatically generated file. DO NOT EDIT.
@@ -133,6 +135,7 @@ def write_header(config, filename):
"""
config.write_autoconf(filename, header=CONFIG_HEADING)
def write_cmake(config, filename):
with open(filename, "w") as f:
write = f.write
@@ -143,6 +146,7 @@ def write_cmake(config, filename):
# Espressif IoT Development Framework (ESP-IDF) Configuration cmake include file
#
""")
def write_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@@ -158,8 +162,10 @@ def write_cmake(config, filename):
prefix, sym.name, val))
config.walk_menu(write_node)
def get_json_values(config):
config_dict = {}
def write_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@@ -167,7 +173,7 @@ def get_json_values(config):
val = sym.str_value # this calculates _write_to_conf, due to kconfiglib magic
if sym._write_to_conf:
-if sym.type in [ kconfiglib.BOOL, kconfiglib.TRISTATE ]:
+if sym.type in [kconfiglib.BOOL, kconfiglib.TRISTATE]:
val = (val != "n")
elif sym.type == kconfiglib.HEX:
val = int(val, 16)
@@ -177,11 +183,13 @@ def get_json_values(config):
config.walk_menu(write_node)
return config_dict
def write_json(config, filename):
config_dict = get_json_values(config)
with open(filename, "w") as f:
json.dump(config_dict, f, indent=4, sort_keys=True)
def write_json_menus(config, filename):
result = [] # root level items
node_lookup = {} # lookup from MenuNode to an item in result
@@ -190,7 +198,7 @@ def write_json_menus(config, filename):
try:
json_parent = node_lookup[node.parent]["children"]
except KeyError:
-assert not node.parent in node_lookup # if fails, we have a parent node with no "children" entity (ie a bug)
+assert node.parent not in node_lookup # if fails, we have a parent node with no "children" entity (ie a bug)
json_parent = result # root level node
# node.kconfig.y means node has no dependency,
@@ -206,11 +214,11 @@ def write_json_menus(config, filename):
new_json = None
if node.item == kconfiglib.MENU or is_menuconfig:
-new_json = { "type" : "menu",
-"title" : node.prompt[0],
+new_json = {"type": "menu",
+"title": node.prompt[0],
"depends_on": depends,
"children": []
}
if is_menuconfig:
sym = node.item
new_json["name"] = sym.name
@@ -236,12 +244,12 @@ def write_json_menus(config, filename):
greatest_range = [int(min_range.str_value), int(max_range.str_value)]
new_json = {
-"type" : kconfiglib.TYPE_TO_STR[sym.type],
-"name" : sym.name,
+"type": kconfiglib.TYPE_TO_STR[sym.type],
+"name": sym.name,
"title": node.prompt[0] if node.prompt else None,
-"depends_on" : depends,
+"depends_on": depends,
"help": node.help,
-"range" : greatest_range,
+"range": greatest_range,
"children": [],
}
elif isinstance(node.item, kconfiglib.Choice):
@@ -250,7 +258,7 @@ def write_json_menus(config, filename):
"type": "choice",
"title": node.prompt[0],
"name": choice.name,
-"depends_on" : depends,
+"depends_on": depends,
"help": node.help,
"children": []
}
@@ -263,6 +271,7 @@ def write_json_menus(config, filename):
with open(filename, "w") as f:
f.write(json.dumps(result, sort_keys=True, indent=4))
def update_if_changed(source, destination):
with open(source, "r") as f:
source_contents = f.read()
@@ -276,14 +285,14 @@ def update_if_changed(source, destination):
f.write(source_contents)
-OUTPUT_FORMATS = {
-"config" : write_config,
-"header" : write_header,
-"cmake" : write_cmake,
-"docs" : gen_kconfig_doc.write_docs,
-"json" : write_json,
-"json_menus" : write_json_menus,
-}
+OUTPUT_FORMATS = {"config": write_config,
+"header": write_header,
+"cmake": write_cmake,
+"docs": gen_kconfig_doc.write_docs,
+"json": write_json,
+"json_menus": write_json_menus,
+}
class FatalError(RuntimeError):
"""
@@ -291,6 +300,7 @@ class FatalError(RuntimeError):
"""
pass
if __name__ == '__main__':
try:
main()
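The OUTPUT_FORMATS table reformatted above is a plain dispatch dict mapping a format name to a writer taking (config, filename). A toy version of the same pattern (the writers here are stand-ins, not confgen's real ones):

def write_config(config, filename):
    print("writing config to", filename)

def write_header(config, filename):
    print("writing header to", filename)

OUTPUT_FORMATS = {"config": write_config,
                  "header": write_header}

for fmt, filename in [("config", "sdkconfig"), ("header", "sdkconfig.h")]:
    if fmt not in OUTPUT_FORMATS:
        raise SystemExit("Format '%s' not recognised" % fmt)
    OUTPUT_FORMATS[fmt](None, filename)  # dispatch to the matching writer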
tools/kconfig_new/confserver.py
@@ -12,12 +12,13 @@ import sys
import confgen
from confgen import FatalError, __version__
def main():
parser = argparse.ArgumentParser(description='confserver.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))
parser.add_argument('--config',
help='Project configuration settings',
required=True)
parser.add_argument('--kconfig',
help='KConfig file with config item definitions',
@@ -29,10 +30,10 @@ def main():
args = parser.parse_args()
try:
-args.env = [ (name,value) for (name,value) in ( e.split("=",1) for e in args.env) ]
+args.env = [(name,value) for (name,value) in (e.split("=",1) for e in args.env)]
except ValueError:
print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
sys.exit(1)
for name, value in args.env:
os.environ[name] = value
@@ -47,7 +48,7 @@ def run_server(kconfig, sdkconfig):
config_dict = confgen.get_json_values(config)
ranges_dict = get_ranges(config)
-json.dump({"version": 1, "values" : config_dict, "ranges" : ranges_dict}, sys.stdout)
+json.dump({"version": 1, "values": config_dict, "ranges": ranges_dict}, sys.stdout)
print("\n")
while True:
@@ -81,7 +82,7 @@ def run_server(kconfig, sdkconfig):
values_diff = diff(before, after)
ranges_diff = diff(before_ranges, after_ranges)
-response = {"version" : 1, "values" : values_diff, "ranges" : ranges_diff}
+response = {"version": 1, "values": values_diff, "ranges": ranges_diff}
if error:
for e in error:
print("Error: %s" % e, file=sys.stderr)
@@ -91,10 +92,10 @@ def run_server(kconfig, sdkconfig):
def handle_request(config, req):
-if not "version" in req:
-return [ "All requests must have a 'version'" ]
+if "version" not in req:
+return ["All requests must have a 'version'"]
if int(req["version"]) != 1:
-return [ "Only version 1 requests supported" ]
+return ["Only version 1 requests supported"]
error = []
@@ -103,7 +104,7 @@ def handle_request(config, req):
try:
config.load_config(req["load"])
except Exception as e:
-error += [ "Failed to load from %s: %s" % (req["load"], e) ]
+error += ["Failed to load from %s: %s" % (req["load"], e)]
if "set" in req:
handle_set(config, error, req["set"])
@@ -113,16 +114,17 @@ def handle_request(config, req):
print("Saving config to %s..." % req["save"], file=sys.stderr)
confgen.write_config(config, req["save"])
except Exception as e:
-error += [ "Failed to save to %s: %s" % (req["save"], e) ]
+error += ["Failed to save to %s: %s" % (req["save"], e)]
return error
def handle_set(config, error, to_set):
-missing = [ k for k in to_set if not k in config.syms ]
+missing = [k for k in to_set if k not in config.syms]
if missing:
error.append("The following config symbol(s) were not found: %s" % (", ".join(missing)))
# replace name keys with the full config symbol for each key:
-to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if not k in missing)
+to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if k not in missing)
# Work through the list of values to set, noting that
# some may not be immediately applicable (maybe they depend
@@ -130,14 +132,14 @@ def handle_set(config, error, to_set):
# knowing if any value is unsettable until then end
while len(to_set):
-set_pass = [ (k,v) for (k,v) in to_set.items() if k.visibility ]
+set_pass = [(k,v) for (k,v) in to_set.items() if k.visibility]
if not set_pass:
break # no visible keys left
for (sym,val) in set_pass:
if sym.type in (kconfiglib.BOOL, kconfiglib.TRISTATE):
-if val == True:
+if val is True:
sym.set_value(2)
-elif val == False:
+elif val is False:
sym.set_value(0)
else:
error.append("Boolean symbol %s only accepts true/false values" % sym.name)
@@ -150,7 +152,6 @@ def handle_set(config, error, to_set):
error.append("The following config symbol(s) were not visible so were not updated: %s" % (", ".join(s.name for s in to_set)))
def diff(before, after):
"""
Return a dictionary with the difference between 'before' and 'after' (either with the new value if changed,
@@ -164,6 +165,7 @@ def diff(before, after):
def get_ranges(config):
ranges_dict = {}
def handle_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@@ -182,4 +184,3 @@ if __name__ == '__main__':
except FatalError as e:
print("A fatal error occurred: %s" % e, file=sys.stderr)
sys.exit(2)
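The diff() docstring above describes reporting each key with its new value if changed; a plausible reading (the tail of the docstring is cut off in this view) is that keys which disappeared map to None. A sketch under that assumption, not the actual implementation:

def diff(before, after):
    # keys whose value changed (or which are new) carry the new value
    changed = dict((k, v) for (k, v) in after.items() if before.get(k) != v)
    # keys that vanished are reported as None
    changed.update((k, None) for k in before if k not in after)
    return changed

print(diff({"A": 1, "B": 2}, {"A": 1, "B": 3, "C": 4}))  # {'B': 3, 'C': 4}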
tools/kconfig_new/gen_kconfig_doc.py
@@ -21,7 +21,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
-import os
import re
import kconfiglib
@@ -33,7 +32,8 @@ HEADING_SYMBOLS = '#*=-^"+'
# Keep the heading level in sync with api-reference/kconfig.rst
INITIAL_HEADING_LEVEL = 3
-MAX_HEADING_LEVEL = len(HEADING_SYMBOLS)-1
+MAX_HEADING_LEVEL = len(HEADING_SYMBOLS) - 1
def write_docs(config, filename):
""" Note: writing .rst documentation ignores the current value
@@ -42,22 +42,25 @@ def write_docs(config, filename):
with open(filename, "w") as f:
config.walk_menu(lambda node: write_menu_item(f, node))
def node_is_menu(node):
try:
return node.item == kconfiglib.MENU or node.is_menuconfig
except AttributeError:
return False # not all MenuNodes have is_menuconfig for some reason
def get_breadcrumbs(node):
# this is a bit wasteful as it recalculates each time, but still...
result = []
node = node.parent
while node.parent:
if node.prompt:
-result = [ ":ref:`%s`" % get_link_anchor(node) ] + result
+result = [":ref:`%s`" % get_link_anchor(node)] + result
node = node.parent
return " > ".join(result)
def get_link_anchor(node):
try:
return "CONFIG_%s" % node.item.name
@@ -68,11 +71,12 @@ def get_link_anchor(node):
result = []
while node.parent:
if node.prompt:
-result = [ re.sub(r"[^a-zA-z0-9]+", "-", node.prompt[0]) ] + result
+result = [re.sub(r"[^a-zA-z0-9]+", "-", node.prompt[0])] + result
node = node.parent
result = "-".join(result).lower()
return result
def get_heading_level(node):
result = INITIAL_HEADING_LEVEL
node = node.parent
@@ -83,6 +87,7 @@ def get_heading_level(node):
node = node.parent
return result
def format_rest_text(text, indent):
# Format an indented text block for use with ReST
text = indent + text.replace('\n', '\n' + indent)
@@ -92,6 +97,7 @@ def format_rest_text(text, indent):
text += '\n'
return text
def node_should_write(node):
if not node.prompt:
return False # Don't do anything for invisible menu items
@@ -101,6 +107,7 @@ def node_should_write(node):
return True
def write_menu_item(f, node):
if not node_should_write(node):
return
@@ -112,7 +119,7 @@ def write_menu_item(f, node):
is_menu = node_is_menu(node)
-## Heading
+# Heading
if name:
title = 'CONFIG_%s' % name
else:
@@ -167,6 +174,6 @@ def write_menu_item(f, node):
child = child.next
f.write('\n')
if __name__ == '__main__':
print("Run this via 'confgen.py --output doc FILENAME'")
tools/kconfig_new/test/test_confserver.py
@@ -1,25 +1,16 @@
#!/usr/bin/env python
from __future__ import print_function
import os
-import sys
-import threading
-import time
import json
import argparse
-import shutil
import tempfile
import pexpect
-sys.path.append("..")
-import confserver
-def create_server_thread(*args):
-t = threading.Thread()
def parse_testcases():
with open("testcases.txt", "r") as f:
-cases = [ l for l in f.readlines() if len(l.strip()) > 0 ]
+cases = [l for l in f.readlines() if len(l.strip()) > 0]
# Each 3 lines in the file should be formatted as:
# * Description of the test change
# * JSON "changes" to send to the server
@@ -29,19 +20,20 @@ def parse_testcases():
for i in range(0, len(cases), 3):
desc = cases[i]
-send = cases[i+1]
-expect = cases[i+2]
+send = cases[i + 1]
+expect = cases[i + 2]
if not desc.startswith("* "):
-raise RuntimeError("Unexpected description at line %d: '%s'" % (i+1, desc))
+raise RuntimeError("Unexpected description at line %d: '%s'" % (i + 1, desc))
if not send.startswith("> "):
-raise RuntimeError("Unexpected send at line %d: '%s'" % (i+2, send))
+raise RuntimeError("Unexpected send at line %d: '%s'" % (i + 2, send))
if not expect.startswith("< "):
-raise RuntimeError("Unexpected expect at line %d: '%s'" % (i+3, expect))
+raise RuntimeError("Unexpected expect at line %d: '%s'" % (i + 3, expect))
desc = desc[2:]
send = json.loads(send[2:])
expect = json.loads(expect[2:])
yield (desc, send, expect)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--logfile', type=argparse.FileType('w'), help='Optional session log of the interactions with confserver.py')
@@ -72,7 +64,7 @@ def main():
for (desc, send, expected) in cases:
print(desc)
-req = { "version" : "1", "set" : send }
+req = {"version": "1", "set": send}
req = json.dumps(req)
print("Sending: %s" % (req))
p.send("%s\n" % req)
@@ -84,13 +76,13 @@ def main():
read_vals = readback[expect_key]
exp_vals = expected[expect_key]
if read_vals != exp_vals:
-expect_diff = dict((k,v) for (k,v) in exp_vals.items() if not k in read_vals or v != read_vals[k])
+expect_diff = dict((k,v) for (k,v) in exp_vals.items() if k not in read_vals or v != read_vals[k])
raise RuntimeError("Test failed! Was expecting %s: %s" % (expect_key, json.dumps(expect_diff)))
print("OK")
print("Testing load/save...")
before = os.stat(temp_sdkconfig_path).st_mtime
-p.send("%s\n" % json.dumps({ "version" : "1", "save" : temp_sdkconfig_path }))
+p.send("%s\n" % json.dumps({"version": "1", "save": temp_sdkconfig_path}))
save_result = expect_json()
print("Save result: %s" % (json.dumps(save_result)))
assert len(save_result["values"]) == 0
@@ -98,7 +90,7 @@ def main():
after = os.stat(temp_sdkconfig_path).st_mtime
assert after > before
-p.send("%s\n" % json.dumps({ "version" : "1", "load" : temp_sdkconfig_path }))
+p.send("%s\n" % json.dumps({"version": "1", "load": temp_sdkconfig_path}))
load_result = expect_json()
print("Load result: %s" % (json.dumps(load_result)))
assert len(load_result["values"]) > 0 # loading same file should return all config items
@@ -111,6 +103,6 @@ def main():
except OSError:
pass
if __name__ == "__main__":
main()
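The testcases.txt convention enforced above is three lines per record: "* description", "> json-to-send", "< json-expected". A condensed sketch of the same parse loop (the sample data is invented):

import json

lines = ["* set a value", '> {"X": 1}', '< {"X": 1}']
for i in range(0, len(lines), 3):
    desc = lines[i][2:]                    # strip the "* " marker
    send = json.loads(lines[i + 1][2:])    # strip "> ", parse JSON to send
    expect = json.loads(lines[i + 2][2:])  # strip "< ", parse expected JSON
    print(desc, send, expect)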
tools/ldgen/fragments.py
@@ -15,19 +15,29 @@
#
-import re
-import collections
-import sys
import os
from sdkconfig import SDKConfig
-from pyparsing import *
+from pyparsing import OneOrMore
+from pyparsing import restOfLine
+from pyparsing import alphanums
+from pyparsing import Word
+from pyparsing import alphas
+from pyparsing import ParseBaseException
+from pyparsing import Suppress
+from pyparsing import Group
+from pyparsing import Literal
+from pyparsing import ZeroOrMore
+from pyparsing import Optional
+from pyparsing import originalTextFor
from common import LdGenFailure
-"""
-Fragment file internal representation. Parses and stores instances of the fragment definitions
-contained within the file.
-"""
class FragmentFileModel():
+"""
+Fragment file internal representation. Parses and stores instances of the fragment definitions
+contained within the file.
+"""
def __init__(self, fragment_file):
path = os.path.realpath(fragment_file.name)
@@ -54,13 +64,14 @@ class FragmentFileModel():
for fragment in self.fragments:
fragment.path = path
-"""
-Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragment and performs processing
-such as checking the validity of the fragment name and getting the entry values.
-"""
class Fragment:
+"""
+Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragment and performs processing
+such as checking the validity of the fragment name and getting the entry values.
+"""
-IDENTIFIER = Word(alphas+"_", alphanums+"_")
+IDENTIFIER = Word(alphas + "_", alphanums + "_")
ENTITY = Word(alphanums + ".-_$")
def __init__(self, name, entries):
@@ -68,6 +79,7 @@ class Fragment:
self.name = name
self.entries = entries
class Sections(Fragment):
def __init__(self, name, entries):
@@ -113,10 +125,11 @@ class Sections(Fragment):
return sections
-"""
-Encapsulates a scheme fragment, which defines what target input sections are placed under.
-"""
class Scheme(Fragment):
+"""
+Encapsulates a scheme fragment, which defines what target input sections are placed under.
+"""
def __init__(self, name, items):
Fragment.__init__(self, name, items)
@@ -151,10 +164,11 @@ class Scheme(Fragment):
return scheme
-"""
-Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
-"""
class Mapping(Fragment):
+"""
+Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
+"""
# Name of the default condition entry
DEFAULT_CONDITION = "default"
@@ -192,10 +206,10 @@ class Mapping(Fragment):
for normal_group in self.entries[0]:
# Get the original string of the condition
condition = next(iter(normal_group.condition.asList())).strip()
mappings = self._create_mappings_set(normal_group[1])
processed.append((condition, mappings))
default_group = self.entries[1]
if len(default_group) > 1:
@@ -217,9 +231,6 @@ class Mapping(Fragment):
# Match header [mapping]
header = Suppress("[") + Suppress("mapping") + Suppress("]")
-# Define possbile values for input archive and object file
-filename = Word(alphanums + "-" + "_")
# There are three possible patterns for mapping entries:
# obj:symbol (scheme)
# obj (scheme)
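The switch from "from pyparsing import *" to explicit imports shown above keeps the same grammar-building style. A minimal runnable sketch in that style (the grammar below is a stand-in, not the real fragment grammar):

from pyparsing import Suppress, Word, alphanums, alphas

# Suppress() matches a token but drops it from the results
header = Suppress("[") + Suppress("mapping") + Suppress("]")
# identifiers start with a letter or underscore, then letters/digits/underscores
identifier = Word(alphas + "_", alphanums + "_")

print(identifier.parseString("rtc_text")[0])     # -> rtc_text
print(header.parseString("[mapping]").asList())  # -> [] (all tokens suppressed)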
tools/ldgen/generation.py
@ -14,22 +14,20 @@
# limitations under the License. # limitations under the License.
# #
import re
import collections import collections
import itertools import itertools
import os import os
import subprocess
import fnmatch import fnmatch
from sdkconfig import SDKConfig from fragments import Sections, Scheme, Mapping, Fragment
from fragments import FragmentFileModel, Sections, Scheme, Mapping, Fragment from pyparsing import Suppress, White, ParseException, Literal, Regex, Group, ZeroOrMore, Word, OneOrMore, nums, alphanums, alphas, Optional
from pyparsing import *
from common import LdGenFailure from common import LdGenFailure
"""
Encapsulates a generated placement rule placed under a target
"""
class PlacementRule(): class PlacementRule():
"""
Encapsulates a generated placement rule placed under a target
"""
DEFAULT_SPECIFICITY = 0 DEFAULT_SPECIFICITY = 0
ARCHIVE_SPECIFICITY = 1 ARCHIVE_SPECIFICITY = 1
@ -71,12 +69,12 @@ class PlacementRule():
(section, expansion) = section_data (section, expansion) = section_data
if expansion: if expansion:
metadata = self.__metadata(self.__container([]), self.__container([expansion]), self.__container(True)) metadata = self.__metadata(self.__container([]), self.__container([expansion]), self.__container(True))
self.sections[section] = metadata self.sections[section] = metadata
def get_section_names(self): def get_section_names(self):
return self.sections.keys() return self.sections.keys()
def add_exclusion(self, other, sections_infos = None): def add_exclusion(self, other, sections_infos=None):
# Utility functions for this method # Utility functions for this method
def do_section_expansion(rule, section): def do_section_expansion(rule, section):
if section in rule.get_section_names(): if section in rule.get_section_names():
@ -116,7 +114,7 @@ class PlacementRule():
# most specific rule from the list, and if an even more specific rule is found, # most specific rule from the list, and if an even more specific rule is found,
# replace it entirely. Otherwise, keep appending. # replace it entirely. Otherwise, keep appending.
exclusions = self.sections[section].excludes exclusions = self.sections[section].excludes
exclusions_list = exclusions.content if exclusions.content != None else [] exclusions_list = exclusions.content if exclusions.content is not None else []
exclusions_to_remove = filter(lambda r: r.is_more_specific_rule_of(other), exclusions_list) exclusions_to_remove = filter(lambda r: r.is_more_specific_rule_of(other), exclusions_list)
remaining_exclusions = [e for e in exclusions_list if e not in exclusions_to_remove] remaining_exclusions = [e for e in exclusions_list if e not in exclusions_to_remove]
@ -132,8 +130,8 @@ class PlacementRule():
return False return False
# Compare archive, obj and target # Compare archive, obj and target
for entity_index in range (1, other.specificity + 1): for entity_index in range(1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] != None: if self[entity_index] != other[entity_index] and other[entity_index] is not None:
return False return False
return True return True
@ -143,15 +141,15 @@ class PlacementRule():
return False return False
# Compare archive, obj and target # Compare archive, obj and target
for entity_index in range (1, other.specificity + 1): for entity_index in range(1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] != None: if self[entity_index] != other[entity_index] and other[entity_index] is not None:
return False return False
return True return True
def __getitem__(self, key): def __getitem__(self, key):
if key == PlacementRule.ARCHIVE_SPECIFICITY: if key == PlacementRule.ARCHIVE_SPECIFICITY:
return self.archive return self.archive
elif key == PlacementRule.OBJECT_SPECIFICITY: elif key == PlacementRule.OBJECT_SPECIFICITY:
return self.obj return self.obj
elif key == PlacementRule.SYMBOL_SPECIFICITY: elif key == PlacementRule.SYMBOL_SPECIFICITY:
@ -193,7 +191,7 @@ class PlacementRule():
sections_string = " ".join(sections_string) sections_string = " ".join(sections_string)
archive = str(self.archive) if self.archive else "" archive = str(self.archive) if self.archive else ""
obj = (str(self.obj) + (".*" if self.obj else "")) if self.obj else "" obj = (str(self.obj) + (".*" if self.obj else "")) if self.obj else ""
# Handle output string generation based on information available # Handle output string generation based on information available
if self.specificity == PlacementRule.DEFAULT_SPECIFICITY: if self.specificity == PlacementRule.DEFAULT_SPECIFICITY:
@ -247,10 +245,11 @@ class PlacementRule():
yield self.symbol yield self.symbol
raise StopIteration raise StopIteration
"""
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
"""
class GenerationModel: class GenerationModel:
"""
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
"""
DEFAULT_SCHEME = "default" DEFAULT_SCHEME = "default"
@@ -273,7 +272,7 @@ class GenerationModel:
         rule = PlacementRule(archive, obj, symbol, section_entries, target)

-        if not rule in rules:
+        if rule not in rules:
             rules.append(rule)

     def _build_scheme_dictionary(self):
@@ -403,7 +402,7 @@ class GenerationModel:
     def _create_extra_rules(self, rules):
         # This function generates extra rules for symbol specific rules. The reason for generating extra rules is to isolate,
         # as much as possible, rules that require expansion. Particularly, object specific extra rules are generated.
-        rules_to_process = sorted(rules, key = lambda r: r.specificity)
+        rules_to_process = sorted(rules, key=lambda r: r.specificity)
         symbol_specific_rules = list(filter(lambda r: r.specificity == PlacementRule.SYMBOL_SPECIFICITY, rules_to_process))

         extra_rules = dict()
@@ -433,7 +432,8 @@ class GenerationModel:
                         extra_rule = extra_rules[extra_rules_key]

                         if section not in extra_rule.get_section_names():
-                            new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol, list(extra_rule.get_section_names()) + [section] , extra_rule.target)
+                            new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol,
+                                                     list(extra_rule.get_section_names()) + [section], extra_rule.target)
                             extra_rules[extra_rules_key] = new_rule
                     except KeyError:
                         extra_rule = PlacementRule(symbol_specific_rule.archive, symbol_specific_rule.obj, None, [section], section_rule.target)
@@ -452,16 +452,16 @@ class GenerationModel:
         # Sort the rules by means of how specific they are. Sort by specificity from lowest to highest
         # * -> lib:* -> lib:obj -> lib:obj:symbol
-        sorted_rules = sorted(rules, key = lambda r: r.specificity)
+        sorted_rules = sorted(rules, key=lambda r: r.specificity)

         # Now that the rules have been sorted, loop through each rule, and then loop
         # through rules below it (higher indeces), adding exclusions whenever appropriate.
         for general_rule in sorted_rules:
             for specific_rule in reversed(sorted_rules):
-                if (specific_rule.specificity > general_rule.specificity and \
-                    specific_rule.specificity != PlacementRule.SYMBOL_SPECIFICITY) or \
-                    (specific_rule.specificity == PlacementRule.SYMBOL_SPECIFICITY and \
-                    general_rule.specificity == PlacementRule.OBJECT_SPECIFICITY):
+                if (specific_rule.specificity > general_rule.specificity and
+                        specific_rule.specificity != PlacementRule.SYMBOL_SPECIFICITY) or \
+                   (specific_rule.specificity == PlacementRule.SYMBOL_SPECIFICITY and
+                        general_rule.specificity == PlacementRule.OBJECT_SPECIFICITY):
                     general_rule.add_exclusion(specific_rule, sections_info)

     def add_fragments_from_file(self, fragment_file):
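The hunk above removes backslash continuations that are redundant inside parentheses (flake8 E502); Python treats everything between open brackets as one logical line, so only the continuation between the two parenthesized groups still needs `\`. A small sketch of the same rule, with hypothetical values:

```python
a, b, c, d = 3, 2, 1, 0

# Inside brackets the expression is one logical line; no backslash needed (E502).
# Between the two parenthesized groups, the backslash is still required.
if (a > b and
        c != d) or \
   (c == d and
        a == b):
    print("condition matched")
```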
@@ -484,11 +484,12 @@ class GenerationModel:
         dict_to_append_to[fragment.name] = fragment


-"""
-Encapsulates a linker script template file. Finds marker syntax and handles replacement to generate the
-final output.
-"""
 class TemplateModel:
+    """
+    Encapsulates a linker script template file. Finds marker syntax and handles replacement to generate the
+    final output.
+    """

     Marker = collections.namedtuple("Marker", "target indent rules")
@@ -526,7 +527,6 @@ class TemplateModel:
             target = None
             try:
                 target = member.target
-                indent = member.indent
                 rules = member.rules

                 del rules[:]
@@ -535,7 +535,7 @@
                 except KeyError:
                     message = GenerationException.UNDEFINED_REFERENCE + " to target '" + target + "'."
                     raise GenerationException(message)
-            except AttributeError as a:
+            except AttributeError:
                 pass

     def write(self, output_file):
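Two fixes in the hunks above: the unused `indent = member.indent` assignment is deleted and the unused `as a` binding is dropped from the `except` clause (both flake8 F841, local variable assigned but never used). A minimal sketch of the exception form:

```python
try:
    rules = object().rules          # object() has no .rules attribute, raises AttributeError
except AttributeError:              # no "as e" binding when the exception object is unused
    rules = []

print(rules)                        # []
```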
@@ -557,11 +557,12 @@ class TemplateModel:
             except AttributeError:
                 output_file.write(member)


-"""
-Exception for linker script generation failures such as undefined references/ failure to
-evaluate conditions, duplicate mappings, etc.
-"""
 class GenerationException(LdGenFailure):
+    """
+    Exception for linker script generation failures such as undefined references/ failure to
+    evaluate conditions, duplicate mappings, etc.
+    """

     UNDEFINED_REFERENCE = "Undefined reference"
@@ -575,11 +576,12 @@ class GenerationException(LdGenFailure):
         else:
             return self.message


-"""
-Encapsulates an output of objdump. Contains information about the static library sections
-and names
-"""
 class SectionsInfo(dict):
+    """
+    Encapsulates an output of objdump. Contains information about the static library sections
+    and names
+    """

     __info = collections.namedtuple("__info", "filename content")
@@ -607,8 +609,11 @@ class SectionsInfo(dict):
         object = Fragment.ENTITY.setResultsName("object") + Literal(":").suppress() + Literal("file format elf32-xtensa-le").suppress()

         # Sections table
-        header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") + Literal("LMA") + Literal("File off") + Literal("Algn"))
-        entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) + Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) + Optional(Literal(","))))
+        header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") +
+                          Literal("LMA") + Literal("File off") + Literal("Algn"))
+        entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) +
+                                                                   Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) +
+                                                                   Optional(Literal(","))))

         # Content is object file line + sections table
         content = Group(object + header + Group(ZeroOrMore(entry)).setResultsName("sections"))
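The hunk wraps grammar expressions that overflowed the line-length limit (flake8 E501), aligning continuation lines inside the opening parenthesis. A runnable sketch of the same wrapping style, assuming pyparsing is installed:

```python
from pyparsing import Literal, Suppress, Word, nums

# Continuation lines sit inside the open parenthesis, aligned with the
# first argument, so no backslashes are needed.
entry = (Suppress(Literal("Idx")) +
         Word(nums))

print(entry.parseString("Idx 42"))  # ['42']
```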
View file
@@ -16,8 +16,6 @@
 #
 import argparse
-import os
-import traceback
 import sys
 import tempfile
@@ -26,41 +24,41 @@
 from sdkconfig import SDKConfig
 from generation import GenerationModel, TemplateModel, SectionsInfo
 from common import LdGenFailure


 def main():

-    argparser = argparse.ArgumentParser(description = "ESP-IDF linker script generator")
+    argparser = argparse.ArgumentParser(description="ESP-IDF linker script generator")

     argparser.add_argument(
         "--input", "-i",
-        help = "Linker template file",
-        type = argparse.FileType("r"))
+        help="Linker template file",
+        type=argparse.FileType("r"))

     argparser.add_argument(
         "--fragments", "-f",
-        type = argparse.FileType("r"),
-        help = "Input fragment files",
-        nargs = "+")
+        type=argparse.FileType("r"),
+        help="Input fragment files",
+        nargs="+")

     argparser.add_argument(
         "--sections", "-s",
-        type = argparse.FileType("r"),
-        help = "Library sections info",
-        )
+        type=argparse.FileType("r"),
+        help="Library sections info")

     argparser.add_argument(
         "--output", "-o",
-        help = "Output linker script",
-        type = str)
+        help="Output linker script",
+        type=str)

     argparser.add_argument(
         "--config", "-c",
-        help = "Project configuration",
-        type = argparse.FileType("r"))
+        help="Project configuration",
+        type=argparse.FileType("r"))

     argparser.add_argument(
         "--kconfig", "-k",
-        help = "IDF Kconfig file",
-        type = argparse.FileType("r"))
+        help="IDF Kconfig file",
+        type=argparse.FileType("r"))

     argparser.add_argument(
         "--env", "-e",
@@ -110,5 +108,6 @@ def main():
         print("linker script generation failed for %s\nERROR: %s" % (input_file.name, e))
         sys.exit(1)

+
 if __name__ == "__main__":
     main()
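Throughout `main()` the spaces around `=` in keyword arguments are removed (flake8 E251): PEP 8 reserves spaced `=` for assignments, not for keyword or default-value syntax. A minimal argparse sketch in the corrected style, with made-up option values:

```python
import argparse

argparser = argparse.ArgumentParser(description="demo generator")  # keyword: no spaces around "="
argparser.add_argument(
    "--output", "-o",
    help="Output file",
    type=str)

args = argparser.parse_args(["-o", "out.ld"])
print(args.output)  # out.ld
```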
View file
@@ -15,28 +15,30 @@
 #
 import os
-from pyparsing import *
+from pyparsing import Word, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc
 import sys

-parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new")
-sys.path.append(kconfig_new_dir)
-import kconfiglib
+try:
+    import kconfiglib
+except ImportError:
+    parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+    kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new")
+    sys.path.append(kconfig_new_dir)
+    import kconfiglib


-"""
-Encapsulates an sdkconfig file. Defines grammar of a configuration entry, and enables
-evaluation of logical expressions involving those entries.
-"""
 class SDKConfig:
+    """
+    Encapsulates an sdkconfig file. Defines grammar of a configuration entry, and enables
+    evaluation of logical expressions involving those entries.
+    """

     # A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
     IDENTIFIER = Word(printables.upper())

     HEX = Combine("0x" + Word(hexnums)).setParseAction(lambda t:int(t[0], 16))
     DECIMAL = Combine(Optional(Literal("+") | Literal("-")) + Word(nums)).setParseAction(lambda t:int(t[0]))
     LITERAL = Word(printables)
     QUOTED_LITERAL = quotedString.setParseAction(removeQuotes)

     VALUE = HEX | DECIMAL | LITERAL | QUOTED_LITERAL
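The import hunk above prefers an installed `kconfiglib` and only falls back to extending `sys.path` when the plain import fails, instead of unconditionally mutating the path. The same pattern, sketched with a hypothetical module name and fallback directory:

```python
import os
import sys

try:
    import somelib                     # hypothetical module; normal import when available
except ImportError:
    # Fall back to a known checkout location only when needed.
    sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), "vendor"))
    import somelib
```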
@@ -44,8 +46,8 @@ class SDKConfig:
     # Operators supported by the expression evaluation
     OPERATOR = oneOf(["=", "!=", ">", "<", "<=", ">="])

-    def __init__(self, kconfig_file, sdkconfig_file, env = []):
-        env = [ (name, value) for (name,value) in ( e.split("=",1) for e in env) ]
+    def __init__(self, kconfig_file, sdkconfig_file, env=[]):
+        env = [(name, value) for (name,value) in (e.split("=",1) for e in env)]

         for name, value in env:
             value = " ".join(value.split())
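Note that the hunk only tightens whitespace; `env=[]` remains a mutable default argument. That is harmless here because the list is rebound rather than mutated, but as a general pattern a fresh default per call is safer. A sketch of the pitfall and the usual fix:

```python
def shared(items=[]):        # one list object shared by every call
    items.append(1)
    return items

def fresh(items=None):       # new list per call
    items = [] if items is None else items
    items.append(1)
    return items

print(shared())   # [1]
print(shared())   # [1, 1] - state leaked across calls
print(fresh())    # [1]
print(fresh())    # [1]
```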
@@ -57,11 +59,11 @@ class SDKConfig:
     def evaluate_expression(self, expression):
         result = self.config.eval_string(expression)

-        if result == 0: # n
+        if result == 0:  # n
             return False
-        elif result == 2: # y
+        elif result == 2:  # y
             return True
-        else: # m
+        else:  # m
             raise Exception("Unsupported config expression result.")

     @staticmethod
@@ -77,10 +79,9 @@ class SDKConfig:
         condition = Group(Optional("(").suppress() + test + Optional(")").suppress())

-        grammar = infixNotation(
-            condition, [
-                ("!", 1, opAssoc.RIGHT),
-                ("&&", 2, opAssoc.LEFT),
-                ("||", 2, opAssoc.LEFT)])
+        grammar = infixNotation(condition, [
+            ("!", 1, opAssoc.RIGHT),
+            ("&&", 2, opAssoc.LEFT),
+            ("||", 2, opAssoc.LEFT)])

         return grammar
View file
@@ -17,12 +17,17 @@
 import unittest
 import sys
-import os
-
-sys.path.append('../')
-from fragments import *
-from pyparsing import *
-from sdkconfig import *
+from pyparsing import ParseException
+from pyparsing import restOfLine
+
+try:
+    import fragments
+except ImportError:
+    sys.path.append('../')
+    import fragments
+
+from sdkconfig import SDKConfig


 class FragmentTest(unittest.TestCase):
@@ -31,10 +36,11 @@ class FragmentTest(unittest.TestCase):
         fragment = self.parser.parseString(text, parseAll=True)
         return fragment[0]


 class SectionsTest(FragmentTest):

     def setUp(self):
-        self.parser = Sections.get_fragment_grammar()
+        self.parser = fragments.Sections.get_fragment_grammar()

     def test_valid_entries(self):
         valid_entries = """
@@ -74,7 +80,7 @@ class SectionsTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            sections = self.parse(blank_entries)
+            self.parse(blank_entries)

     def test_invalid_names(self):
         with_spaces = """
@@ -93,13 +99,13 @@ class SectionsTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            sections = self.parse(with_spaces)
+            self.parse(with_spaces)

         with self.assertRaises(ParseException):
-            sections = self.parse(begins_with_number)
+            self.parse(begins_with_number)

         with self.assertRaises(ParseException):
-            sections = self.parse(with_special_character)
+            self.parse(with_special_character)

     def test_non_existent_entries(self):
         misspelled_entries_field = """
@@ -113,10 +119,10 @@ class SectionsTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            sections = self.parse(misspelled_entries_field)
+            self.parse(misspelled_entries_field)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_entries_field)
+            self.parse(missing_entries_field)

     def test_duplicate_entries(self):
         duplicate_entries = """
@@ -143,10 +149,11 @@ class SectionsTest(FragmentTest):
         self.assertEqual(set(entries), expected)


 class SchemeTest(FragmentTest):

     def setUp(self):
-        self.parser = Scheme.get_fragment_grammar()
+        self.parser = fragments.Scheme.get_fragment_grammar()

     def test_valid_entries(self):
         valid_entries = """
@@ -202,10 +209,10 @@ class SchemeTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            scheme = self.parse(wrong_character)
+            self.parse(wrong_character)

         with self.assertRaises(ParseException):
-            scheme = self.parse(single_word)
+            self.parse(single_word)

     def test_blank_entries(self):
         blank_entries = """
@@ -214,7 +221,7 @@ class SchemeTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            sections = self.parse(blank_entries)
+            self.parse(blank_entries)

     def test_non_existent_entries(self):
         misspelled_entries_field = """
@@ -228,15 +235,16 @@ class SchemeTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            sections = self.parse(misspelled_entries_field)
+            self.parse(misspelled_entries_field)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_entries_field)
+            self.parse(missing_entries_field)


 class MappingTest(FragmentTest):

     def setUp(self):
-        self.parser = Mapping.get_fragment_grammar()
+        self.parser = fragments.Mapping.get_fragment_grammar()

     def parse_expression(self, expression):
         parser = SDKConfig.get_expression_grammar()
@@ -264,12 +272,12 @@ class MappingTest(FragmentTest):
         entries = mapping.entries

         expected = [("default", {
             ("obj", "symbol", "noflash"),
             ("obj", None, "noflash"),
             ("obj", "symbol_2", "noflash"),
             ("obj_2", None, "noflash"),
             ("*", None, "noflash")
-        } ) ]
+        })]

         self.assertEqual(entries, expected)
@@ -360,43 +368,43 @@ class MappingTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            sections = self.parse(with_fragment_name)
+            self.parse(with_fragment_name)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_archive)
+            self.parse(missing_archive)

         with self.assertRaises(ParseException):
-            sections = self.parse(misspelled_archive)
+            self.parse(misspelled_archive)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_entries)
+            self.parse(missing_entries)

         with self.assertRaises(ParseException):
-            sections = self.parse(misspelled_entries)
+            self.parse(misspelled_entries)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_symbols)
+            self.parse(missing_symbols)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_scheme_1)
+            self.parse(missing_scheme_1)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_scheme_2)
+            self.parse(missing_scheme_2)

         with self.assertRaises(ParseException):
-            sections = self.parse(missing_entity)
+            self.parse(missing_entity)

         with self.assertRaises(ParseException):
-            sections = self.parse(wilcard_symbol)
+            self.parse(wilcard_symbol)

         with self.assertRaises(ParseException):
-            sections = self.parse(empty_object_with_symbol)
+            self.parse(empty_object_with_symbol)

         with self.assertRaises(ParseException):
-            sections = self.parse(wildcard_object_with_symbol)
+            self.parse(wildcard_object_with_symbol)

         with self.assertRaises(ParseException):
-            sections = self.parse(empty_definition)
+            self.parse(empty_definition)

     def test_explicit_blank_default_w_others(self):
         expl_blnk_w_oth = """
@@ -412,14 +420,13 @@ class MappingTest(FragmentTest):
         entries = mapping.entries

-        expected = [ ( entries[0][0] , {
+        expected = [(entries[0][0], {
             ("obj_a", None, "noflash"),
-        } ),
-        ("default", set() ) ]
+        }),
+            ("default", set())]

         self.assertEqual(entries, expected)

     def test_implicit_blank_default_w_others(self):
         impl_blnk_w_oth = """
         [mapping]
@@ -433,10 +440,10 @@ class MappingTest(FragmentTest):
         entries = mapping.entries

-        expected = [ ( entries[0][0] , {
+        expected = [(entries[0][0], {
             ("obj_a", None, "noflash"),
-        } ),
-        ("default", set() ) ]
+        }),
+            ("default", set())]

         self.assertEqual(entries, expected)
@@ -449,7 +456,7 @@ class MappingTest(FragmentTest):
         """

         mapping = self.parse(expl_blnk_def)
         entries = mapping.entries

-        expected = [ ("default", set() ) ]
+        expected = [("default", set())]

         self.assertEqual(entries, expected)
@@ -462,7 +469,7 @@ class MappingTest(FragmentTest):
         """

         mapping = self.parse(impl_blnk_def)
         entries = mapping.entries

-        expected = [ ("default", set() ) ]
+        expected = [("default", set())]

         self.assertEqual(entries, expected)
@@ -486,19 +493,19 @@ class MappingTest(FragmentTest):
         entries = mapping.entries

-        expected = [ ( entries[0][0] , {
+        expected = [(entries[0][0], {
             ("obj_a1", None, "noflash"),
             ("obj_a2", None, "noflash"),
-        } ),
-        ( entries[1][0] , {
+        }),
+            (entries[1][0], {
                 ("obj_b1", None, "noflash"),
                 ("obj_b2", None, "noflash"),
                 ("obj_b3", None, "noflash"),
-        } ),
-        ( entries[2][0] , {
+            }),
+            (entries[2][0], {
                 ("obj_c1", None, "noflash"),
-        } ),
-        ("default", set() ) ]
+            }),
+            ("default", set())]

         self.assertEqual(entries, expected)
@@ -522,18 +529,18 @@ class MappingTest(FragmentTest):
         entries = mapping.entries

-        expected = [ ( entries[0][0] , {
+        expected = [(entries[0][0], {
             ("obj_a", None, "noflash")
-        } ),
-        ( entries[1][0] , set()),
-        ( entries[2][0] , {
+        }),
+            (entries[1][0], set()),
+            (entries[2][0], {
                 ("obj_c", None, "noflash")
-        } ),
-        ( entries[3][0] , set()),
-        ( entries[4][0] , set()),
-        ( "default" , {
+            }),
+            (entries[3][0], set()),
+            (entries[4][0], set()),
+            ("default", {
                 ("obj", None, "noflash")
-        } ) ]
+            })]

         self.assertEqual(entries, expected)
@@ -548,8 +555,7 @@ class MappingTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            mapping = self.parse(blank_first_condition)
+            self.parse(blank_first_condition)

     def test_nonlast_default(self):
         nonlast_default_1 = """
@@ -587,13 +593,13 @@ class MappingTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            mapping = self.parse(nonlast_default_1)
+            self.parse(nonlast_default_1)

         with self.assertRaises(ParseException):
-            mapping = self.parse(nonlast_default_2)
+            self.parse(nonlast_default_2)

         with self.assertRaises(ParseException):
-            mapping = self.parse(nonlast_default_3)
+            self.parse(nonlast_default_3)

     def test_duplicate_default(self):
         duplicate_default_1 = """
@@ -623,10 +629,11 @@ class MappingTest(FragmentTest):
         """

         with self.assertRaises(ParseException):
-            mapping = self.parse(duplicate_default_1)
+            self.parse(duplicate_default_1)

         with self.assertRaises(ParseException):
-            mapping = self.parse(duplicate_default_2)
+            self.parse(duplicate_default_2)


-if __name__ =="__main__":
+if __name__ == "__main__":
     unittest.main()
View file
@@ -17,11 +17,25 @@
 import unittest
 import sys
-import os

-sys.path.append('../')
-from generation import *
-from pyparsing import *
+try:
+    from generation import PlacementRule
+except ImportError:
+    sys.path.append('../')
+    from generation import PlacementRule
+
+from generation import GenerationException
+from generation import SectionsInfo
+from generation import TemplateModel
+from generation import GenerationModel
+
+from fragments import FragmentFileModel
+from fragments import Mapping
+from fragments import Sections
+from fragments import Scheme
+
+from sdkconfig import SDKConfig


 class GenerationModelTest(unittest.TestCase):
@@ -270,7 +284,6 @@ class GenerationModelTest(unittest.TestCase):
         self._compare_rules(expected, actual)

     def test_rule_generation_nominal_4(self):
         normal = """
         [mapping]
@@ -524,8 +537,10 @@ class GenerationModelTest(unittest.TestCase):
         dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["rodata"].entries, "dram0_data")

         rtc_text_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
+        rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
+        rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         iram0_text_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["text"].entries, "iram0_text")
         dram0_data_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["rodata"].entries, "dram0_data")
@@ -591,8 +606,10 @@ class GenerationModelTest(unittest.TestCase):
         dram0_bss_default = self._get_default("dram0_bss", expected)

         rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
+        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
+        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
         dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
@@ -648,8 +665,10 @@ class GenerationModelTest(unittest.TestCase):
         dram0_bss_default = self._get_default("dram0_bss", expected)

         rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
+        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
+        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         iram0_text_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
         dram0_data_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
@@ -767,8 +786,10 @@ class GenerationModelTest(unittest.TestCase):
         dram0_bss_default = self._get_default("dram0_bss", expected)

         rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
+        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
+        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
+                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
         dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
@@ -847,8 +868,10 @@ class GenerationModelTest(unittest.TestCase):
         rtc_bss_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         rtc_text_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
+        rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None,
+                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
+        rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None,
+                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         iram0_text_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
         dram0_data_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
@@ -918,8 +941,10 @@ class GenerationModelTest(unittest.TestCase):
         dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")

         rtc_text_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
+        rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
+                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
+        rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
+                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

         rtc_text_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
         rtc_data_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
@@ -1041,7 +1066,7 @@ class GenerationModelTest(unittest.TestCase):
                                     croutine (noflash)
         """

         conflict_scheme = """
         [scheme:conflict]
         entries:
             rodata -> dram0_data
@@ -1052,9 +1077,9 @@ class GenerationModelTest(unittest.TestCase):
         self._add_mapping(conflict_mapping)

         with self.assertRaises(GenerationException):
-            actual = self.model.generate_rules(self.sdkconfig, self.sections_info)
+            self.model.generate_rules(self.sdkconfig, self.sections_info)

-    def test_rule_generation_condition (self):
+    def test_rule_generation_condition(self):
         generation_with_condition = """
         [mapping]
         archive: lib.a
@@ -1083,7 +1108,7 @@ class GenerationModelTest(unittest.TestCase):
         flash_rodata_default = self._get_default("flash_rodata", expected)

         if perf_level < 4:
-            for append_no in range (1, perf_level + 1):
+            for append_no in range(1, perf_level + 1):
                 iram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["text"].entries, "iram0_text")
                 dram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["rodata"].entries, "dram0_data")
@@ -1095,5 +1120,6 @@ class GenerationModelTest(unittest.TestCase):
         self._compare_rules(expected, actual)

-if __name__ =="__main__":
+
+if __name__ == "__main__":
     unittest.main()
View file
@@ -26,17 +26,18 @@ import socket
 import pty
 import filecmp
 import threading
+import errno

 test_list = (
     # Add new tests here. All files should be placed in IN_DIR. Columns are:
     # Input file    Filter string                            File with expected output    Timeout
     ('in1.txt',     '',                                      'in1f1.txt',                 60),
     ('in1.txt',     '*:V',                                   'in1f1.txt',                 60),
     ('in1.txt',     'hello_world',                           'in1f2.txt',                 60),
     ('in1.txt',     '*:N',                                   'in1f3.txt',                 60),
     ('in2.txt',     'boot mdf_device_handle:I mesh:E vfs:I', 'in2f1.txt',                 240),
     ('in2.txt',     'vfs',                                   'in2f2.txt',                 240),
 )

 IN_DIR = 'tests/'       # tests are in this directory
 OUT_DIR = 'outputs/'    # test results are written to this directory (kept only for debugging purposes)
@@ -51,6 +52,7 @@ SOCKET_TIMEOUT = 30
 # the test is restarted after failure (idf_monitor has to be killed):
 RETRIES_PER_TEST = 5

+
 def monitor_timeout(process):
     if process.poll() is None:
         # idf_monitor is still running
@@ -64,6 +66,7 @@ def monitor_timeout(process):
         else:
             raise

+
 class TestRunner(object):
     def __enter__(self):
         self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -85,12 +88,13 @@ class TestRunner(object):
         clientsocket.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
         return clientsocket

+
 def test_iteration(runner, test, startup_timeout):
     print('\nRunning test on {} with filter "{}" and expecting {}'.format(test[0], test[1], test[2]))
     try:
         with open(OUT_DIR + test[2], "w", encoding='utf-8') as o_f, open(ERR_OUT, "w", encoding='utf-8') as e_f:
             monitor_cmd = [sys.executable,
                            IDF_MONITOR, '--port', 'socket://{}:{}'.format(HOST, runner.port), '--print_filter', test[1], ELF_FILE]
             (master_fd, slave_fd) = pty.openpty()
             print('\t', ' '.join(monitor_cmd), sep='')
             print('\tstdout="{}" stderr="{}" stdin="{}"'.format(o_f.name, e_f.name, os.ttyname(slave_fd)))
@@ -140,6 +144,7 @@ def test_iteration(runner, test, startup_timeout):
     else:
         raise RuntimeError("The contents of the files are different. Please examine the artifacts.")

+
 def main():
     gstart = time.time()
     if not os.path.exists(OUT_DIR):
@@ -169,5 +174,6 @@ def main():
     gend = time.time()
     print('Execution took {:.2f} seconds\n'.format(gend - gstart))

+
 if __name__ == "__main__":
     main()
View file
@@ -16,8 +16,12 @@
 import sys

-sys.path.append('..')
-import idf_size
+try:
+    import idf_size
+except ImportError:
+    sys.path.append('..')
+    import idf_size

 if __name__ == "__main__":
     try:
View file
@@ -22,11 +22,15 @@ import sys
 import re
 import argparse

-test_fw_path = os.getenv("TEST_FW_PATH")
-if test_fw_path:
-    sys.path.insert(0, test_fw_path)
+try:
+    from Utility.CIAssignTest import AssignTest
+except ImportError:
+    test_fw_path = os.getenv("TEST_FW_PATH")
+    if test_fw_path:
+        sys.path.insert(0, test_fw_path)
+    from Utility.CIAssignTest import AssignTest

-from Utility.CIAssignTest import AssignTest, Group
+from Utility.CIAssignTest import Group


 class ExampleGroup(Group):
View file
@@ -9,11 +9,13 @@ import argparse
 import yaml

-test_fw_path = os.getenv("TEST_FW_PATH")
-if test_fw_path:
-    sys.path.insert(0, test_fw_path)
-
-from Utility import CIAssignTest
+try:
+    from Utility import CIAssignTest
+except ImportError:
+    test_fw_path = os.getenv("TEST_FW_PATH")
+    if test_fw_path:
+        sys.path.insert(0, test_fw_path)
+    from Utility import CIAssignTest


 class Group(CIAssignTest.Group):
View file
@@ -426,10 +426,10 @@ class BaseDUT(object):
         :param data: data which needs to be checked and maybe transformed
         """
-        if type(data) is type(u''):
+        if isinstance(data, type(u'')):
             try:
                 data = data.encode('utf-8')
-            except:
+            except Exception:
                 print(u'Cannot encode {} of type {}'.format(data, type(data)))
                 raise
         return data
@@ -529,9 +529,9 @@ class BaseDUT(object):
         :return: match groups if match succeed otherwise None
         """
         ret = None
-        if type(pattern.pattern) is type(u''):
+        if isinstance(pattern.pattern, type(u'')):
             pattern = re.compile(BaseDUT.u_to_bytearray(pattern.pattern))
-        if type(data) is type(u''):
+        if isinstance(data, type(u'')):
             data = BaseDUT.u_to_bytearray(data)
         match = pattern.search(data)
         if match:
@@ -543,7 +543,7 @@ class BaseDUT(object):
     EXPECT_METHOD = [
         [type(re.compile("")), "_expect_re"],
-        [type(b''), "_expect_str"], # Python 2 & 3 hook to work without 'from builtins import str' from future
+        [type(b''), "_expect_str"],  # Python 2 & 3 hook to work without 'from builtins import str' from future
         [type(u''), "_expect_str"],
     ]
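Two recurring fixes in this file: `type(x) is type(u'')` becomes `isinstance(x, type(u''))` (flake8 E721), and the bare `except:` becomes `except Exception:` (E722), which keeps `SystemExit` and `KeyboardInterrupt` from being silently trapped. A small, self-contained sketch of both:

```python
data = u"caf\u00e9"

if isinstance(data, type(u'')):        # preferred over: type(data) is type(u'')
    try:
        data = data.encode('utf-8')
    except Exception:                  # bare "except:" would also trap KeyboardInterrupt
        raise

print(data)                            # b'caf\xc3\xa9'
```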
View file
@@ -67,7 +67,7 @@ class Config(object):
         try:
             value = self.configs[variable_name]
         except KeyError:
-            #TODO: to support auto get variable here
+            # TODO: to support auto get variable here
             value = None
         if value is None:
             raise ValueError("Failed to get variable")
View file
@@ -41,11 +41,11 @@ class IDFApp(App.BaseApp):
                    "or 'idf.py build' "
                    "for resolving the issue."
                    "").format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE,
                               self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE)
             raise AssertionError(msg)

         self.flash_files, self.flash_settings = self._parse_flash_download_config()
         self.partition_table = self._parse_partition_table()

     @classmethod
     def get_sdk_path(cls):
@@ -54,7 +54,6 @@ class IDFApp(App.BaseApp):
         assert os.path.exists(idf_path)
         return idf_path

     def get_binary_path(self, app_path):
         """
         get binary path according to input app_path.
@@ -81,7 +80,7 @@ class IDFApp(App.BaseApp):
             # CMake version using build metadata file
             with open(os.path.join(self.binary_path, self.IDF_FLASH_ARGS_FILE), "r") as f:
                 args = json.load(f)
-                flash_files = [ (offs,file) for (offs,file) in args["flash_files"].items() if offs != "" ]
+                flash_files = [(offs,file) for (offs,file) in args["flash_files"].items() if offs != ""]
                 flash_settings = args["flash_settings"]
         else:
             # GNU Make version uses download.config arguments file
@@ -92,13 +91,13 @@ class IDFApp(App.BaseApp):
                 for idx in range(0, len(args), 2):  # process arguments in pairs
                     if args[idx].startswith("--"):
                         # strip the -- from the command line argument
-                        flash_settings[args[idx][2:]] = args[idx+1]
+                        flash_settings[args[idx][2:]] = args[idx + 1]
                     else:
                         # offs, filename
-                        flash_files.append( (args[idx], args[idx+1]) )
+                        flash_files.append((args[idx], args[idx + 1]))

         # make file offsets into integers, make paths absolute
-        flash_files = [ (int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files ]
+        flash_files = [(int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files]

         return (flash_files, flash_settings)
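The hunks above remove padding just inside brackets (flake8 E201/E202) and add spaces around arithmetic operators (`idx + 1`, E226). A self-contained sketch of the pairwise argument parsing in the corrected style, with made-up argument values:

```python
args = ["--flash_mode", "dio", "0x1000", "bootloader.bin"]

flash_settings = {}
flash_files = []
for idx in range(0, len(args), 2):  # process arguments in pairs
    if args[idx].startswith("--"):
        flash_settings[args[idx][2:]] = args[idx + 1]   # space around "+" (E226)
    else:
        flash_files.append((args[idx], args[idx + 1]))  # no padding inside parens (E201/E202)

print(flash_settings)  # {'flash_mode': 'dio'}
print(flash_files)     # [('0x1000', 'bootloader.bin')]
```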
View file
@@ -17,11 +17,8 @@ import os
 import os.path
 import sys
 import re
-import subprocess
 import functools
-import random
 import tempfile
-import time

 from serial.tools import list_ports
@@ -94,7 +91,7 @@ class IDFDUT(DUT.SerialDUT):
             esp = esptool.ESP32ROM(port)
             esp.connect()
             return esp.read_mac()
-        except RuntimeError as e:
+        except RuntimeError:
             return None
         finally:
             esp._port.close()
@@ -112,7 +109,7 @@ class IDFDUT(DUT.SerialDUT):
         """
         try:
             # note: opening here prevents us from having to seek back to 0 each time
-            flash_files = [ (offs, open(path, "rb")) for (offs, path) in self.app.flash_files ]
+            flash_files = [(offs, open(path, "rb")) for (offs, path) in self.app.flash_files]

             if erase_nvs:
                 address = self.app.partition_table["nvs"]["offset"]
@@ -120,7 +117,7 @@ class IDFDUT(DUT.SerialDUT):
                 nvs_file = tempfile.TemporaryFile()
                 nvs_file.write(b'\xff' * size)
                 nvs_file.seek(0)
-                flash_files.append( (int(address, 0), nvs_file) )
+                flash_files.append((int(address, 0), nvs_file))

             # fake flasher args object, this is a hack until
             # esptool Python API is improved
@@ -158,7 +155,7 @@ class IDFDUT(DUT.SerialDUT):
         :param: erase_nvs: whether erase NVS partition during flash
         :return: None
         """
-        for baud_rate in [ 921600, 115200 ]:
+        for baud_rate in [921600, 115200]:
             try:
                 self._try_flash(erase_nvs, baud_rate)
                 break
@@ -183,7 +180,7 @@ class IDFDUT(DUT.SerialDUT):
         :return: None
         """
         raise NotImplementedError()  # TODO: implement this
-        address = self.app.partition_table[partition]["offset"]
+        # address = self.app.partition_table[partition]["offset"]
         size = self.app.partition_table[partition]["size"]
         # TODO can use esp.erase_region() instead of this, I think
         with open(".erase_partition.tmp", "wb") as f:
@@ -231,7 +228,7 @@ class IDFDUT(DUT.SerialDUT):
         return [x for x in ports if not cls.INVALID_PORT_PATTERN.search(x)]

     # On MacOs with python3.6: type of espport is already utf8
-    if type(espport) is type(u''):
+    if isinstance(espport, type(u'')):
         port_hint = espport
     else:
         port_hint = espport.decode('utf8')
View file
@@ -53,7 +53,7 @@ class Runner(threading.Thread):
             for case in self.test_cases:
                 result = case.run()
                 self.test_result.append(result)

     def get_test_result(self):
         return self.test_result and all(self.test_result)
View file
@@ -59,9 +59,9 @@ def _convert_to_lower_case_bytes(item):
     """
     if isinstance(item, (tuple, list)):
         output = [_convert_to_lower_case_bytes(v) for v in item]
-    elif type(item) == type(b''):
+    elif isinstance(item, type(b'')):
         output = item.lower()
-    elif type(item) == type(u''):
+    elif isinstance(item, type(u'')):
         output = item.encode().lower()
     else:
         output = item
View file
@@ -15,7 +15,7 @@
 import matplotlib
 # fix can't draw figure with docker
 matplotlib.use('Agg')
-import matplotlib.pyplot as plt
+import matplotlib.pyplot as plt  # noqa: E402 - matplotlib.use('Agg') need to be before this

 # candidate colors
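`matplotlib.use('Agg')` has to run before `matplotlib.pyplot` is imported, so the import cannot be hoisted to the top of the file; the hunk keeps the order and silences flake8's module-import-not-at-top warning with `# noqa: E402` instead. A sketch of the pattern, assuming matplotlib is installed:

```python
import matplotlib
matplotlib.use('Agg')                  # select the headless backend first
import matplotlib.pyplot as plt        # noqa: E402 - must follow use()

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
fig.savefig("curve.png")               # renders without a display server
```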
View file
@@ -90,6 +90,6 @@ class Control(object):
     @classmethod
     def control_rest(cls, apc_ip, outlet, action):
-        outlet_list = list(range(1, 9)) # has to be a list if we want to remove from it under Python 3
+        outlet_list = list(range(1, 9))  # has to be a list if we want to remove from it under Python 3
         outlet_list.remove(outlet)
         cls.control(apc_ip, dict.fromkeys(outlet_list, action))
View file
@@ -44,7 +44,7 @@ class Search(object):
         except ImportError as e:
             print("ImportError: \r\n\tFile:" + file_name + "\r\n\tError:" + str(e))
         for i, test_function in enumerate(test_functions):
-            print("\t{}. ".format(i+1) + test_function.case_info["name"])
+            print("\t{}. ".format(i + 1) + test_function.case_info["name"])
         return test_functions

     @classmethod
View file
@@ -29,7 +29,7 @@ def console_log(data, color="white", end="\n"):
     if color not in _COLOR_CODES:
         color = "white"
     color_codes = _COLOR_CODES[color]
-    if type(data) is type(b''):
+    if isinstance(data, type(b'')):
         data = data.decode('utf-8', 'replace')
     print(color_codes + data, end=end)
     if color not in ["white", "W"]:
View file
@@ -155,6 +155,3 @@
 texinfo_documents = [
      author, 'TinyTestFW', 'One line description of project.',
      'Miscellaneous'),
 ]
-
-
-
View file
@@ -17,13 +17,16 @@ import re
 import os
 import sys

-# if we want to run test case outside `tiny-test-fw` folder,
-# we need to insert tiny-test-fw path into sys path
-test_fw_path = os.getenv("TEST_FW_PATH")
-if test_fw_path and test_fw_path not in sys.path:
-    sys.path.insert(0, test_fw_path)
+try:
+    import TinyFW
+except ImportError:
+    # if we want to run test case outside `tiny-test-fw` folder,
+    # we need to insert tiny-test-fw path into sys path
+    test_fw_path = os.getenv("TEST_FW_PATH")
+    if test_fw_path and test_fw_path not in sys.path:
+        sys.path.insert(0, test_fw_path)
+    import TinyFW

-import TinyFW
 import IDF
View file
@ -1,4 +1,3 @@
import sys
import glob import glob
import tempfile import tempfile
import os import os
@ -6,7 +5,6 @@ import os.path
import re import re
import shutil import shutil
import argparse import argparse
import json
import copy import copy
PROJECT_NAME = "unit-test-app" PROJECT_NAME = "unit-test-app"
@ -16,12 +14,13 @@ PROJECT_PATH = os.getcwd()
# Each file in configs/ directory defines a configuration. The format is the # Each file in configs/ directory defines a configuration. The format is the
# same as sdkconfig file. Configuration is applied on top of sdkconfig.defaults # same as sdkconfig file. Configuration is applied on top of sdkconfig.defaults
# file from the project directory # file from the project directory
CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs")) CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs"))
# Build (intermediate) and output (artifact) directories # Build (intermediate) and output (artifact) directories
BUILDS_DIR = os.path.join(PROJECT_PATH, "builds") BUILDS_DIR = os.path.join(PROJECT_PATH, "builds")
BINARIES_DIR = os.path.join(PROJECT_PATH, "output") BINARIES_DIR = os.path.join(PROJECT_PATH, "output")
# Convert the values passed to the -T parameter to corresponding cache entry definitions # Convert the values passed to the -T parameter to corresponding cache entry definitions
# TESTS_ALL and TEST_COMPONENTS # TESTS_ALL and TEST_COMPONENTS
class TestComponentAction(argparse.Action): class TestComponentAction(argparse.Action):
@ -46,10 +45,11 @@ class TestComponentAction(argparse.Action):
# Brute force add reconfigure at the very beginning # Brute force add reconfigure at the very beginning
existing_actions = getattr(namespace, "actions", []) existing_actions = getattr(namespace, "actions", [])
if not "reconfigure" in existing_actions: if "reconfigure" not in existing_actions:
existing_actions = ["reconfigure"] + existing_actions existing_actions = ["reconfigure"] + existing_actions
setattr(namespace, "actions", existing_actions) setattr(namespace, "actions", existing_actions)
class TestExcludeComponentAction(argparse.Action): class TestExcludeComponentAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None): def __call__(self, parser, namespace, values, option_string=None):
# Create a new of cache definition entry, adding previous elements # Create a new of cache definition entry, adding previous elements
@@ -66,22 +66,24 @@ class TestExcludeComponentAction(argparse.Action):
        # Brute force add reconfigure at the very beginning
        existing_actions = getattr(namespace, "actions", [])
-        if not "reconfigure" in existing_actions:
+        if "reconfigure" not in existing_actions:
            existing_actions = ["reconfigure"] + existing_actions
        setattr(namespace, "actions", existing_actions)

def add_argument_extensions(parser):
    # For convenience, define a -T argument that gets converted to -D arguments
    parser.add_argument('-T', '--test-component', help="Specify the components to test", nargs='+', action=TestComponentAction)
    # For convenience, define a -T argument that gets converted to -D arguments
    parser.add_argument('-E', '--test-exclude-components', help="Specify the components to exclude from testing", nargs='+', action=TestExcludeComponentAction)

def add_action_extensions(base_functions, base_actions):
    def ut_apply_config(ut_apply_config_name, args):
        config_name = re.match(r"ut-apply-config-(.*)", ut_apply_config_name).group(1)
-        def set_config_build_variables(prop, defval = None):
+        def set_config_build_variables(prop, defval=None):
            property_value = re.findall(r"^%s=(.+)" % prop, config_file_content, re.MULTILINE)
            if (property_value):
                property_value = property_value[0]
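`set_config_build_variables` extracts a `NAME=value` line from the sdkconfig text with an anchored MULTILINE search. A standalone illustration of that pattern (the sample content is invented):

    import re

    config_file_content = "CONFIG_A=1\nTEST_GROUPS=group1 group2\n"  # invented sample
    property_value = re.findall(r"^%s=(.+)" % "TEST_GROUPS", config_file_content, re.MULTILINE)
    assert property_value == ["group1 group2"]  # findall returns a list; element [0] is taken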
@@ -167,7 +169,7 @@ def add_action_extensions(base_functions, base_actions):
    # For local builds, use 'apply-config-NAME' target and then use normal 'all'
    # and 'flash' targets.
    def ut_build(ut_build_name, args):
        # Create a copy of the passed arguments to prevent arg modifications to accrue if
        # all configs are being built
        build_args = copy.copy(args)
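The `copy.copy(args)` gives each per-config build its own Namespace, so rebinding attributes during one iteration does not leak into the next. A minimal demonstration:

    import argparse
    import copy

    args = argparse.Namespace(actions=["all"])
    build_args = copy.copy(args)    # shallow copy of the namespace
    build_args.actions = ["flash"]  # rebinding on the copy...
    assert args.actions == ["all"]  # ...leaves the original untouched

Being shallow, the copy still shares mutable attribute values, so the protection covers attribute rebinding, not in-place mutation.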

View file

@@ -23,8 +23,8 @@ class Section(object):
        return False

    def __getitem__(self, item):
        """
        process slice.
        convert absolute address to relative address in current section and return slice result
        """
        if isinstance(item, int):
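The docstring describes mapping absolute addresses onto offsets into the section's own bytes, roughly as sketched below; the attribute names here are guesses, not the class's real fields:

    class SectionSketch(object):  # hypothetical stand-in for Section
        def __init__(self, start_address, data):
            self.start_address = start_address
            self.data = data

        def __getitem__(self, item):
            if isinstance(item, slice):  # absolute address range -> local slice
                return self.data[item.start - self.start_address:
                                 item.stop - self.start_address]
            return self.data[item - self.start_address]

    assert SectionSketch(0x100, b"\x01\x02\x03\x04")[0x101:0x103] == b"\x02\x03"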
@@ -128,11 +128,11 @@ class SectionTable(object):
        key = {"address": address, "section": section}
        for section in self.table:
            if key in section:
-                tmp = section[address:address+size]
+                tmp = section[address:address + size]
                value = 0
                for i in range(size):
                    if endian == "LE":
-                        value += ord(tmp[i]) << (i*8)
+                        value += ord(tmp[i]) << (i * 8)
                    elif endian == "BE":
                        value += ord(tmp[i]) << ((size - i - 1) * 8)
                    else:
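The loop assembles an unsigned integer from `size` bytes, shifting each byte by its position for little-endian ("LE") or its mirrored position for big-endian ("BE"). On Python 3 the result can be cross-checked with int.from_bytes (the `ord()` calls in the original are a Python 2 string idiom):

    tmp = b"\x78\x56\x34\x12"
    le = sum(tmp[i] << (i * 8) for i in range(4))            # "LE" branch
    be = sum(tmp[i] << ((4 - i - 1) * 8) for i in range(4))  # "BE" branch
    assert le == int.from_bytes(tmp, "little") == 0x12345678
    assert be == int.from_bytes(tmp, "big") == 0x78563412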

View file

@@ -29,7 +29,7 @@ class Parser(object):
    """ parse unit test cases from build files and create files for test bench """
    TAG_PATTERN = re.compile("([^=]+)(=)?(.+)?")
-    DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]")
+    DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]")  # noqa: W605 - regular expression
    CONFIG_PATTERN = re.compile(r"{([^}]+)}")
    TEST_GROUPS_PATTERN = re.compile(r"TEST_GROUPS=(.*)$")
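The added `# noqa: W605` waives flake8's invalid-escape-sequence warning rather than fixing it. The alternative (not what this commit chose) is a raw string, which compiles to the same pattern:

    import re

    DESCRIPTION_PATTERN = re.compile(r"\[([^]\[]+)\]")  # raw string: no W605
    assert DESCRIPTION_PATTERN.findall("[leaf][timeout=15]") == ["leaf", "timeout=15"]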
@@ -83,7 +83,7 @@ class Parser(object):
        name_addr = table.get_unsigned_int(section, test_addr, 4)
        desc_addr = table.get_unsigned_int(section, test_addr + 4, 4)
        file_name_addr = table.get_unsigned_int(section, test_addr + 12, 4)
-        function_count = table.get_unsigned_int(section, test_addr+20, 4)
+        function_count = table.get_unsigned_int(section, test_addr + 20, 4)
        name = table.get_string("any", name_addr)
        desc = table.get_string("any", desc_addr)
        file_name = table.get_string("any", file_name_addr)
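These reads walk fixed offsets (0, 4, 12, 20) of what appears to be a table of 32-bit fields emitted into the binary. Given the raw bytes, struct.unpack_from expresses the same layout; the packed values below are fabricated for illustration:

    import struct

    raw = struct.pack("<6I", 0x3F400010, 0x3F400020, 0, 0x3F400030, 0, 3)  # fabricated
    name_addr, desc_addr = struct.unpack_from("<II", raw, 0)
    file_name_addr = struct.unpack_from("<I", raw, 12)[0]
    function_count = struct.unpack_from("<I", raw, 20)[0]
    assert (file_name_addr, function_count) == (0x3F400030, 3)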
@@ -213,7 +213,6 @@ class Parser(object):
        return self.parse_tags_internal(configs, self.config_dependencies, self.CONFIG_PATTERN)

    def get_test_groups(self, config_file):
        """
        If the config file includes TEST_GROUPS variable, return its value as a list of strings.
@@ -325,7 +324,7 @@ def test_parser():
    }
    sdkconfig = ["123", "789"]
    tags = parser.parse_tags_internal(sdkconfig, config_dependency, parser.CONFIG_PATTERN)
    assert sorted(tags) == ['a', 'd', 'f']  # sorted is required for older Python3, e.g. 3.4.8

def main():
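The `sorted()` in the assertion exists because the tag list is apparently built from a set, and set iteration order varies across runs and interpreter versions; sorting makes the comparison order-independent:

    tags = list({"d", "a", "f"})            # iteration order not guaranteed
    assert sorted(tags) == ["a", "d", "f"]  # stable, order-independent check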

View file

@@ -26,7 +26,7 @@ import argparse
import threading

try:
    import TinyFW
except ImportError:
    # if we want to run test case outside `tiny-test-fw` folder,
    # we need to insert tiny-test-fw path into sys path
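The fallback these comments describe presumably looks something like the sketch below: locate the tiny-test-fw directory, prepend it to sys.path, and retry the import. The TEST_FW_PATH variable name is a guess, not confirmed by this diff:

    import os
    import sys

    try:
        import TinyFW
    except ImportError:
        test_fw_path = os.getenv("TEST_FW_PATH")  # assumed lookup mechanism
        if test_fw_path and test_fw_path not in sys.path:
            sys.path.insert(0, test_fw_path)
        import TinyFW  # re-raises if the path hint did not help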
@@ -374,7 +374,7 @@ class Handler(threading.Thread):
                Utility.console_log("No case detected!", color="orange")
            while not self.finish and not self.force_stop.isSet():
                try:
-                    self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),
+                    self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),  # noqa: W605 - regex
                                         get_child_case_name),
                                        (self.WAIT_SIGNAL_PATTERN, device_wait_action),  # wait signal pattern
                                        (self.SEND_SIGNAL_PATTERN, device_send_action),  # send signal pattern
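`expect_any` takes (pattern, callback) pairs and runs the callback for whichever pattern matches the DUT output. A generic illustration of that dispatch shape, not TinyFW's actual implementation:

    import re

    def expect_any_sketch(line, *patterns_and_callbacks):
        # fire the callback paired with the first pattern that matches
        for pattern, callback in patterns_and_callbacks:
            match = pattern.search(line)
            if match:
                return callback(match.group(1))

    result = expect_any_sketch('Running (0) "sanity_test"',
                               (re.compile(r'\((\d+)\)\s"(\w+)"'), int))
    assert result == 0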
@@ -742,7 +742,7 @@ if __name__ == '__main__':
    test_env = Env.Env(**env_config)
    detect_update_unit_test_info(test_env, extra_data=list_of_dicts, app_bin=args.app_bin)
-    for index in range(1, args.repeat+1):
+    for index in range(1, args.repeat + 1):
        if args.repeat > 1:
            Utility.console_log("Repetition {}".format(index), color="green")
        for dic in list_of_dicts:

View file

@@ -3,11 +3,16 @@
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import print_function, division
-import sys, subprocess, os.path, re
+import sys
+import subprocess
+import os.path
+import re

UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}

def check_path(path):
    try:
        return paths[path]
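`check_path` memoizes conversions in the module-level `paths` dict so each distinct MSYS path is translated only once per run; the conversion itself (elided from this hunk) presumably shells out to a helper such as cygpath. A sketch of the caching shape with a stubbed converter:

    paths = {}

    def check_path_sketch(path):
        try:
            return paths[path]  # cache hit: skip the conversion
        except KeyError:
            pass
        winpath = path.replace("/c/", "C:\\").replace("/", "\\")  # stand-in converter
        paths[path] = winpath
        return winpath

    assert check_path_sketch("/c/esp/project") == "C:\\esp\\project"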
@@ -24,13 +29,15 @@ def check_path(path):
    paths[path] = winpath
    return winpath

def main():
    print("Running make in '%s'" % check_path(os.getcwd()))
    make = subprocess.Popen(["make"] + sys.argv[1:] + ["BATCH_BUILD=1"], stdout=subprocess.PIPE)
    for line in iter(make.stdout.readline, ''):
        line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
        print(line.rstrip())
    sys.exit(make.wait())

if __name__ == "__main__":
    main()
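The two-argument `iter()` in main() calls readline until it returns the sentinel '' at end of output, which is the idiomatic way to stream a pipe line by line. The same idiom on an in-memory file:

    import io

    stream = io.StringIO("line one\nline two\n")
    for line in iter(stream.readline, ''):  # stop once readline() returns ''
        print(line.rstrip())

Note that on Python 3 a subprocess pipe yields bytes, so the sentinel would need to be b'' (or the pipe opened in text mode); the __future__ import suggests the script still targets Python 2 as well.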