tools: Fix the Python coding style

This commit is contained in:
Roland Dobai 2018-12-04 13:46:48 +01:00
parent d453cce1b3
commit bfa9610f58
54 changed files with 745 additions and 648 deletions

66
.flake8
View file

@@ -149,6 +149,8 @@ exclude =
components/expat/expat,
components/unity/unity,
examples/build_system/cmake/import_lib/main/lib/tinyxml2
# other third-party libraries
tools/kconfig_new/kconfiglib.py,
# autogenerated scripts
components/protocomm/python/constants_pb2.py,
components/protocomm/python/sec0_pb2.py,
@@ -159,67 +161,5 @@ exclude =
examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py,
# temporary list (should be empty)
components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py,
tools/ci/apply_bot_filter.py,
tools/cmake/convert_to_cmake.py,
tools/esp_app_trace/apptrace_proc.py,
tools/esp_app_trace/logtrace_proc.py,
tools/esp_app_trace/pylibelf/__init__.py,
tools/esp_app_trace/pylibelf/constants/__init__.py,
tools/esp_app_trace/pylibelf/iterators/__init__.py,
tools/esp_app_trace/pylibelf/macros/__init__.py,
tools/esp_app_trace/pylibelf/types/__init__.py,
tools/esp_app_trace/pylibelf/util/__init__.py,
tools/esp_app_trace/pylibelf/util/syms/__init__.py,
tools/esp_prov/proto/__init__.py,
tools/esp_prov/prov/__init__.py,
tools/esp_prov/prov/custom_prov.py,
tools/esp_prov/prov/wifi_prov.py,
tools/esp_prov/security/__init__.py,
tools/esp_prov/security/security.py,
tools/esp_prov/security/security0.py,
tools/esp_prov/security/security1.py,
tools/esp_prov/transport/__init__.py,
tools/esp_prov/transport/transport.py,
tools/esp_prov/transport/transport_ble.py,
tools/esp_prov/transport/transport_console.py,
tools/esp_prov/transport/transport_softap.py,
tools/esp_prov/utils/__init__.py,
tools/esp_prov/utils/convenience.py,
tools/gen_esp_err_to_name.py,
tools/idf.py,
tools/idf_size.py,
tools/kconfig_new/confgen.py,
tools/kconfig_new/confserver.py,
tools/kconfig_new/gen_kconfig_doc.py,
tools/kconfig_new/kconfiglib.py,
tools/kconfig_new/test/test_confserver.py,
tools/ldgen/fragments.py,
tools/ldgen/generation.py,
tools/ldgen/ldgen.py,
tools/ldgen/pyparsing.py,
tools/ldgen/sdkconfig.py,
tools/ldgen/test/test_fragments.py,
tools/ldgen/test/test_generation.py,
tools/esp_app_trace/pylibelf,
tools/mass_mfg/mfg_gen.py,
tools/test_idf_monitor/run_test_idf_monitor.py,
tools/test_idf_size/test_idf_size.py,
tools/tiny-test-fw/CIAssignExampleTest.py,
tools/tiny-test-fw/CIAssignUnitTest.py,
tools/tiny-test-fw/DUT.py,
tools/tiny-test-fw/EnvConfig.py,
tools/tiny-test-fw/IDF/IDFApp.py,
tools/tiny-test-fw/IDF/IDFDUT.py,
tools/tiny-test-fw/Runner.py,
tools/tiny-test-fw/TinyFW.py,
tools/tiny-test-fw/Utility/CaseConfig.py,
tools/tiny-test-fw/Utility/LineChart.py,
tools/tiny-test-fw/Utility/PowerControl.py,
tools/tiny-test-fw/Utility/SearchCases.py,
tools/tiny-test-fw/Utility/__init__.py,
tools/tiny-test-fw/docs/conf.py,
tools/tiny-test-fw/example.py,
tools/unit-test-app/idf_ext.py,
tools/unit-test-app/tools/CreateSectionTable.py,
tools/unit-test-app/tools/UnitTestParser.py,
tools/unit-test-app/unit_test.py,
tools/windows/eclipse_make.py,

View file

@@ -55,8 +55,8 @@ if __name__ == "__main__":
if os.getenv("BOT_NEEDS_TRIGGER_BY_NAME", "0") == "1":
execute_by_default = False
need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) \
and process_filter(execute_by_default, "BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) and process_filter(execute_by_default,
"BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
if need_to_execute:
sys.exit(0)
else:

View file

@@ -8,10 +8,10 @@ import subprocess
import re
import os.path
import glob
import sys
debug = False
def get_make_variables(path, makefile="Makefile", expected_failure=False, variables={}):
"""
Given the path to a Makefile of some kind, return a dictionary of all variables defined in this Makefile
@@ -54,6 +54,7 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab
return result
def get_component_variables(project_path, component_path):
make_vars = get_make_variables(component_path,
os.path.join(os.environ["IDF_PATH"],
@@ -96,7 +97,6 @@ def get_component_variables(project_path, component_path):
component_srcs += srcs
make_vars["COMPONENT_SRCS"] = " ".join(component_srcs)
return make_vars
@@ -111,7 +111,7 @@ def convert_project(project_path):
raise RuntimeError("This project already has a CMakeLists.txt file")
project_vars = get_make_variables(project_path, expected_failure=True)
if not "PROJECT_NAME" in project_vars:
if "PROJECT_NAME" not in project_vars:
raise RuntimeError("PROJECT_NAME does not appear to be defined in IDF project Makefile at %s" % project_path)
component_paths = project_vars["COMPONENT_PATHS"].split(" ")
@@ -143,6 +143,7 @@ include($ENV{IDF_PATH}/tools/cmake/project.cmake)
print("Converted project %s" % project_cmakelists)
def convert_component(project_path, component_path):
if debug:
print("Converting %s..." % (component_path))

View file

@@ -1,10 +1,12 @@
#!/usr/bin/env python
#
from __future__ import print_function
import argparse
import struct
import sys
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
@@ -15,6 +17,7 @@ class bcolors:
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def main():
ESP32_TRACE_BLOCK_HDR_SZ = 8
ESP32_TRACE_BLOCK_TASK_IDX = 0
@@ -31,11 +34,11 @@ def main():
args = parser.parse_args()
print "===================================================================="
print("====================================================================")
try:
ftrc = open(args.file, 'rb')
except IOError as e:
print "Failed to open trace file (%s)!" % e
print("Failed to open trace file (%s)!" % e)
sys.exit(2)
passed = True
@@ -49,76 +52,79 @@ def main():
ts = 0
trc_buf = ftrc.read(args.block_len)
if len(trc_buf) == 0:
# print 'EOF'
# print('EOF')
break
trc_data = struct.unpack('<LL%sB' % (len(trc_buf) - ESP32_TRACE_BLOCK_HDR_SZ), trc_buf)
if len(trc_data):
# print "%x %x, len %d" % (trc_data[0], trc_data[1], len(trc_data) - 2)
# print trc_data[2:]
# print("%x %x, len %d" % (trc_data[0], trc_data[1], len(trc_data) - 2))
# print(trc_data[2:])
# sys.exit(0)
task = trc_data[ESP32_TRACE_BLOCK_TASK_IDX]
ts = trc_data[ESP32_TRACE_BLOCK_TS_IDX]
# print ts
# print(ts)
if last_ts and last_ts >= ts:
# print "Global TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off)
# print("Global TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task,
# data_stats[task]['stamp'], off))
if args.print_details:
print "Global TS discontinuity %x -> %x, task %x at %x" % (last_ts, ts, task, off)
print("Global TS discontinuity %x -> %x, task %x at %x" % (last_ts, ts, task, off))
# tot_discont += 1
# passed = False
last_ts = ts
if not task in data_stats:
print "%x: NEW TASK" % task
if task not in data_stats:
print("%x: NEW TASK" % task)
data_stats[task] = {'stamp': trc_data[ESP32_TRACE_BLOCK_DATA_IDX], 'last_ts': ts, 'count': 1, 'discont_offs': [], 'inv_stamps_offs': []}
else:
if data_stats[task]['last_ts'] == ts:
print "Task TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off)
print("Task TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off))
data_stats[task]['discont_offs'].append(off)
tot_discont += 1
passed = False
data_stats[task]['last_ts'] = ts
data_stats[task]['count'] += 1
if len(trc_data) > ESP32_TRACE_BLOCK_DATA_IDX:
# print "DATA = %x %x %x %x" % (trc_data[-4], trc_data[-3], trc_data[-2], trc_data[-1])
# print("DATA = %x %x %x %x" % (trc_data[-4], trc_data[-3], trc_data[-2], trc_data[-1]))
if args.print_tasks:
print "Task[%d] %x, ts %08x, stamp %x" % (off/args.block_len, task, ts, trc_data[ESP32_TRACE_BLOCK_DATA_IDX])
print("Task[%d] %x, ts %08x, stamp %x" % (off / args.block_len, task, ts, trc_data[ESP32_TRACE_BLOCK_DATA_IDX]))
else:
print "%x: NO DATA" % task
print("%x: NO DATA" % task)
else:
print "Failed to unpack data!"
print("Failed to unpack data!")
sys.exit(2)
# check data
for i in range(ESP32_TRACE_BLOCK_DATA_IDX, len(trc_data)):
if trc_data[i] != data_stats[task]['stamp']:
if not args.no_errors:
print "Invalid stamp %x->%x at %x, task %x" % (data_stats[task]['stamp'], trc_data[i], off + ESP32_TRACE_BLOCK_HDR_SZ + i, task)
print("Invalid stamp %x->%x at %x, task %x" % (data_stats[task]['stamp'], trc_data[i], off + ESP32_TRACE_BLOCK_HDR_SZ + i, task))
passed = False
data_stats[task]['stamp'] = trc_data[i]
data_stats[task]['inv_stamps_offs'].append(off)
# break
if len(trc_buf) < args.block_len:
print 'Last block (not full)'
print('Last block (not full)')
break
if data_stats[task]['stamp'] != None:
if data_stats[task]['stamp'] is not None:
data_stats[task]['stamp'] = (data_stats[task]['stamp'] + 1) & 0xFF
# print "stamp=%x" % data_stats[task][ESP32_TRACE_STAMP_IDX]
# print("stamp=%x" % data_stats[task][ESP32_TRACE_STAMP_IDX])
off += args.block_len
ftrc.close()
print "===================================================================="
print "Trace size %d bytes, discont %d\n" % (off, tot_discont)
print("====================================================================")
print("Trace size %d bytes, discont %d\n" % (off, tot_discont))
for t in data_stats:
print "Task %x. Total count %d. Inv stamps %d. TS Discontinuities %d." % (t, data_stats[t]['count'], len(data_stats[t]['inv_stamps_offs']), len(data_stats[t]['discont_offs']))
print("Task %x. Total count %d. Inv stamps %d. TS Discontinuities %d." % (t, data_stats[t]['count'],
len(data_stats[t]['inv_stamps_offs']), len(data_stats[t]['discont_offs'])))
if args.print_details:
print 'Invalid stamps offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['inv_stamps_offs']))
print 'TS Discontinuities offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['discont_offs']))
print "\n"
print('Invalid stamps offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['inv_stamps_offs'])))
print('TS Discontinuities offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['discont_offs'])))
print("\n")
if passed:
print "Data - OK"
print("Data - OK")
else:
print "Data - FAILED!"
print("Data - FAILED!")
if __name__ == '__main__':
main()

View file

@@ -1,6 +1,7 @@
#!/usr/bin/env python
#
from __future__ import print_function
import argparse
import struct
import sys
@@ -8,7 +9,8 @@ import pylibelf as elf
import pylibelf.util as elfutil
import pylibelf.iterators as elfiter
import pylibelf.constants as elfconst
from ctypes import *
import ctypes
class ESPLogTraceParserError(RuntimeError):
def __init__(self, message):
@@ -44,7 +46,7 @@ def logtrace_parse(fname):
if len(trc_buf) < ESP32_LOGTRACE_HDR_SZ:
# print "EOF"
if len(trc_buf) > 0:
print "Unprocessed %d bytes of log record header!" % len(trc_buf)
print("Unprocessed %d bytes of log record header!" % len(trc_buf))
# data_ok = False
break
try:
@@ -58,16 +60,16 @@ def logtrace_parse(fname):
except IOError as e:
raise ESPLogTraceParserError("Failed to read log record args (%s)!" % e)
if len(trc_buf) < args_sz:
# print "EOF"
# print("EOF")
if len(trc_buf) > 0:
print "Unprocessed %d bytes of log record args!" % len(trc_buf)
print("Unprocessed %d bytes of log record args!" % len(trc_buf))
# data_ok = False
break
try:
log_args = struct.unpack('<%sL' % nargs, trc_buf)
except struct.error as e:
raise ESPLogTraceParserError("Failed to unpack log record args (%s)!" % e)
# print log_args
# print(log_args)
recs.append(ESPLogTraceRecord(fmt_addr, list(log_args)))
ftrc.close()
@@ -83,9 +85,9 @@ def logtrace_get_str_from_elf(felf, str_addr):
continue
if str_addr < hdr.sh_addr or str_addr >= hdr.sh_addr + hdr.sh_size:
continue
# print "Found SECT: %x..%x @ %x" % (hdr.sh_addr, hdr.sh_addr + hdr.sh_size, str_addr - hdr.sh_addr)
# print("Found SECT: %x..%x @ %x" % (hdr.sh_addr, hdr.sh_addr + hdr.sh_size, str_addr - hdr.sh_addr))
sec_data = elfiter.getOnlyData(sect).contents
buf = cast(sec_data.d_buf, POINTER(c_char))
buf = ctypes.cast(sec_data.d_buf, ctypes.POINTER(ctypes.c_char))
for i in range(str_addr - hdr.sh_addr, hdr.sh_size):
if buf[i] == "\0":
break
@@ -94,6 +96,7 @@ def logtrace_get_str_from_elf(felf, str_addr):
return tgt_str
return None
def logtrace_formated_print(recs, elfname, no_err):
try:
felf = elfutil.open_elf(elfname)
@@ -115,20 +118,21 @@ def logtrace_formated_print(recs, elfname, no_err):
if arg_str:
lrec.args[i] = arg_str
i += 1
# print "\nFmt = {%s}, args = %d/%s" % lrec
# print("\nFmt = {%s}, args = %d/%s" % lrec)
fmt_str = fmt_str.replace('%p', '%x')
# print "=====> " + fmt_str % lrec.args
# print("=====> " + fmt_str % lrec.args)
try:
print fmt_str % tuple(lrec.args),
# print ".",
print(fmt_str % tuple(lrec.args), end='')
# print(".", end='')
pass
except Exception as e:
if not no_err:
print "Print error (%s)" % e
print "\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args)
print("Print error (%s)" % e)
print("\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args))
elf.elf_end(felf)
def main():
parser = argparse.ArgumentParser(description='ESP32 Log Trace Parsing Tool')
@@ -141,23 +145,24 @@ def main():
# parse trace file
try:
print "Parse trace file '%s'..." % args.trace_file
lrecs = logtrace_parse(args.trace_file);
print "Parsing completed."
print("Parse trace file '%s'..." % args.trace_file)
lrecs = logtrace_parse(args.trace_file)
print("Parsing completed.")
except ESPLogTraceParserError as e:
print "Failed to parse log trace (%s)!" % e
print("Failed to parse log trace (%s)!" % e)
sys.exit(2)
# print recs
# get format strings and print info
print "===================================================================="
print("====================================================================")
try:
logtrace_formated_print(lrecs, args.elf_file, args.no_errors);
logtrace_formated_print(lrecs, args.elf_file, args.no_errors)
except ESPLogTraceParserError as e:
print "Failed to print log trace (%s)!" % e
print("Failed to print log trace (%s)!" % e)
sys.exit(2)
print "\n====================================================================\n"
print("\n====================================================================\n")
print("Log records count: %d" % len(lrecs))
print "Log records count: %d" % len(lrecs)
if __name__ == '__main__':
main()

View file

@@ -29,4 +29,5 @@ wifi_constants_pb2 = imp.load_source("wifi_constants_pb2", idf_path + "/componen
wifi_config_pb2 = imp.load_source("wifi_config_pb2", idf_path + "/components/wifi_provisioning/python/wifi_config_pb2.py")
# custom_provisioning component related python files generated from .proto files
custom_config_pb2 = imp.load_source("custom_config_pb2", idf_path + "/examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py")
custom_config_pb2 = imp.load_source("custom_config_pb2", idf_path +
"/examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py")

View file

@@ -13,5 +13,5 @@
# limitations under the License.
#
from .wifi_prov import *
from .custom_prov import *
from .wifi_prov import * # noqa F403
from .custom_prov import * # noqa F403

View file

@@ -21,10 +21,12 @@ from future.utils import tobytes
import utils
import proto
def print_verbose(security_ctx, data):
if (security_ctx.verbose):
print("++++ " + data + " ++++")
def custom_config_request(security_ctx, info, version):
# Form protobuf request packet from custom-config data
cmd = proto.custom_config_pb2.CustomConfigRequest()
@@ -34,6 +36,7 @@ def custom_config_request(security_ctx, info, version):
print_verbose(security_ctx, "Client -> Device (CustomConfig cmd) " + utils.str_to_hexstr(enc_cmd))
return enc_cmd
def custom_config_response(security_ctx, response_data):
# Interpret protobuf response packet
decrypt = security_ctx.decrypt_data(tobytes(response_data))

View file

@@ -21,10 +21,12 @@ from future.utils import tobytes
import utils
import proto
def print_verbose(security_ctx, data):
if (security_ctx.verbose):
print("++++ " + data + " ++++")
def config_get_status_request(security_ctx):
# Form protobuf request packet for GetStatus command
cfg1 = proto.wifi_config_pb2.WiFiConfigPayload()
@ -35,6 +37,7 @@ def config_get_status_request(security_ctx):
print_verbose(security_ctx, "Client -> Device (Encrypted CmdGetStatus) " + utils.str_to_hexstr(encrypted_cfg))
return encrypted_cfg
def config_get_status_response(security_ctx, response_data):
# Interpret protobuf response packet from GetStatus command
decrypted_message = security_ctx.decrypt_data(tobytes(response_data))
@ -56,6 +59,7 @@ def config_get_status_response(security_ctx, response_data):
print("++++ Failure reason: " + "Incorrect SSID ++++")
return cmd_resp1.resp_get_status.sta_state
def config_set_config_request(security_ctx, ssid, passphrase):
# Form protobuf request packet for SetConfig command
cmd = proto.wifi_config_pb2.WiFiConfigPayload()
@ -66,6 +70,7 @@ def config_set_config_request(security_ctx, ssid, passphrase):
print_verbose(security_ctx, "Client -> Device (SetConfig cmd) " + utils.str_to_hexstr(enc_cmd))
return enc_cmd
def config_set_config_response(security_ctx, response_data):
# Interpret protobuf response packet from SetConfig command
decrypt = security_ctx.decrypt_data(tobytes(response_data))
@ -74,6 +79,7 @@ def config_set_config_response(security_ctx, response_data):
print_verbose(security_ctx, "SetConfig status " + str(cmd_resp4.resp_set_config.status))
return cmd_resp4.resp_set_config.status
def config_apply_config_request(security_ctx):
# Form protobuf request packet for ApplyConfig command
cmd = proto.wifi_config_pb2.WiFiConfigPayload()
@ -82,6 +88,7 @@ def config_apply_config_request(security_ctx):
print_verbose(security_ctx, "Client -> Device (ApplyConfig cmd) " + utils.str_to_hexstr(enc_cmd))
return enc_cmd
def config_apply_config_response(security_ctx, response_data):
# Interpret protobuf response packet from ApplyConfig command
decrypt = security_ctx.decrypt_data(tobytes(response_data))

View file

@@ -13,5 +13,5 @@
# limitations under the License.
#
from .security0 import *
from .security1 import *
from .security0 import * # noqa: F403, F401
from .security1 import * # noqa: F403, F401

View file

@@ -15,7 +15,7 @@
# Base class for protocomm security
class Security:
def __init__(self, security_session):
self.security_session = security_session

View file

@@ -19,9 +19,9 @@
from __future__ import print_function
from future.utils import tobytes
import utils
import proto
from .security import *
from .security import Security
class Security0(Security):
def __init__(self, verbose):

View file

@@ -21,7 +21,7 @@ from future.utils import tobytes
import utils
import proto
from .security import *
from .security import Security
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
@ -30,6 +30,7 @@ from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
import session_pb2
# Enum for state of protocomm_security1 FSM
class security_state:
REQUEST1 = 0
@ -37,6 +38,7 @@ class security_state:
RESPONSE2 = 2
FINISHED = 3
def xor(a, b):
# XOR two inputs of type `bytes`
ret = bytearray()
@ -50,6 +52,7 @@ def xor(a, b):
# Convert bytearray to bytes
return bytes(ret)
class Security1(Security):
def __init__(self, pop, verbose):
# Initialize state of the security1 FSM

View file

@@ -13,6 +13,6 @@
# limitations under the License.
#
from .transport_console import *
from .transport_softap import *
from .transport_ble import *
from .transport_console import * # noqa: F403, F401
from .transport_softap import * # noqa: F403, F401
from .transport_ble import * # noqa: F403, F401

View file

@@ -17,6 +17,7 @@
import abc
class Transport():
@abc.abstractmethod

View file

@@ -15,10 +15,11 @@
from __future__ import print_function
from .transport import *
from .transport import Transport
from . import ble_cli
class Transport_BLE(Transport):
def __init__(self, devname, service_uuid, nu_lookup):
# Expect service UUID like '0000ffff-0000-1000-8000-00805f9b34fb'
@@ -44,7 +45,7 @@ class Transport_BLE(Transport):
# Make sure device is disconnected before application gets closed
try:
self.disconnect()
except:
except Exception:
pass
def disconnect(self):

View file

@@ -18,7 +18,8 @@ from builtins import input
import utils
from .transport import *
from .transport import Transport
class Transport_Console(Transport):

View file

@@ -18,7 +18,8 @@ from future.utils import tobytes
import http.client
from .transport import *
from .transport import Transport
class Transport_Softap(Transport):
def __init__(self, url):

View file

@@ -13,4 +13,4 @@
# limitations under the License.
#
from .convenience import *
from .convenience import * # noqa: F403, F401

View file

@@ -15,11 +15,13 @@
# Convenience functions for commonly used data type conversions
def str_to_hexstr(string):
# Form hexstr by appending ASCII codes (in hex) corresponding to
# each character in the input string
return ''.join('{:02x}'.format(ord(c)) for c in string)
def hexstr_to_str(hexstr):
# Prepend 0 (if needed) to make the hexstr length an even number
if len(hexstr) % 2 == 1:

View file

@@ -52,6 +52,7 @@ err_dict = collections.defaultdict(list) #identified errors are stored here; map
rev_err_dict = dict() # map of error string to error code
unproc_list = list() # errors with unknown codes which depend on other errors
class ErrItem(object):
"""
Contains information about the error:
@@ -69,6 +70,7 @@ class ErrItem(object):
self.comment = comment
self.rel_str = rel_str
self.rel_off = rel_off
def __str__(self):
ret = self.name + " from " + self.file
if (self.rel_str != ""):
@@ -76,6 +78,7 @@ class ErrItem(object):
if self.comment != "":
ret += " // " + self.comment
return ret
def __cmp__(self, other):
if self.file in priority_headers and other.file not in priority_headers:
return -1
@@ -99,6 +102,7 @@ class ErrItem(object):
else:
return 0
class InputError(RuntimeError):
"""
Represents and error on the input
@@ -106,6 +110,7 @@ class InputError(RuntimeError):
def __init__(self, p, e):
super(InputError, self).__init__(p + ": " + e)
def process(line, idf_path, include_as):
"""
Process a line of text from file idf_path (relative to IDF project).
@@ -168,6 +173,7 @@ def process(line, idf_path, include_as):
# Store the information available now and compute the error code later
unproc_list.append(ErrItem(words[1], idf_path, include_as, comment, related, num))
def process_remaining_errors():
"""
Create errors which could not be processed before because the error code
@@ -180,7 +186,6 @@ def process_remaining_errors():
for item in unproc_list:
if item.rel_str in rev_err_dict:
base_num = rev_err_dict[item.rel_str]
base = err_dict[base_num][0]
num = base_num + item.rel_off
err_dict[num].append(ErrItem(item.name, item.file, item.include_as, item.comment))
rev_err_dict[item.name] = num
@@ -189,6 +194,7 @@ def process_remaining_errors():
del unproc_list[:]
def path_to_include(path):
"""
Process the path (relative to the IDF project) in a form which can be used
@@ -209,6 +215,7 @@ def path_to_include(path):
else:
return os.sep.join(spl_path[i + 1:]) # subdirectories and filename in "include"
def print_warning(error_list, error_code):
"""
Print warning about errors with the same error code
@@ -217,6 +224,7 @@ def print_warning(error_list, error_code):
for e in error_list:
print(" " + str(e))
def max_string_width():
max = 0
for k in err_dict:
@@ -226,6 +234,7 @@ def max_string_width():
max = x
return max
def generate_c_output(fin, fout):
"""
Writes the output to fout based on th error dictionary err_dict and
@@ -289,6 +298,7 @@ def generate_c_output(fin, fout):
else:
fout.write(line)
def generate_rst_output(fout):
for k in sorted(err_dict.keys()):
v = err_dict[k][0]
@@ -301,6 +311,7 @@ def generate_rst_output(fout):
fout.write(': {}'.format(v.comment))
fout.write('\n\n')
def main():
if 'IDF_PATH' in os.environ:
idf_path = os.environ['IDF_PATH']
@@ -348,5 +359,6 @@ def main():
with open(args.c_input, 'r', encoding='utf-8') as fin, open(args.c_output, 'w', encoding='utf-8') as fout:
generate_c_output(fin, fout)
if __name__ == "__main__":
main()

View file

@@ -36,12 +36,14 @@ import re
import shutil
import json
class FatalError(RuntimeError):
"""
Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build proces.s
"""
pass
# Use this Python interpreter for any subprocesses we launch
PYTHON = sys.executable
@@ -60,7 +62,8 @@ else:
MAKE_CMD = "make"
MAKE_GENERATOR = "Unix Makefiles"
GENERATORS = [
GENERATORS = \
[
# ('generator name', 'build command line', 'version command line', 'verbose flag')
("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
(MAKE_GENERATOR, [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)], ["make", "--version"], "VERBOSE=1"),
@@ -68,6 +71,7 @@ GENERATORS = [
GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)
def _run_tool(tool_name, args, cwd):
def quote_arg(arg):
" Quote 'arg' if necessary "
@@ -83,6 +87,7 @@ def _run_tool(tool_name, args, cwd):
except subprocess.CalledProcessError as e:
raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode))
def check_environment():
"""
Verify the environment contains the top-level tools we need to operate
@@ -96,7 +101,8 @@ def check_environment():
if "IDF_PATH" in os.environ:
set_idf_path = os.path.realpath(os.environ["IDF_PATH"])
if set_idf_path != detected_idf_path:
print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. Using the environment variable directory, but results may be unexpected..."
print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. "
"Using the environment variable directory, but results may be unexpected..."
% (set_idf_path, detected_idf_path))
else:
print("Setting IDF_PATH environment variable: %s" % detected_idf_path)
@@ -111,13 +117,15 @@ def check_environment():
except subprocess.CalledProcessError:
raise SystemExit(1)
def executable_exists(args):
try:
subprocess.check_output(args)
return True
except:
except Exception:
return False
def detect_cmake_generator():
"""
Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
@@ -127,6 +135,7 @@ def detect_cmake_generator():
return generator
raise FatalError("To use idf.py, either the 'ninja' or 'GNU make' build tool must be available in the PATH")
def _ensure_build_directory(args, always_run_cmake=False):
"""Check the build directory exists and that cmake has been run there.
@@ -166,7 +175,7 @@ def _ensure_build_directory(args, always_run_cmake=False):
cmake_args += [project_dir]
_run_tool("cmake", cmake_args, cwd=args.build_dir)
except:
except Exception:
# don't allow partially valid CMakeCache.txt files,
# to keep the "should I run cmake?" logic simple
if os.path.exists(cache_path):
@@ -212,6 +221,7 @@ def parse_cmakecache(path):
result[m.group(1)] = m.group(3)
return result
def build_target(target_name, args):
"""
Execute the target build system to build target 'target_name'
@@ -252,6 +262,7 @@ def _get_esptool_args(args):
result += ["--after", extra_esptool_args["after"]]
return result
def flash(action, args):
"""
Run esptool to flash the entire project, from an argfile generated by the build system
@@ -266,11 +277,13 @@ def flash(action, args):
esptool_args += ["write_flash", "@" + flasher_args_path]
_run_tool("esptool.py", esptool_args, args.build_dir)
def erase_flash(action, args):
esptool_args = _get_esptool_args(args)
esptool_args += ["erase_flash"]
_run_tool("esptool.py", esptool_args, args.build_dir)
def monitor(action, args):
"""
Run idf_monitor.py to watch build output
@@ -285,7 +298,9 @@ def monitor(action, args):
elf_file = os.path.join(args.build_dir, project_desc["app_elf"])
if not os.path.exists(elf_file):
raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', and the binary on the device must match the one in the build directory exactly. Try 'idf.py flash monitor'." % elf_file)
raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
"and the binary on the device must match the one in the build directory exactly. "
"Try 'idf.py flash monitor'." % elf_file)
idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py")
monitor_args = [PYTHON, idf_monitor]
if args.port is not None:
@@ -307,9 +322,11 @@ def clean(action, args):
return
build_target("clean", args)
def reconfigure(action, args):
_ensure_build_directory(args, True)
def fullclean(action, args):
build_dir = args.build_dir
if not os.path.isdir(build_dir):
@@ -320,7 +337,8 @@ def fullclean(action, args):
return
if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")):
raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically "
"delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
red_flags = ["CMakeLists.txt", ".git", ".svn"]
for red in red_flags:
red = os.path.join(build_dir, red)
@@ -334,6 +352,7 @@ def fullclean(action, args):
else:
os.remove(f)
def print_closing_message(args):
# print a closing message of some kind
#
@@ -384,6 +403,7 @@ def print_closing_message(args):
if "bootloader" in args.actions:
print_flashing_message("Bootloader", "bootloader")
ACTIONS = {
# action name : ( function (or alias), dependencies, order-only dependencies )
"all": (build_target, [], ["reconfigure", "menuconfig", "clean", "fullclean"]),
@@ -411,6 +431,7 @@ ACTIONS = {
"read_otadata": (build_target, [], []),
}
def get_commandline_options():
""" Return all the command line options up to but not including the action """
result = []
@@ -421,6 +442,7 @@ def get_commandline_options():
result.append(a)
return result
def get_default_serial_port():
""" Return a default serial port. esptool can do this (smarter), but it can create
inconsistencies where esptool.py uses one port and idf_monitor uses another.
@@ -438,16 +460,18 @@ def get_default_serial_port():
except IndexError:
raise RuntimeError("No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")
# Import the actions, arguments extension file
if os.path.exists(os.path.join(os.getcwd(), "idf_ext.py")):
sys.path.append(os.getcwd())
try:
from idf_ext import add_action_extensions, add_argument_extensions
except ImportError as e:
except ImportError:
print("Error importing extension file idf_ext.py. Skipping.")
print("Please make sure that it contains implementations (even if they're empty implementations) of")
print("add_action_extensions and add_argument_extensions.")
def main():
if sys.version_info[0] != 2 or sys.version_info[1] != 7:
print("Note: You are using Python %d.%d.%d. Python 3 support is new, please report any problems "
@@ -478,7 +502,8 @@ def main():
parser.add_argument('-n', '--no-warnings', help="Disable Cmake warnings", action="store_true")
parser.add_argument('-v', '--verbose', help="Verbose build output", action="store_true")
parser.add_argument('-D', '--define-cache-entry', help="Create a cmake cache entry", nargs='+')
parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.", action="store_true")
parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.",
action="store_true")
parser.add_argument('actions', help="Actions (build targets or other operations)", nargs='+',
choices=ACTIONS.keys())
@ -494,21 +519,23 @@ def main():
# Advanced parameter checks
if args.build_dir is not None and os.path.realpath(args.project_dir) == os.path.realpath(args.build_dir):
raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping --build-dir option, the default is a 'build' subdirectory inside the project directory.")
raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping "
"--build-dir option, the default is a 'build' subdirectory inside the project directory.")
if args.build_dir is None:
args.build_dir = os.path.join(args.project_dir, "build")
args.build_dir = os.path.realpath(args.build_dir)
completed_actions = set()
def execute_action(action, remaining_actions):
(function, dependencies, order_dependencies) = ACTIONS[action]
# very simple dependency management, build a set of completed actions and make sure
# all dependencies are in it
for dep in dependencies:
if not dep in completed_actions:
if dep not in completed_actions:
execute_action(dep, remaining_actions)
for dep in order_dependencies:
if dep in remaining_actions and not dep in completed_actions:
if dep in remaining_actions and dep not in completed_actions:
execute_action(dep, remaining_actions)
if action in completed_actions:
@ -527,11 +554,10 @@ def main():
print_closing_message(args)
if __name__ == "__main__":
try:
main()
except FatalError as e:
print(e)
sys.exit(2)

View file

@ -22,11 +22,9 @@
#
from __future__ import print_function
from __future__ import unicode_literals
from builtins import dict
import argparse, sys, subprocess, re
import argparse
import re
import os.path
import pprint
import operator
DEFAULT_TOOLCHAIN_PREFIX = "xtensa-esp32-elf-"
@ -39,6 +37,7 @@ CHIP_SIZES = {
}
}
def scan_to_header(f, header_line):
""" Scan forward in a file until you reach 'header_line', then return """
for line in f:
@ -46,11 +45,13 @@ def scan_to_header(f, header_line):
return
raise RuntimeError("Didn't find line '%s' in file" % header_line)
def load_map_data(map_file):
    """ Parse a linker MAP file and return (memory_config, sections).

    The two loaders consume the file sequentially, so the order of the
    calls below matters.
    """
    memory_layout = load_memory_config(map_file)
    section_data = load_sections(map_file)
    return (memory_layout, section_data)
def load_memory_config(map_file):
""" Memory Configuration section is the total size of each output section """
result = {}
@ -72,11 +73,13 @@ def load_memory_config(map_file):
result[section["name"]] = section
raise RuntimeError("End of file while scanning memory configuration?")
def load_sections(map_file):
""" Load section size information from the MAP file.
Returns a dict of 'sections', where each key is a section name and the value
is a dict with details about this section, including a "sources" key which holds a list of source file line information for each symbol linked into the section.
is a dict with details about this section, including a "sources" key which holds a list of source file line
information for each symbol linked into the section.
"""
scan_to_header(map_file, "Linker script and memory map")
sections = {}
@ -130,6 +133,7 @@ def load_sections(map_file):
return sections
def sizes_by_key(sections, key):
""" Takes a dict of sections (from load_sections) and returns
a dict keyed by 'key' with aggregate output size information.
@ -147,6 +151,7 @@ def sizes_by_key(sections, key):
archive[section["name"]] += s["size"]
return result
def main():
parser = argparse.ArgumentParser("idf_size - a tool to print IDF elf file sizes")
@ -183,6 +188,7 @@ def main():
print("Symbols within the archive:", args.archive_details, "(Not all symbols may be reported)")
print_archive_symbols(sections, args.archive_details)
def print_summary(memory_config, sections):
def get_size(section):
try:
@ -214,10 +220,10 @@ def print_summary(memory_config, sections):
print(" Flash rodata: %7d bytes" % flash_rodata)
print("Total image size:~%7d bytes (.bin may be padded larger)" % (total_size))
def print_detailed_sizes(sections, key, header):
sizes = sizes_by_key(sections, key)
sub_heading = None
headings = (header,
"DRAM .data",
"& .bss",
@ -240,6 +246,7 @@ def print_detailed_sizes(sections, key, header):
def return_total_size(elem):
    """ Sort key: the aggregate 'total' size of a (name, sizes-dict) pair """
    (_, sizes) = elem
    return sizes["total"]
def return_header(elem):
    """ Sort key: the name (first element) of a (name, sizes-dict) pair """
    (header, _) = elem
    return header
s = sorted(list(result.items()), key=return_header)
@ -255,6 +262,7 @@ def print_detailed_sizes(sections, key, header):
v["flash_rodata"],
v["total"]))
def print_archive_symbols(sections, archive):
interested_sections = [".dram0.data", ".dram0.bss", ".iram0.text", ".iram0.vectors", ".flash.text", ".flash.rodata"]
result = {}
@ -267,7 +275,7 @@ def print_archive_symbols(sections, archive):
for s in section["sources"]:
if archive != s["archive"]:
continue
s["sym_name"] = re.sub("(.text.|.literal.|.data.|.bss.|.rodata.)", "", s["sym_name"]);
s["sym_name"] = re.sub("(.text.|.literal.|.data.|.bss.|.rodata.)", "", s["sym_name"])
result[section_name][s["sym_name"]] = result[section_name].get(s["sym_name"], 0) + s["size"]
for t in interested_sections:
print("\nSymbols from section:", t)
@ -279,6 +287,6 @@ def print_archive_symbols(sections, archive):
section_total += val
print("\nSection total:",section_total)
if __name__ == "__main__":
main()

View file

@ -30,13 +30,13 @@ import json
import gen_kconfig_doc
import kconfiglib
import pprint
__version__ = "0.1"
if not "IDF_CMAKE" in os.environ:
if "IDF_CMAKE" not in os.environ:
os.environ["IDF_CMAKE"] = ""
def main():
parser = argparse.ArgumentParser(description='confgen.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))
@ -46,7 +46,8 @@ def main():
default=None)
parser.add_argument('--defaults',
help='Optional project defaults file, used if --config file doesn\'t exist. Multiple files can be specified using multiple --defaults arguments.',
help='Optional project defaults file, used if --config file doesn\'t exist. '
'Multiple files can be specified using multiple --defaults arguments.',
nargs='?',
default=[],
action='append')
@ -70,7 +71,7 @@ def main():
args = parser.parse_args()
for fmt, filename in args.output:
if not fmt in OUTPUT_FORMATS.keys():
if fmt not in OUTPUT_FORMATS.keys():
print("Format '%s' not recognised. Known formats: %s" % (fmt, OUTPUT_FORMATS.keys()))
sys.exit(1)
@ -124,6 +125,7 @@ def write_config(config, filename):
"""
config.write_config(filename, header=CONFIG_HEADING)
def write_header(config, filename):
CONFIG_HEADING = """/*
* Automatically generated file. DO NOT EDIT.
@ -133,6 +135,7 @@ def write_header(config, filename):
"""
config.write_autoconf(filename, header=CONFIG_HEADING)
def write_cmake(config, filename):
with open(filename, "w") as f:
write = f.write
@ -143,6 +146,7 @@ def write_cmake(config, filename):
# Espressif IoT Development Framework (ESP-IDF) Configuration cmake include file
#
""")
def write_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@ -158,8 +162,10 @@ def write_cmake(config, filename):
prefix, sym.name, val))
config.walk_menu(write_node)
def get_json_values(config):
config_dict = {}
def write_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@ -177,11 +183,13 @@ def get_json_values(config):
config.walk_menu(write_node)
return config_dict
def write_json(config, filename):
    """ Dump the current config values to 'filename' as pretty-printed,
    key-sorted JSON """
    with open(filename, "w") as f:
        json.dump(get_json_values(config), f, indent=4, sort_keys=True)
def write_json_menus(config, filename):
result = [] # root level items
node_lookup = {} # lookup from MenuNode to an item in result
@ -190,7 +198,7 @@ def write_json_menus(config, filename):
try:
json_parent = node_lookup[node.parent]["children"]
except KeyError:
assert not node.parent in node_lookup # if fails, we have a parent node with no "children" entity (ie a bug)
assert node.parent not in node_lookup # if fails, we have a parent node with no "children" entity (ie a bug)
json_parent = result # root level node
# node.kconfig.y means node has no dependency,
@ -263,6 +271,7 @@ def write_json_menus(config, filename):
with open(filename, "w") as f:
f.write(json.dumps(result, sort_keys=True, indent=4))
def update_if_changed(source, destination):
with open(source, "r") as f:
source_contents = f.read()
@ -276,8 +285,7 @@ def update_if_changed(source, destination):
f.write(source_contents)
OUTPUT_FORMATS = {
"config" : write_config,
OUTPUT_FORMATS = {"config": write_config,
"header": write_header,
"cmake": write_cmake,
"docs": gen_kconfig_doc.write_docs,
@ -285,12 +293,14 @@ OUTPUT_FORMATS = {
"json_menus": write_json_menus,
}
class FatalError(RuntimeError):
    """
    Runtime error triggered by invalid user input rather than an internal bug.
    """
if __name__ == '__main__':
try:
main()

View file

@ -12,6 +12,7 @@ import sys
import confgen
from confgen import FatalError, __version__
def main():
parser = argparse.ArgumentParser(description='confserver.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))
@ -91,7 +92,7 @@ def run_server(kconfig, sdkconfig):
def handle_request(config, req):
if not "version" in req:
if "version" not in req:
return ["All requests must have a 'version'"]
if int(req["version"]) != 1:
return ["Only version 1 requests supported"]
@ -117,12 +118,13 @@ def handle_request(config, req):
return error
def handle_set(config, error, to_set):
missing = [ k for k in to_set if not k in config.syms ]
missing = [k for k in to_set if k not in config.syms]
if missing:
error.append("The following config symbol(s) were not found: %s" % (", ".join(missing)))
# replace name keys with the full config symbol for each key:
to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if not k in missing)
to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if k not in missing)
# Work through the list of values to set, noting that
# some may not be immediately applicable (maybe they depend
@ -135,9 +137,9 @@ def handle_set(config, error, to_set):
break # no visible keys left
for (sym,val) in set_pass:
if sym.type in (kconfiglib.BOOL, kconfiglib.TRISTATE):
if val == True:
if val is True:
sym.set_value(2)
elif val == False:
elif val is False:
sym.set_value(0)
else:
error.append("Boolean symbol %s only accepts true/false values" % sym.name)
@ -150,7 +152,6 @@ def handle_set(config, error, to_set):
error.append("The following config symbol(s) were not visible so were not updated: %s" % (", ".join(s.name for s in to_set)))
def diff(before, after):
"""
Return a dictionary with the difference between 'before' and 'after' (either with the new value if changed,
@ -164,6 +165,7 @@ def diff(before, after):
def get_ranges(config):
ranges_dict = {}
def handle_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@ -182,4 +184,3 @@ if __name__ == '__main__':
except FatalError as e:
print("A fatal error occurred: %s" % e, file=sys.stderr)
sys.exit(2)

View file

@ -21,7 +21,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import kconfiglib
@ -35,6 +34,7 @@ HEADING_SYMBOLS = '#*=-^"+'
INITIAL_HEADING_LEVEL = 3
MAX_HEADING_LEVEL = len(HEADING_SYMBOLS) - 1
def write_docs(config, filename):
""" Note: writing .rst documentation ignores the current value
of any items. ie the --config option can be ignored.
@ -42,12 +42,14 @@ def write_docs(config, filename):
with open(filename, "w") as f:
config.walk_menu(lambda node: write_menu_item(f, node))
def node_is_menu(node):
    """ Return a truthy value if 'node' is a menu or menuconfig item """
    try:
        if node.item == kconfiglib.MENU:
            return True
        return node.is_menuconfig
    except AttributeError:
        # not all MenuNodes have is_menuconfig for some reason
        return False
def get_breadcrumbs(node):
# this is a bit wasteful as it recalculates each time, but still...
result = []
@ -58,6 +60,7 @@ def get_breadcrumbs(node):
node = node.parent
return " > ".join(result)
def get_link_anchor(node):
try:
return "CONFIG_%s" % node.item.name
@ -73,6 +76,7 @@ def get_link_anchor(node):
result = "-".join(result).lower()
return result
def get_heading_level(node):
result = INITIAL_HEADING_LEVEL
node = node.parent
@ -83,6 +87,7 @@ def get_heading_level(node):
node = node.parent
return result
def format_rest_text(text, indent):
# Format an indented text block for use with ReST
text = indent + text.replace('\n', '\n' + indent)
@ -92,6 +97,7 @@ def format_rest_text(text, indent):
text += '\n'
return text
def node_should_write(node):
if not node.prompt:
return False # Don't do anything for invisible menu items
@ -101,6 +107,7 @@ def node_should_write(node):
return True
def write_menu_item(f, node):
if not node_should_write(node):
return
@ -112,7 +119,7 @@ def write_menu_item(f, node):
is_menu = node_is_menu(node)
## Heading
# Heading
if name:
title = 'CONFIG_%s' % name
else:
@ -167,6 +174,6 @@ def write_menu_item(f, node):
child = child.next
f.write('\n')
if __name__ == '__main__':
print("Run this via 'confgen.py --output doc FILENAME'")

View file

@ -1,21 +1,12 @@
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import threading
import time
import json
import argparse
import shutil
import tempfile
import pexpect
sys.path.append("..")
import confserver
def create_server_thread(*args):
    """Create an (unstarted) worker thread.

    NOTE(review): the Thread object is neither started nor returned, so this
    helper currently has no observable effect — confirm it is still needed.
    """
    unused_thread = threading.Thread()
def parse_testcases():
with open("testcases.txt", "r") as f:
@ -42,6 +33,7 @@ def parse_testcases():
expect = json.loads(expect[2:])
yield (desc, send, expect)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--logfile', type=argparse.FileType('w'), help='Optional session log of the interactions with confserver.py')
@ -84,7 +76,7 @@ def main():
read_vals = readback[expect_key]
exp_vals = expected[expect_key]
if read_vals != exp_vals:
expect_diff = dict((k,v) for (k,v) in exp_vals.items() if not k in read_vals or v != read_vals[k])
expect_diff = dict((k,v) for (k,v) in exp_vals.items() if k not in read_vals or v != read_vals[k])
raise RuntimeError("Test failed! Was expecting %s: %s" % (expect_key, json.dumps(expect_diff)))
print("OK")
@ -111,6 +103,6 @@ def main():
except OSError:
pass
if __name__ == "__main__":
main()

View file

@ -15,19 +15,29 @@
#
import re
import collections
import sys
import os
from sdkconfig import SDKConfig
from pyparsing import *
from pyparsing import OneOrMore
from pyparsing import restOfLine
from pyparsing import alphanums
from pyparsing import Word
from pyparsing import alphas
from pyparsing import ParseBaseException
from pyparsing import Suppress
from pyparsing import Group
from pyparsing import Literal
from pyparsing import ZeroOrMore
from pyparsing import Optional
from pyparsing import originalTextFor
from common import LdGenFailure
class FragmentFileModel():
"""
Fragment file internal representation. Parses and stores instances of the fragment definitions
contained within the file.
"""
class FragmentFileModel():
def __init__(self, fragment_file):
path = os.path.realpath(fragment_file.name)
@ -54,11 +64,12 @@ class FragmentFileModel():
for fragment in self.fragments:
fragment.path = path
class Fragment:
"""
Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragments and performs processing
such as checking the validity of the fragment name and getting the entry values.
"""
class Fragment:
IDENTIFIER = Word(alphas + "_", alphanums + "_")
ENTITY = Word(alphanums + ".-_$")
@ -68,6 +79,7 @@ class Fragment:
self.name = name
self.entries = entries
class Sections(Fragment):
def __init__(self, name, entries):
@ -113,10 +125,11 @@ class Sections(Fragment):
return sections
class Scheme(Fragment):
"""
Encapsulates a scheme fragment, which defines what target input sections are placed under.
"""
class Scheme(Fragment):
def __init__(self, name, items):
Fragment.__init__(self, name, items)
@ -151,10 +164,11 @@ class Scheme(Fragment):
return scheme
class Mapping(Fragment):
"""
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entities are placed under.
"""
class Mapping(Fragment):
# Name of the default condition entry
DEFAULT_CONDITION = "default"
@ -217,9 +231,6 @@ class Mapping(Fragment):
# Match header [mapping]
header = Suppress("[") + Suppress("mapping") + Suppress("]")
# Define possible values for input archive and object file
filename = Word(alphanums + "-" + "_")
# There are three possible patterns for mapping entries:
# obj:symbol (scheme)
# obj (scheme)

View file

@ -14,22 +14,20 @@
# limitations under the License.
#
import re
import collections
import itertools
import os
import subprocess
import fnmatch
from sdkconfig import SDKConfig
from fragments import FragmentFileModel, Sections, Scheme, Mapping, Fragment
from pyparsing import *
from fragments import Sections, Scheme, Mapping, Fragment
from pyparsing import Suppress, White, ParseException, Literal, Regex, Group, ZeroOrMore, Word, OneOrMore, nums, alphanums, alphas, Optional
from common import LdGenFailure
class PlacementRule():
"""
Encapsulates a generated placement rule placed under a target
"""
class PlacementRule():
DEFAULT_SPECIFICITY = 0
ARCHIVE_SPECIFICITY = 1
@ -116,7 +114,7 @@ class PlacementRule():
# most specific rule from the list, and if an even more specific rule is found,
# replace it entirely. Otherwise, keep appending.
exclusions = self.sections[section].excludes
exclusions_list = exclusions.content if exclusions.content != None else []
exclusions_list = exclusions.content if exclusions.content is not None else []
exclusions_to_remove = filter(lambda r: r.is_more_specific_rule_of(other), exclusions_list)
remaining_exclusions = [e for e in exclusions_list if e not in exclusions_to_remove]
@ -133,7 +131,7 @@ class PlacementRule():
# Compare archive, obj and target
for entity_index in range(1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] != None:
if self[entity_index] != other[entity_index] and other[entity_index] is not None:
return False
return True
@ -144,7 +142,7 @@ class PlacementRule():
# Compare archive, obj and target
for entity_index in range(1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] != None:
if self[entity_index] != other[entity_index] and other[entity_index] is not None:
return False
return True
@ -247,10 +245,11 @@ class PlacementRule():
yield self.symbol
raise StopIteration
class GenerationModel:
"""
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
"""
class GenerationModel:
DEFAULT_SCHEME = "default"
@ -273,7 +272,7 @@ class GenerationModel:
rule = PlacementRule(archive, obj, symbol, section_entries, target)
if not rule in rules:
if rule not in rules:
rules.append(rule)
def _build_scheme_dictionary(self):
@ -433,7 +432,8 @@ class GenerationModel:
extra_rule = extra_rules[extra_rules_key]
if section not in extra_rule.get_section_names():
new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol, list(extra_rule.get_section_names()) + [section] , extra_rule.target)
new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol,
list(extra_rule.get_section_names()) + [section], extra_rule.target)
extra_rules[extra_rules_key] = new_rule
except KeyError:
extra_rule = PlacementRule(symbol_specific_rule.archive, symbol_specific_rule.obj, None, [section], section_rule.target)
@ -458,9 +458,9 @@ class GenerationModel:
# through rules below it (higher indeces), adding exclusions whenever appropriate.
for general_rule in sorted_rules:
for specific_rule in reversed(sorted_rules):
if (specific_rule.specificity > general_rule.specificity and \
if (specific_rule.specificity > general_rule.specificity and
specific_rule.specificity != PlacementRule.SYMBOL_SPECIFICITY) or \
(specific_rule.specificity == PlacementRule.SYMBOL_SPECIFICITY and \
(specific_rule.specificity == PlacementRule.SYMBOL_SPECIFICITY and
general_rule.specificity == PlacementRule.OBJECT_SPECIFICITY):
general_rule.add_exclusion(specific_rule, sections_info)
@ -484,11 +484,12 @@ class GenerationModel:
dict_to_append_to[fragment.name] = fragment
class TemplateModel:
"""
Encapsulates a linker script template file. Finds marker syntax and handles replacement to generate the
final output.
"""
class TemplateModel:
Marker = collections.namedtuple("Marker", "target indent rules")
@ -526,7 +527,6 @@ class TemplateModel:
target = None
try:
target = member.target
indent = member.indent
rules = member.rules
del rules[:]
@ -535,7 +535,7 @@ class TemplateModel:
except KeyError:
message = GenerationException.UNDEFINED_REFERENCE + " to target '" + target + "'."
raise GenerationException(message)
except AttributeError as a:
except AttributeError:
pass
def write(self, output_file):
@ -557,11 +557,12 @@ class TemplateModel:
except AttributeError:
output_file.write(member)
class GenerationException(LdGenFailure):
"""
Exception for linker script generation failures such as undefined references/ failure to
evaluate conditions, duplicate mappings, etc.
"""
class GenerationException(LdGenFailure):
UNDEFINED_REFERENCE = "Undefined reference"
@ -575,11 +576,12 @@ class GenerationException(LdGenFailure):
else:
return self.message
class SectionsInfo(dict):
"""
Encapsulates an output of objdump. Contains information about the static library sections
and names
"""
class SectionsInfo(dict):
__info = collections.namedtuple("__info", "filename content")
@ -607,8 +609,11 @@ class SectionsInfo(dict):
object = Fragment.ENTITY.setResultsName("object") + Literal(":").suppress() + Literal("file format elf32-xtensa-le").suppress()
# Sections table
header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") + Literal("LMA") + Literal("File off") + Literal("Algn"))
entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) + Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) + Optional(Literal(","))))
header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") +
Literal("LMA") + Literal("File off") + Literal("Algn"))
entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) +
Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) +
Optional(Literal(","))))
# Content is object file line + sections table
content = Group(object + header + Group(ZeroOrMore(entry)).setResultsName("sections"))

View file

@ -16,8 +16,6 @@
#
import argparse
import os
import traceback
import sys
import tempfile
@ -26,6 +24,7 @@ from sdkconfig import SDKConfig
from generation import GenerationModel, TemplateModel, SectionsInfo
from common import LdGenFailure
def main():
argparser = argparse.ArgumentParser(description="ESP-IDF linker script generator")
@ -44,8 +43,7 @@ def main():
argparser.add_argument(
"--sections", "-s",
type=argparse.FileType("r"),
help = "Library sections info",
)
help="Library sections info")
argparser.add_argument(
"--output", "-o",
@ -110,5 +108,6 @@ def main():
print("linker script generation failed for %s\nERROR: %s" % (input_file.name, e))
sys.exit(1)
if __name__ == "__main__":
main()

View file

@ -15,21 +15,23 @@
#
import os
from pyparsing import *
from pyparsing import Word, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc
import sys
try:
import kconfiglib
except ImportError:
parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new")
sys.path.append(kconfig_new_dir)
import kconfiglib
class SDKConfig:
"""
Encapsulates an sdkconfig file. Defines grammar of a configuration entry, and enables
evaluation of logical expressions involving those entries.
"""
class SDKConfig:
# A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
IDENTIFIER = Word(printables.upper())
@ -77,8 +79,7 @@ class SDKConfig:
condition = Group(Optional("(").suppress() + test + Optional(")").suppress())
grammar = infixNotation(
condition, [
grammar = infixNotation(condition, [
("!", 1, opAssoc.RIGHT),
("&&", 2, opAssoc.LEFT),
("||", 2, opAssoc.LEFT)])

View file

@ -17,12 +17,17 @@
import unittest
import sys
import os
from pyparsing import ParseException
from pyparsing import restOfLine
try:
import fragments
except ImportError:
sys.path.append('../')
from fragments import *
from pyparsing import *
from sdkconfig import *
import fragments
from sdkconfig import SDKConfig
class FragmentTest(unittest.TestCase):
@ -31,10 +36,11 @@ class FragmentTest(unittest.TestCase):
fragment = self.parser.parseString(text, parseAll=True)
return fragment[0]
class SectionsTest(FragmentTest):
def setUp(self):
self.parser = Sections.get_fragment_grammar()
self.parser = fragments.Sections.get_fragment_grammar()
def test_valid_entries(self):
valid_entries = """
@ -74,7 +80,7 @@ class SectionsTest(FragmentTest):
"""
with self.assertRaises(ParseException):
sections = self.parse(blank_entries)
self.parse(blank_entries)
def test_invalid_names(self):
with_spaces = """
@ -93,13 +99,13 @@ class SectionsTest(FragmentTest):
"""
with self.assertRaises(ParseException):
sections = self.parse(with_spaces)
self.parse(with_spaces)
with self.assertRaises(ParseException):
sections = self.parse(begins_with_number)
self.parse(begins_with_number)
with self.assertRaises(ParseException):
sections = self.parse(with_special_character)
self.parse(with_special_character)
def test_non_existent_entries(self):
misspelled_entries_field = """
@ -113,10 +119,10 @@ class SectionsTest(FragmentTest):
"""
with self.assertRaises(ParseException):
sections = self.parse(misspelled_entries_field)
self.parse(misspelled_entries_field)
with self.assertRaises(ParseException):
sections = self.parse(missing_entries_field)
self.parse(missing_entries_field)
def test_duplicate_entries(self):
duplicate_entries = """
@ -143,10 +149,11 @@ class SectionsTest(FragmentTest):
self.assertEqual(set(entries), expected)
class SchemeTest(FragmentTest):
def setUp(self):
self.parser = Scheme.get_fragment_grammar()
self.parser = fragments.Scheme.get_fragment_grammar()
def test_valid_entries(self):
valid_entries = """
@ -202,10 +209,10 @@ class SchemeTest(FragmentTest):
"""
with self.assertRaises(ParseException):
scheme = self.parse(wrong_character)
self.parse(wrong_character)
with self.assertRaises(ParseException):
scheme = self.parse(single_word)
self.parse(single_word)
def test_blank_entries(self):
blank_entries = """
@ -214,7 +221,7 @@ class SchemeTest(FragmentTest):
"""
with self.assertRaises(ParseException):
sections = self.parse(blank_entries)
self.parse(blank_entries)
def test_non_existent_entries(self):
misspelled_entries_field = """
@ -228,15 +235,16 @@ class SchemeTest(FragmentTest):
"""
with self.assertRaises(ParseException):
sections = self.parse(misspelled_entries_field)
self.parse(misspelled_entries_field)
with self.assertRaises(ParseException):
sections = self.parse(missing_entries_field)
self.parse(missing_entries_field)
class MappingTest(FragmentTest):
def setUp(self):
self.parser = Mapping.get_fragment_grammar()
self.parser = fragments.Mapping.get_fragment_grammar()
def parse_expression(self, expression):
parser = SDKConfig.get_expression_grammar()
@ -360,43 +368,43 @@ class MappingTest(FragmentTest):
"""
with self.assertRaises(ParseException):
sections = self.parse(with_fragment_name)
self.parse(with_fragment_name)
with self.assertRaises(ParseException):
sections = self.parse(missing_archive)
self.parse(missing_archive)
with self.assertRaises(ParseException):
sections = self.parse(misspelled_archive)
self.parse(misspelled_archive)
with self.assertRaises(ParseException):
sections = self.parse(missing_entries)
self.parse(missing_entries)
with self.assertRaises(ParseException):
sections = self.parse(misspelled_entries)
self.parse(misspelled_entries)
with self.assertRaises(ParseException):
sections = self.parse(missing_symbols)
self.parse(missing_symbols)
with self.assertRaises(ParseException):
sections = self.parse(missing_scheme_1)
self.parse(missing_scheme_1)
with self.assertRaises(ParseException):
sections = self.parse(missing_scheme_2)
self.parse(missing_scheme_2)
with self.assertRaises(ParseException):
sections = self.parse(missing_entity)
self.parse(missing_entity)
with self.assertRaises(ParseException):
sections = self.parse(wilcard_symbol)
self.parse(wilcard_symbol)
with self.assertRaises(ParseException):
sections = self.parse(empty_object_with_symbol)
self.parse(empty_object_with_symbol)
with self.assertRaises(ParseException):
sections = self.parse(wildcard_object_with_symbol)
self.parse(wildcard_object_with_symbol)
with self.assertRaises(ParseException):
sections = self.parse(empty_definition)
self.parse(empty_definition)
def test_explicit_blank_default_w_others(self):
expl_blnk_w_oth = """
@ -419,7 +427,6 @@ class MappingTest(FragmentTest):
self.assertEqual(entries, expected)
def test_implicit_blank_default_w_others(self):
impl_blnk_w_oth = """
[mapping]
@ -548,8 +555,7 @@ class MappingTest(FragmentTest):
"""
with self.assertRaises(ParseException):
mapping = self.parse(blank_first_condition)
self.parse(blank_first_condition)
def test_nonlast_default(self):
nonlast_default_1 = """
@ -587,13 +593,13 @@ class MappingTest(FragmentTest):
"""
with self.assertRaises(ParseException):
mapping = self.parse(nonlast_default_1)
self.parse(nonlast_default_1)
with self.assertRaises(ParseException):
mapping = self.parse(nonlast_default_2)
self.parse(nonlast_default_2)
with self.assertRaises(ParseException):
mapping = self.parse(nonlast_default_3)
self.parse(nonlast_default_3)
def test_duplicate_default(self):
duplicate_default_1 = """
@ -623,10 +629,11 @@ class MappingTest(FragmentTest):
"""
with self.assertRaises(ParseException):
mapping = self.parse(duplicate_default_1)
self.parse(duplicate_default_1)
with self.assertRaises(ParseException):
mapping = self.parse(duplicate_default_2)
self.parse(duplicate_default_2)
if __name__ == "__main__":
unittest.main()

View file

@ -17,11 +17,25 @@
import unittest
import sys
import os
try:
from generation import PlacementRule
except ImportError:
sys.path.append('../')
from generation import *
from pyparsing import *
from generation import PlacementRule
from generation import GenerationException
from generation import SectionsInfo
from generation import TemplateModel
from generation import GenerationModel
from fragments import FragmentFileModel
from fragments import Mapping
from fragments import Sections
from fragments import Scheme
from sdkconfig import SDKConfig
class GenerationModelTest(unittest.TestCase):
@ -270,7 +284,6 @@ class GenerationModelTest(unittest.TestCase):
self._compare_rules(expected, actual)
def test_rule_generation_nominal_4(self):
normal = """
[mapping]
@ -524,8 +537,10 @@ class GenerationModelTest(unittest.TestCase):
dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["rodata"].entries, "dram0_data")
rtc_text_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
iram0_text_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["text"].entries, "iram0_text")
dram0_data_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["rodata"].entries, "dram0_data")
@ -591,8 +606,10 @@ class GenerationModelTest(unittest.TestCase):
dram0_bss_default = self._get_default("dram0_bss", expected)
rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
@ -648,8 +665,10 @@ class GenerationModelTest(unittest.TestCase):
dram0_bss_default = self._get_default("dram0_bss", expected)
rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
iram0_text_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
dram0_data_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
@ -767,8 +786,10 @@ class GenerationModelTest(unittest.TestCase):
dram0_bss_default = self._get_default("dram0_bss", expected)
rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
@ -847,8 +868,10 @@ class GenerationModelTest(unittest.TestCase):
rtc_bss_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_text_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "rtc_text")
rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None,
self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None,
self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
iram0_text_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
dram0_data_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
@ -918,8 +941,10 @@ class GenerationModelTest(unittest.TestCase):
dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")
rtc_text_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["text"].entries, "rtc_text")
rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
rtc_text_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
rtc_data_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
@ -1052,7 +1077,7 @@ class GenerationModelTest(unittest.TestCase):
self._add_mapping(conflict_mapping)
with self.assertRaises(GenerationException):
actual = self.model.generate_rules(self.sdkconfig, self.sections_info)
self.model.generate_rules(self.sdkconfig, self.sections_info)
def test_rule_generation_condition(self):
generation_with_condition = """
@ -1095,5 +1120,6 @@ class GenerationModelTest(unittest.TestCase):
self._compare_rules(expected, actual)
if __name__ == "__main__":
unittest.main()

View file

@ -26,6 +26,7 @@ import socket
import pty
import filecmp
import threading
import errno
test_list = (
# Add new tests here. All files should be placed in IN_DIR. Columns are:
@ -51,6 +52,7 @@ SOCKET_TIMEOUT = 30
# the test is restarted after failure (idf_monitor has to be killed):
RETRIES_PER_TEST = 5
def monitor_timeout(process):
if process.poll() is None:
# idf_monitor is still running
@ -64,6 +66,7 @@ def monitor_timeout(process):
else:
raise
class TestRunner(object):
def __enter__(self):
self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@ -85,6 +88,7 @@ class TestRunner(object):
clientsocket.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
return clientsocket
def test_iteration(runner, test, startup_timeout):
print('\nRunning test on {} with filter "{}" and expecting {}'.format(test[0], test[1], test[2]))
try:
@ -140,6 +144,7 @@ def test_iteration(runner, test, startup_timeout):
else:
raise RuntimeError("The contents of the files are different. Please examine the artifacts.")
def main():
gstart = time.time()
if not os.path.exists(OUT_DIR):
@ -169,5 +174,6 @@ def main():
gend = time.time()
print('Execution took {:.2f} seconds\n'.format(gend - gstart))
if __name__ == "__main__":
main()

View file

@ -16,9 +16,13 @@
import sys
try:
import idf_size
except ImportError:
sys.path.append('..')
import idf_size
if __name__ == "__main__":
try:
idf_size.scan_to_header([], 'test')

View file

@ -22,11 +22,15 @@ import sys
import re
import argparse
try:
from Utility.CIAssignTest import AssignTest
except ImportError:
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path:
sys.path.insert(0, test_fw_path)
from Utility.CIAssignTest import AssignTest
from Utility.CIAssignTest import AssignTest, Group
from Utility.CIAssignTest import Group
class ExampleGroup(Group):

View file

@ -9,10 +9,12 @@ import argparse
import yaml
try:
from Utility import CIAssignTest
except ImportError:
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path:
sys.path.insert(0, test_fw_path)
from Utility import CIAssignTest

View file

@ -426,10 +426,10 @@ class BaseDUT(object):
:param data: data which needs to be checked and maybe transformed
"""
if type(data) is type(u''):
if isinstance(data, type(u'')):
try:
data = data.encode('utf-8')
except:
except Exception:
print(u'Cannot encode {} of type {}'.format(data, type(data)))
raise
return data
@ -529,9 +529,9 @@ class BaseDUT(object):
:return: match groups if match succeed otherwise None
"""
ret = None
if type(pattern.pattern) is type(u''):
if isinstance(pattern.pattern, type(u'')):
pattern = re.compile(BaseDUT.u_to_bytearray(pattern.pattern))
if type(data) is type(u''):
if isinstance(data, type(u'')):
data = BaseDUT.u_to_bytearray(data)
match = pattern.search(data)
if match:

View file

@ -54,7 +54,6 @@ class IDFApp(App.BaseApp):
assert os.path.exists(idf_path)
return idf_path
def get_binary_path(self, app_path):
"""
get binary path according to input app_path.

View file

@ -17,11 +17,8 @@ import os
import os.path
import sys
import re
import subprocess
import functools
import random
import tempfile
import time
from serial.tools import list_ports
@ -94,7 +91,7 @@ class IDFDUT(DUT.SerialDUT):
esp = esptool.ESP32ROM(port)
esp.connect()
return esp.read_mac()
except RuntimeError as e:
except RuntimeError:
return None
finally:
esp._port.close()
@ -183,7 +180,7 @@ class IDFDUT(DUT.SerialDUT):
:return: None
"""
raise NotImplementedError() # TODO: implement this
address = self.app.partition_table[partition]["offset"]
# address = self.app.partition_table[partition]["offset"]
size = self.app.partition_table[partition]["size"]
# TODO can use esp.erase_region() instead of this, I think
with open(".erase_partition.tmp", "wb") as f:
@ -231,7 +228,7 @@ class IDFDUT(DUT.SerialDUT):
return [x for x in ports if not cls.INVALID_PORT_PATTERN.search(x)]
# On MacOs with python3.6: type of espport is already utf8
if type(espport) is type(u''):
if isinstance(espport, type(u'')):
port_hint = espport
else:
port_hint = espport.decode('utf8')

View file

@ -59,9 +59,9 @@ def _convert_to_lower_case_bytes(item):
"""
if isinstance(item, (tuple, list)):
output = [_convert_to_lower_case_bytes(v) for v in item]
elif type(item) == type(b''):
elif isinstance(item, type(b'')):
output = item.lower()
elif type(item) == type(u''):
elif isinstance(item, type(u'')):
output = item.encode().lower()
else:
output = item

View file

@ -15,7 +15,7 @@
import matplotlib
# fix can't draw figure with docker
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.pyplot as plt # noqa: E402 - matplotlib.use('Agg') need to be before this
# candidate colors

View file

@ -29,7 +29,7 @@ def console_log(data, color="white", end="\n"):
if color not in _COLOR_CODES:
color = "white"
color_codes = _COLOR_CODES[color]
if type(data) is type(b''):
if isinstance(data, type(b'')):
data = data.decode('utf-8', 'replace')
print(color_codes + data, end=end)
if color not in ["white", "W"]:

View file

@ -155,6 +155,3 @@ texinfo_documents = [
author, 'TinyTestFW', 'One line description of project.',
'Miscellaneous'),
]

View file

@ -17,13 +17,16 @@ import re
import os
import sys
try:
import TinyFW
except ImportError:
# if we want to run test case outside `tiny-test-fw` folder,
# we need to insert tiny-test-fw path into sys path
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import TinyFW
import IDF

View file

@ -1,4 +1,3 @@
import sys
import glob
import tempfile
import os
@ -6,7 +5,6 @@ import os.path
import re
import shutil
import argparse
import json
import copy
PROJECT_NAME = "unit-test-app"
@ -22,6 +20,7 @@ CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs"))
BUILDS_DIR = os.path.join(PROJECT_PATH, "builds")
BINARIES_DIR = os.path.join(PROJECT_PATH, "output")
# Convert the values passed to the -T parameter to corresponding cache entry definitions
# TESTS_ALL and TEST_COMPONENTS
class TestComponentAction(argparse.Action):
@ -46,10 +45,11 @@ class TestComponentAction(argparse.Action):
# Brute force add reconfigure at the very beginning
existing_actions = getattr(namespace, "actions", [])
if not "reconfigure" in existing_actions:
if "reconfigure" not in existing_actions:
existing_actions = ["reconfigure"] + existing_actions
setattr(namespace, "actions", existing_actions)
class TestExcludeComponentAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
# Create a new of cache definition entry, adding previous elements
@ -66,16 +66,18 @@ class TestExcludeComponentAction(argparse.Action):
# Brute force add reconfigure at the very beginning
existing_actions = getattr(namespace, "actions", [])
if not "reconfigure" in existing_actions:
if "reconfigure" not in existing_actions:
existing_actions = ["reconfigure"] + existing_actions
setattr(namespace, "actions", existing_actions)
def add_argument_extensions(parser):
# For convenience, define a -T argument that gets converted to -D arguments
parser.add_argument('-T', '--test-component', help="Specify the components to test", nargs='+', action=TestComponentAction)
# For convenience, define a -T argument that gets converted to -D arguments
parser.add_argument('-E', '--test-exclude-components', help="Specify the components to exclude from testing", nargs='+', action=TestExcludeComponentAction)
def add_action_extensions(base_functions, base_actions):
def ut_apply_config(ut_apply_config_name, args):

View file

@ -29,7 +29,7 @@ class Parser(object):
""" parse unit test cases from build files and create files for test bench """
TAG_PATTERN = re.compile("([^=]+)(=)?(.+)?")
DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]")
DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]") # noqa: W605 - regular expression
CONFIG_PATTERN = re.compile(r"{([^}]+)}")
TEST_GROUPS_PATTERN = re.compile(r"TEST_GROUPS=(.*)$")
@ -213,7 +213,6 @@ class Parser(object):
return self.parse_tags_internal(configs, self.config_dependencies, self.CONFIG_PATTERN)
def get_test_groups(self, config_file):
"""
If the config file includes TEST_GROUPS variable, return its value as a list of strings.

View file

@ -374,7 +374,7 @@ class Handler(threading.Thread):
Utility.console_log("No case detected!", color="orange")
while not self.finish and not self.force_stop.isSet():
try:
self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),
self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'), # noqa: W605 - regex
get_child_case_name),
(self.WAIT_SIGNAL_PATTERN, device_wait_action), # wait signal pattern
(self.SEND_SIGNAL_PATTERN, device_send_action), # send signal pattern

View file

@ -3,11 +3,16 @@
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import print_function, division
import sys, subprocess, os.path, re
import sys
import subprocess
import os.path
import re
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
@ -24,6 +29,7 @@ def check_path(path):
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(["make"] + sys.argv[1:] + ["BATCH_BUILD=1"], stdout=subprocess.PIPE)
@ -32,5 +38,6 @@ def main():
print(line.rstrip())
sys.exit(make.wait())
if __name__ == "__main__":
main()