# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

""" DUT for IDF applications """
import os
import os.path
import sys
import re
import functools
import tempfile
import subprocess

# python2 and python3 queue package name is different
try:
    import Queue as _queue
except ImportError:
    import queue as _queue


from serial.tools import list_ports

import DUT
import Utility

try:
    import esptool
except ImportError:  # cheat and use IDF's copy of esptool if available
    idf_path = os.getenv("IDF_PATH")
    if not idf_path or not os.path.exists(idf_path):
        raise
    sys.path.insert(0, os.path.join(idf_path, "components", "esptool_py", "esptool"))
    import esptool


class IDFToolError(OSError):
    pass


class IDFDUTException(RuntimeError):
    pass


class IDFRecvThread(DUT.RecvThread):

    PERFORMANCE_PATTERN = re.compile(r"\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n")
    EXCEPTION_PATTERNS = [
        re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))"),
        re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)"),
        # group the watchdog reset names so the alternation stays inside the parentheses
        re.compile(r"(rst 0x\d+ \((?:TG\dWDT_SYS_RESET|TGWDT_CPU_RESET)\))")
    ]
    BACKTRACE_PATTERN = re.compile(r"Backtrace:((\s(0x[0-9a-f]{8}):0x[0-9a-f]{8})+)")
    BACKTRACE_ADDRESS_PATTERN = re.compile(r"(0x[0-9a-f]{8}):0x[0-9a-f]{8}")
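
    # Illustrative log lines these patterns are meant to match (hand-written
    # samples derived from the regexes above, not captured from a real device):
    #   Guru Meditation Error: Core  0 panic'ed (LoadProhibited)
    #   abort() was called at PC 0x400d3273 on core 0
    #   Backtrace: 0x400d3273:0x3ffb4f10 0x400d5678:0x3ffb4f30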

    def __init__(self, read, dut):
        super(IDFRecvThread, self).__init__(read, dut)
        self.exceptions = _queue.Queue()

    def collect_performance(self, comp_data):
        matches = self.PERFORMANCE_PATTERN.findall(comp_data)
        for match in matches:
            Utility.console_log("[Performance][{}]: {}".format(match[0], match[1]),
                                color="orange")

    def detect_exception(self, comp_data):
        for pattern in self.EXCEPTION_PATTERNS:
            start = 0
            while True:
                match = pattern.search(comp_data, pos=start)
                if match:
                    start = match.end()
                    self.exceptions.put(match.group(0))
                    Utility.console_log("[Exception]: {}".format(match.group(0)), color="red")
                else:
                    break

    def detect_backtrace(self, comp_data):
        start = 0
        while True:
            match = self.BACKTRACE_PATTERN.search(comp_data, pos=start)
            if match:
                start = match.end()
                Utility.console_log("[Backtrace]:{}".format(match.group(1)), color="red")
                # translate backtrace
                addresses = self.BACKTRACE_ADDRESS_PATTERN.findall(match.group(1))
                translated_backtrace = ""
                for addr in addresses:
                    ret = self.dut.lookup_pc_address(addr)
                    if ret:
                        translated_backtrace += ret + "\n"
                if translated_backtrace:
                    Utility.console_log("Translated backtrace:\n" + translated_backtrace, color="yellow")
                else:
                    Utility.console_log("Failed to translate backtrace", color="yellow")
            else:
                break

    CHECK_FUNCTIONS = [collect_performance, detect_exception, detect_backtrace]


def _uses_esptool(func):
    """ Suspend listener thread, connect with esptool,
    call target function with esptool instance,
    then resume listening for output
    """
    @functools.wraps(func)
    def handler(self, *args, **kwargs):
        self.stop_receive()

        settings = self.port_inst.get_settings()

        try:
            rom = esptool.ESP32ROM(self.port_inst)
            rom.connect('hard_reset')
            esp = rom.run_stub()

            ret = func(self, esp, *args, **kwargs)
            # do hard reset after using esptool
            esp.hard_reset()
        finally:
            # always restore the original port settings
            self.port_inst.apply_settings(settings)

        self.start_receive()

        return ret
    return handler
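
# Usage sketch for the decorator above (hypothetical method, not part of the
# real API): the wrapped function receives the connected stub loader as `esp`.
#
#     @_uses_esptool
#     def read_chip_mac(self, esp):
#         return esp.read_mac()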


class IDFDUT(DUT.SerialDUT):
    """ IDF DUT, extends serial with esptool methods

    (Becomes aware of IDFApp instance which holds app-specific data)
    """

    # the /dev/ttyAMA0 port is listed on Raspberry Pi
    # the /dev/tty.Bluetooth-Incoming-Port port is listed on macOS
    INVALID_PORT_PATTERN = re.compile(r"AMA|Bluetooth")
    # whether to erase the NVS partition in start_app()
    ERASE_NVS = True
    RECV_THREAD_CLS = IDFRecvThread
    TOOLCHAIN_PREFIX = "xtensa-esp32-elf-"

    def __init__(self, name, port, log_file, app, allow_dut_exception=False, **kwargs):
        super(IDFDUT, self).__init__(name, port, log_file, app, **kwargs)
        self.allow_dut_exception = allow_dut_exception
        self.exceptions = _queue.Queue()

    @classmethod
    def get_mac(cls, app, port):
        """
        get MAC address via esptool

        :param app: application instance (to get tool)
        :param port: serial port as string
        :return: MAC address or None
        """
        esp = None
        try:
            esp = esptool.ESP32ROM(port)
            esp.connect()
            return esp.read_mac()
        except RuntimeError:
            return None
        finally:
            # guard against the case where opening the port itself failed
            if esp:
                # do hard reset after using esptool
                esp.hard_reset()
                esp._port.close()

    @classmethod
    def confirm_dut(cls, port, app, **kwargs):
        return cls.get_mac(app, port) is not None
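
    # Usage sketch (hypothetical port name; `app` is the IDFApp instance the
    # test framework created):
    #
    #     if IDFDUT.confirm_dut("/dev/ttyUSB0", app):
    #         print(IDFDUT.get_mac(app, "/dev/ttyUSB0"))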

    @_uses_esptool
    def _try_flash(self, esp, erase_nvs, baud_rate):
        """
        Called by start_app() to try flashing at a particular baud rate.

        Structured this way so @_uses_esptool will reconnect each time
        """
        try:
            # note: opening here prevents us from having to seek back to 0 each time
            flash_files = [(offs, open(path, "rb")) for (offs, path) in self.app.flash_files]

            if erase_nvs:
                address = self.app.partition_table["nvs"]["offset"]
                size = self.app.partition_table["nvs"]["size"]
                nvs_file = tempfile.TemporaryFile()
                nvs_file.write(b'\xff' * size)
                nvs_file.seek(0)
                flash_files.append((int(address, 0), nvs_file))

            # fake flasher args object, this is a hack until
            # the esptool Python API is improved
            class FlashArgs(object):
                def __init__(self, attributes):
                    for key, value in attributes.items():
                        self.__setattr__(key, value)

            flash_args = FlashArgs({
                'flash_size': self.app.flash_settings["flash_size"],
                'flash_mode': self.app.flash_settings["flash_mode"],
                'flash_freq': self.app.flash_settings["flash_freq"],
                'addr_filename': flash_files,
                'no_stub': False,
                'compress': True,
                'verify': False,
                'encrypt': False,
                'erase_all': False,
            })

            esp.change_baud(baud_rate)
            esptool.detect_flash_size(esp, flash_args)
            esptool.write_flash(esp, flash_args)
        finally:
            for (_, f) in flash_files:
                f.close()

    def start_app(self, erase_nvs=ERASE_NVS):
        """
        download and start app.

        :param erase_nvs: whether to erase the NVS partition during flash
        :return: None
        """
        for baud_rate in [921600, 115200]:
            try:
                self._try_flash(erase_nvs, baud_rate)
                break
            except RuntimeError:
                continue
        else:
            raise IDFToolError()
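
    # Minimal usage sketch (assumes the test framework already constructed
    # `dut` as an IDFDUT for a built app; names are illustrative):
    #
    #     dut.start_app()   # flash at 921600 baud, falling back to 115200
    #     dut.reset()       # hard reset via esptool afterwards if needed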

    @_uses_esptool
    def reset(self, esp):
        """
        hard reset DUT

        :return: None
        """
        # decorator `_uses_esptool` already performs a hard reset,
        # so we don't need to do anything in this method
        pass

    @_uses_esptool
    def erase_partition(self, esp, partition):
        """
        :param partition: partition name to erase
        :return: None
        """
        raise NotImplementedError()  # TODO: implement this
        # unreachable draft, kept for reference:
        # address = self.app.partition_table[partition]["offset"]
        # size = self.app.partition_table[partition]["size"]
        # TODO can use esp.erase_region() instead of this, I think
        # with open(".erase_partition.tmp", "wb") as f:
        #     f.write(b'\xff' * size)

    @_uses_esptool
    def dump_flush(self, esp, output_file, **kwargs):
        """
        dump flash

        :param output_file: output file name; a relative path is resolved against the log folder.
        :keyword partition: partition name, dump the partition.
                 ``partition`` is preferred over ``address`` and ``size``.
        :keyword address: dump from address (need to be used with size)
        :keyword size: dump size (need to be used with address)
        :return: None
        """
        if os.path.isabs(output_file) is False:
            output_file = os.path.join(self.app.get_log_folder(), output_file)

        if "partition" in kwargs:
            partition = self.app.partition_table[kwargs["partition"]]
            _address = partition["offset"]
            _size = partition["size"]
        elif "address" in kwargs and "size" in kwargs:
            _address = kwargs["address"]
            _size = kwargs["size"]
        else:
            raise IDFToolError("You must specify 'partition' or ('address' and 'size') to dump flash")

        content = esp.read_flash(_address, _size)
        with open(output_file, "wb") as f:
            f.write(content)
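
    # Usage sketch for dump_flush() (hedged example; the partition name and
    # addresses depend on the app's partition table):
    #
    #     dut.dump_flush("nvs_dump.bin", partition="nvs")
    #     dut.dump_flush("flash_head.bin", address=0x1000, size=0x7000)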

    @classmethod
    def list_available_ports(cls):
        ports = [x.device for x in list_ports.comports()]
        espport = os.getenv('ESPPORT')
        if not espport:
            # It's a little hard to filter out invalid ports with `serial.tools.list_ports.grep()`:
            # the check condition in `grep` is `if r.search(port) or r.search(desc) or r.search(hwid)`,
            # which means all 3 conditions must fail for a port to be filtered out,
            # so parts of the filter would not be straightforward to users,
            # and a negative regular expression (`^((?!aa|bb|cc).)*$`) is not easy to understand.
            # Filtering out invalid ports ourselves is much simpler.
            return [x for x in ports if not cls.INVALID_PORT_PATTERN.search(x)]

        # On macOS with Python 3.6, espport is already a unicode string
        if isinstance(espport, type(u'')):
            port_hint = espport
        else:
            port_hint = espport.decode('utf8')

        # If $ESPPORT is a valid port, make it appear first in the list
        if port_hint in ports:
            ports.remove(port_hint)
            return [port_hint] + ports

        # On macOS, user may set ESPPORT to /dev/tty.xxx while
        # pySerial lists only the corresponding /dev/cu.xxx port
        if sys.platform == 'darwin' and 'tty.' in port_hint:
            port_hint = port_hint.replace('tty.', 'cu.')
            if port_hint in ports:
                ports.remove(port_hint)
                return [port_hint] + ports

        return ports
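
    # For example, with ESPPORT=/dev/ttyUSB1 exported, list_available_ports()
    # would return something like ['/dev/ttyUSB1', '/dev/ttyUSB0'] (illustrative
    # values), so the hinted port gets probed first.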

    def lookup_pc_address(self, pc_addr):
        cmd = ["%saddr2line" % self.TOOLCHAIN_PREFIX,
               "-pfiaC", "-e", self.app.elf_file, pc_addr]
        ret = ""
        try:
            translation = subprocess.check_output(cmd)
            ret = translation.decode()
        except OSError:
            pass
        return ret
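
    # Equivalent shell command for reference (ELF path and address are
    # placeholders):
    #
    #     xtensa-esp32-elf-addr2line -pfiaC -e build/app.elf 0x400d3273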

    def stop_receive(self):
        if self.receive_thread:
            while True:
                try:
                    self.exceptions.put(self.receive_thread.exceptions.get(timeout=0))
                except _queue.Empty:
                    break
        super(IDFDUT, self).stop_receive()

    def get_exceptions(self):
        """ Get exceptions detected by DUT receive thread. """
        if self.receive_thread:
            while True:
                try:
                    self.exceptions.put(self.receive_thread.exceptions.get(timeout=0))
                except _queue.Empty:
                    break
        exceptions = []
        while True:
            try:
                exceptions.append(self.exceptions.get(timeout=0))
            except _queue.Empty:
                break
        return exceptions
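
    # Usage sketch (hedged): a test case can inspect detected panics without
    # closing the DUT, e.g.
    #
    #     for exc in dut.get_exceptions():
    #         Utility.console_log("DUT reported: " + exc, color="red")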

    def close(self):
        super(IDFDUT, self).close()
        if not self.allow_dut_exception and self.get_exceptions():
            Utility.console_log("DUT exception detected on {}".format(self), color="red")
            raise IDFDUTException()