Merge branch 'bugfix/idf_size_multichip_support' into 'master'

tools: Fix idf_size.py

See merge request espressif/esp-idf!7814
Angus Gratton 2020-03-02 12:22:06 +08:00
commit 08b0b830a1
9 changed files with 25601 additions and 413 deletions

@@ -49,8 +49,8 @@ class MemRegions(object):
if target == 'esp32':
return sorted([
# TODO comment this
MemRegDef(0x3FFAE000, 17 * 0x2000 + 2 * 0x8000, MemRegions.DRAM_ID, 0),
# Consecutive MemRegDefs of the same type are joined into one MemRegDef
MemRegDef(0x3FFAE000, 17 * 0x2000 + 4 * 0x8000 + 4 * 0x4000, MemRegions.DRAM_ID, 0),
# MemRegDef(0x3FFAE000, 0x2000, MemRegions.DRAM_ID, 0),
# MemRegDef(0x3FFB0000, 0x8000, MemRegions.DRAM_ID, 0),
# MemRegDef(0x3FFB8000, 0x8000, MemRegions.DRAM_ID, 0),
@@ -71,7 +71,7 @@ class MemRegions(object):
# MemRegDef(0x3FFDC000, 0x2000, MemRegions.DRAM_ID, 0),
# MemRegDef(0x3FFDE000, 0x2000, MemRegions.DRAM_ID, 0),
#
MemRegDef(0x3FFE0000, 4 * 0x4000 + 2 * 0x8000, MemRegions.DIRAM_ID, 0x400BC000),
# The bootloader is there and it has to be counted as DRAM
# MemRegDef(0x3FFE0000, 0x4000, MemRegions.DIRAM_ID, 0x400BC000),
# MemRegDef(0x3FFE4000, 0x4000, MemRegions.DIRAM_ID, 0x400B8000),
# MemRegDef(0x3FFE8000, 0x8000, MemRegions.DIRAM_ID, 0x400B0000),
@@ -101,9 +101,7 @@ class MemRegions(object):
])
elif target == 'esp32s2':
return sorted([
# The following one memory region is defined instead of defining all 3 + 11 individually because their
# type (MemRegions.DIRAM_ID) is the same
MemRegDef(0x3FFB2000, 3 * 0x2000 + 11 * 0x4000, MemRegions.DIRAM_ID, 0x40022000),
MemRegDef(0x3FFB2000, 3 * 0x2000 + 18 * 0x4000, MemRegions.DIRAM_ID, 0x40022000),
# MemRegDef(0x3FFB2000, 0x2000, MemRegions.DIRAM_ID, 0x40022000),
# MemRegDef(0x3FFB4000, 0x2000, MemRegions.DIRAM_ID, 0x40024000),
# MemRegDef(0x3FFB6000, 0x2000, MemRegions.DIRAM_ID, 0x40026000),
@@ -119,8 +117,6 @@ class MemRegions(object):
# MemRegDef(0x3FFDC000, 0x4000, MemRegions.DIRAM_ID, 0x4004C000),
# MemRegDef(0x3FFE0000, 0x4000, MemRegions.DIRAM_ID, 0x40050000),
#
# 2nd stage bootloader iram_loader_seg starts at block 15. Therefore, the following blocks are not
# defined here and will not be counted in the total amount of available space
# MemRegDef(0x3FFE4000, 0x4000, MemRegions.DIRAM_ID, 0x40054000),
# MemRegDef(0x3FFE8000, 0x4000, MemRegions.DIRAM_ID, 0x40058000),
# MemRegDef(0x3FFEC000, 0x4000, MemRegions.DIRAM_ID, 0x4005C000),
@@ -128,8 +124,6 @@ class MemRegions(object):
# MemRegDef(0x3FFF4000, 0x4000, MemRegions.DIRAM_ID, 0x40064000),
# MemRegDef(0x3FFF8000, 0x4000, MemRegions.DIRAM_ID, 0x40068000),
# MemRegDef(0x3FFFC000, 0x4000, MemRegions.DIRAM_ID, 0x4006C000),
# Note the type of the last block which is in contrast with soc_memory_layout.c. It is used for
# startup stack therefore the type is D/IRAM (from the perspective of idf_size) and not DRAM.
])
else:
return None
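As a quick cross-check (illustrative only, not part of the commit), the larger joined-region lengths appearing in the diff line up with the top of the data-bus address range on both chips; the variable names below are invented for the example:

# illustrative arithmetic, not part of idf_size.py
esp32_dram_start = 0x3FFAE000
esp32_dram_len = 17 * 0x2000 + 4 * 0x8000 + 4 * 0x4000    # 0x52000
print(hex(esp32_dram_start + esp32_dram_len))              # 0x40000000
esp32s2_diram_start = 0x3FFB2000
esp32s2_diram_len = 3 * 0x2000 + 18 * 0x4000               # 0x4e000
print(hex(esp32s2_diram_start + esp32s2_diram_len))        # 0x40000000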
@@ -139,20 +133,33 @@ class MemRegions(object):
if not self.chip_mem_regions:
raise RuntimeError('Target {} is not implemented in idf_size'.format(target))
def _address_in_range(address, length, reg_address, reg_length):
def _address_in_range(self, address, length, reg_address, reg_length):
return address >= reg_address and (address - reg_address) <= (reg_length - length)
def get_names(self, dictionary, region_id):
result = []
# TODO or use origin and length
def get_address(d):
try:
return d['address']
except KeyError:
return d['origin']
def get_size(d):
try:
return d['size']
except KeyError:
return d['length']
result = set() # using a set will remove possible duplicates and subsequent operations with sets are more
# efficient
for m in self.chip_mem_regions:
result.append([n for (n, c) in iteritems(dictionary) if (self._address_in_range(c.address, c.size,
m.primary_addr, m.length) or
(m.type == self.DIRAM_ID and
self._address_in_range(c.address,
c.size,
m.secondary_addr,
m.length)))])
if m.type != region_id:
continue
# the following code is intentionally not a one-liner for better readability
for (n, c) in iteritems(dictionary):
if (self._address_in_range(get_address(c), get_size(c), m.primary_addr, m.length) or
(m.type == self.DIRAM_ID and
self._address_in_range(get_address(c), get_size(c), m.secondary_addr, m.length))):
result.add(n)
return result
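A minimal sketch (not part of the diff) of the containment test that get_names() applies to a D/IRAM region; the constants come from the ESP32-S2 definition above and the standalone helper name is made up for the example:

def address_in_range(address, length, reg_address, reg_length):
    # same check as MemRegions._address_in_range()
    return address >= reg_address and (address - reg_address) <= (reg_length - length)

diram_len = 3 * 0x2000 + 18 * 0x4000
# a section at the data-bus alias matches through the primary address...
print(address_in_range(0x3FFB4000, 0x100, 0x3FFB2000, diram_len))  # True
# ...and the same RAM seen through the instruction bus matches through the
# secondary address, which is why both checks are ORed for DIRAM_ID regions
print(address_in_range(0x40024000, 0x100, 0x40022000, diram_len))  # True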
@@ -335,69 +342,108 @@ def main():
output = ""
memory_config, sections = load_map_data(args.map_file)
MemRegNames = collections.namedtuple('MemRegNames', ['iram_names', 'dram_names', 'diram_names', 'used_iram_names',
'used_dram_names', 'used_diram_names'])
mem_reg = MemRegNames
mem_reg.iram_names = mem_regions.get_names(memory_config, MemRegions.IRAM_ID)
mem_reg.dram_names = mem_regions.get_names(memory_config, MemRegions.DRAM_ID)
mem_reg.diram_names = mem_regions.get_names(memory_config, MemRegions.DIRAM_ID)
mem_reg.used_iram_names = mem_regions.get_names(sections, MemRegions.IRAM_ID)
mem_reg.used_dram_names = mem_regions.get_names(sections, MemRegions.DRAM_ID)
mem_reg.used_diram_names = mem_regions.get_names(sections, MemRegions.DIRAM_ID)
if not args.json or not (args.archives or args.files or args.archive_details):
output += get_summary(mem_regions, memory_config, sections, args.json)
output += get_summary(mem_reg, memory_config, sections, args.json)
if args.archives:
output += get_detailed_sizes(mem_regions, sections, "archive", "Archive File", args.json)
output += get_detailed_sizes(mem_reg, sections, "archive", "Archive File", args.json)
if args.files:
output += get_detailed_sizes(mem_regions, sections, "file", "Object File", args.json)
output += get_detailed_sizes(mem_reg, sections, "file", "Object File", args.json)
if args.archive_details:
output += get_archive_symbols(mem_regions, sections, args.archive_details, args.json)
output += get_archive_symbols(mem_reg, sections, args.archive_details, args.json)
args.output_file.write(output)
def get_summary(mem_regions, memory_config, sections, as_json=False):
def get_summary(mem_reg, memory_config, sections, as_json=False):
def get_size(section):
try:
return sections[section]["size"]
except KeyError:
return 0
# if linker script changes, these need to change
total_iram = memory_config["iram0_0_seg"]["length"]
total_dram = memory_config["dram0_0_seg"]["length"]
used_data = get_size(".dram0.data")
used_bss = get_size(".dram0.bss")
used_dram = used_data + used_bss
dram_data_names = frozenset([n for n in mem_reg.used_dram_names if n.endswith('.data')])
dram_bss_names = frozenset([n for n in mem_reg.used_dram_names if n.endswith('.bss')])
dram_other_names = mem_reg.used_dram_names - dram_data_names - dram_bss_names
diram_data_names = frozenset([n for n in mem_reg.used_diram_names if n.endswith('.data')])
diram_bss_names = frozenset([n for n in mem_reg.used_diram_names if n.endswith('.bss')])
total_iram = sum(memory_config[n]["length"] for n in mem_reg.iram_names)
total_dram = sum(memory_config[n]["length"] for n in mem_reg.dram_names)
total_diram = sum(memory_config[n]["length"] for n in mem_reg.diram_names)
used_dram_data = sum(get_size(n) for n in dram_data_names)
used_dram_bss = sum(get_size(n) for n in dram_bss_names)
used_dram_other = sum(get_size(n) for n in dram_other_names)
used_dram = used_dram_data + used_dram_bss + used_dram_other
try:
used_dram_ratio = used_dram / total_dram
except ZeroDivisionError:
used_dram_ratio = float('nan')
used_iram = sum(get_size(s) for s in sections if s.startswith(".iram0"))
used_iram = sum(get_size(s) for s in sections if s in mem_reg.used_iram_names)
try:
used_iram_ratio = used_iram / total_iram
except ZeroDivisionError:
used_iram_ratio = float('nan')
used_diram_data = sum(get_size(n) for n in diram_data_names)
used_diram_bss = sum(get_size(n) for n in diram_bss_names)
used_diram = sum(get_size(n) for n in mem_reg.used_diram_names)
try:
used_diram_ratio = used_diram / total_diram
except ZeroDivisionError:
used_diram_ratio = float('nan')
flash_code = get_size(".flash.text")
flash_rodata = get_size(".flash.rodata")
total_size = used_data + used_iram + flash_code + flash_rodata
total_size = used_dram + used_iram + used_diram + flash_code + flash_rodata
output = ""
if as_json:
output = format_json(collections.OrderedDict([
("dram_data", used_data),
("dram_bss", used_bss),
("dram_data", used_dram_data + used_diram_data),
("dram_bss", used_dram_bss + used_diram_bss),
("dram_other", used_dram_other),
("used_dram", used_dram),
("available_dram", total_dram - used_dram),
("used_dram_ratio", used_dram_ratio),
("used_dram_ratio", used_dram_ratio if total_dram != 0 else 0),
("used_iram", used_iram),
("available_iram", total_iram - used_iram),
("used_iram_ratio", used_iram_ratio),
("used_iram_ratio", used_iram_ratio if total_iram != 0 else 0),
("used_diram", used_diram),
("available_diram", total_diram - used_diram),
("used_diram_ratio", used_diram_ratio if total_diram != 0 else 0),
("flash_code", flash_code),
("flash_rodata", flash_rodata),
("total_size", total_size)
]))
else:
output += "Total sizes:\n"
output += " DRAM .data size: {:>7} bytes\n".format(used_data)
output += " DRAM .bss size: {:>7} bytes\n".format(used_bss)
output += " DRAM .data size: {:>7} bytes\n".format(used_dram_data + used_diram_data)
output += " DRAM .bss size: {:>7} bytes\n".format(used_dram_bss + used_diram_bss)
if used_dram_other > 0:
output += " DRAM other size: {:>7} bytes ({})\n".format(used_dram_other, ', '.join(dram_other_names))
output += "Used static DRAM: {:>7} bytes ({:>7} available, {:.1%} used)\n".format(
used_dram, total_dram - used_dram, used_dram_ratio)
output += "Used static IRAM: {:>7} bytes ({:>7} available, {:.1%} used)\n".format(
used_iram, total_iram - used_iram, used_iram_ratio)
if total_diram > 0:
output += "Used stat D/IRAM: {:>7} bytes ({:>7} available, {:.1%} used)\n".format(
used_diram, total_diram - used_diram, used_diram_ratio)
output += " Flash code: {:>7} bytes\n".format(flash_code)
output += " Flash rodata: {:>7} bytes\n".format(flash_rodata)
output += "Total image size:~{:>7} bytes (.bin may be padded larger)\n".format(total_size)
@@ -405,44 +451,49 @@ def get_summary(mem_regions, memory_config, sections, as_json=False):
return output
def get_detailed_sizes(mem_regions, sections, key, header, as_json=False):
def get_detailed_sizes(mem_reg, sections, key, header, as_json=False):
sizes = sizes_by_key(sections, key)
# these sets are also computed in get_summary() but they are small, so it should not matter
dram_data_names = frozenset([n for n in mem_reg.used_dram_names if n.endswith('.data')])
dram_bss_names = frozenset([n for n in mem_reg.used_dram_names if n.endswith('.bss')])
dram_other_names = mem_reg.used_dram_names - dram_data_names - dram_bss_names
diram_data_names = frozenset([n for n in mem_reg.used_diram_names if n.endswith('.data')])
diram_bss_names = frozenset([n for n in mem_reg.used_diram_names if n.endswith('.bss')])
result = {}
for k in sizes:
v = sizes[k]
r = collections.OrderedDict()
r["data"] = v.get(".dram0.data", 0)
r["bss"] = v.get(".dram0.bss", 0)
r["iram"] = sum(t for (s,t) in v.items() if s.startswith(".iram0"))
r["data"] = sum(v.get(n, 0) for n in dram_data_names | diram_data_names)
r["bss"] = sum(v.get(n, 0) for n in dram_bss_names | diram_bss_names)
r["other"] = sum(v.get(n, 0) for n in dram_other_names)
r["iram"] = sum(t for (s,t) in iteritems(v) if s in mem_reg.used_iram_names)
r["diram"] = sum(t for (s,t) in iteritems(v) if s in mem_reg.used_diram_names)
r["flash_text"] = v.get(".flash.text", 0)
r["flash_rodata"] = v.get(".flash.rodata", 0)
r["total"] = sum(r.values())
result[k] = r
def return_total_size(elem):
val = elem[1]
return val["total"]
def return_header(elem):
return elem[0]
s = sorted(list(result.items()), key=return_header)
s = sorted(list(result.items()), key=lambda elem: elem[0])
# do a secondary sort in order to have consistent order (for diff-ing the output)
s = sorted(s, key=return_total_size, reverse=True)
s = sorted(s, key=lambda elem: elem[1]['total'], reverse=True)
output = ""
if as_json:
output = format_json(collections.OrderedDict(s))
else:
header_format = "{:>24} {:>10} {:>6} {:>6} {:>10} {:>8} {:>7}\n"
header_format = "{:>24} {:>10} {:>6} {:>7} {:>6} {:>8} {:>10} {:>8} {:>7}\n"
output += "Per-{} contributions to ELF file:\n".format(key)
output += header_format.format(header,
"DRAM .data",
"& .bss",
"& other",
"IRAM",
"D/IRAM",
"Flash code",
"& rodata",
"Total")
@@ -453,7 +504,9 @@ def get_detailed_sizes(mem_regions, sections, key, header, as_json=False):
output += header_format.format(k[:24],
v["data"],
v["bss"],
v["other"],
v["iram"],
v["diram"],
v["flash_text"],
v["flash_rodata"],
v["total"])
@@ -461,8 +514,11 @@ def get_detailed_sizes(mem_regions, sections, key, header, as_json=False):
return output
def get_archive_symbols(mem_regions, sections, archive, as_json=False):
interested_sections = [".dram0.data", ".dram0.bss", ".iram0.text", ".iram0.vectors", ".flash.text", ".flash.rodata"]
def get_archive_symbols(mem_reg, sections, archive, as_json=False):
interested_sections = mem_reg.used_dram_names | mem_reg.used_iram_names | mem_reg.used_diram_names
interested_sections |= frozenset([".flash.text", ".flash.rodata"])
# sort the list for consistent order in the output
interested_sections = sorted(list(interested_sections))
result = {}
for t in interested_sections:
result[t] = {}
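For illustration only, a throwaway snippet that prints the widened per-archive header row built from the format string and column names shown above (the new columns are "& other" and "D/IRAM"):

header_format = "{:>24} {:>10} {:>6} {:>7} {:>6} {:>8} {:>10} {:>8} {:>7}\n"
print(header_format.format("Archive File", "DRAM .data", "& .bss", "& other",
                           "IRAM", "D/IRAM", "Flash code", "& rodata", "Total"), end="")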

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -1,13 +1,17 @@
{
"dram_data": 9324,
"dram_bss": 8296,
"dram_other": 0,
"used_dram": 17620,
"available_dram": 163116,
"used_dram_ratio": 0.09749026203966006,
"used_iram": 38932,
"available_iram": 92140,
"used_iram_ratio": 0.297027587890625,
"used_diram": 0,
"available_diram": 0,
"used_diram_ratio": 0,
"flash_code": 146944,
"flash_rodata": 39580,
"total_size": 234780
"total_size": 243076
}
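As a sanity check on the expected JSON above (illustrative only), the new total follows from the formula in get_summary(), total_size = used_dram + used_iram + used_diram + flash_code + flash_rodata:

# values copied from the expected output above
used_dram, used_iram, used_diram = 17620, 38932, 0
flash_code, flash_rodata = 146944, 39580
print(used_dram + used_iram + used_diram + flash_code + flash_rodata)  # 243076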

@@ -5,4 +5,4 @@ Used static DRAM: 17620 bytes ( 163116 available, 9.7% used)
Used static IRAM: 38932 bytes ( 92140 available, 29.7% used)
Flash code: 146944 bytes
Flash rodata: 39580 bytes
Total image size:~ 234780 bytes (.bin may be padded larger)
Total image size:~ 243076 bytes (.bin may be padded larger)

@@ -2,27 +2,42 @@
{ coverage debug sys \
&& coverage erase &> output \
&& echo -e "\n***\nRunning idf_size.py..." >> output \
&& echo -e "\n***\nRunning idf_size.py..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py app.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --archives..." >> output \
&& echo -e "\n***\nRunning idf_size.py on bootloader..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py bootloader.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --archives..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --archives app.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --files..." >> output \
&& echo -e "\n***\nRunning idf_size.py --files..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --files app.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --archive_details..." >> output \
&& echo -e "\n***\nRunning idf_size.py --archive_details..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --archive_details libdriver.a app.map &>> output \
&& echo -e "\n***\nProducing JSON output..." >> output \
&& echo -e "\n***\nRunning idf_size.py for esp32s2..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --target esp32s2 app_esp32s2.map &>> output \
&& echo -e "\n***\nRunning idf_size.py on bootloader for esp32s2..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --target esp32s2 bootloader_esp32s2.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --archives for esp32s2..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --target esp32s2 --archives app_esp32s2.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --files for esp32s2..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --target esp32s2 --files app_esp32s2.map &>> output \
&& echo -e "\n***\nRunning idf_size.py --archive_details for esp32s2..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --target esp32s2 --archive_details libdriver.a app_esp32s2.map &>> output \
&& echo -e "\n***\nProducing JSON output..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --json app.map &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --json --archives app.map &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --json --files app.map &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --json --archive_details libdriver.a app.map &>> output \
&& echo -e "\n***\nProducing JSON file output..." >> output \
&& echo -e "\n***\nProducing JSON file output..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py --json --output-file output.json app.map &>> output \
&& echo -e "\n***\nProducing text file output..." >> output \
&& echo -e "\n***\nProducing text file output..." &>> output \
&& coverage run -a $IDF_PATH/tools/idf_size.py -o output.txt app.map &>> output \
&& echo -e "\n***\nRunning idf_size_tests.py..." >> output \
&& echo -e "\n***\nRunning idf_size_tests.py..." &>> output \
&& coverage run -a $IDF_PATH/tools/test_idf_size/test_idf_size.py &>> output \
&& echo -e "\n\nComparing expected output..." \
&& diff -Z output expected_output \
&& echo -e "\n\nComparing expected json output..." \
&& diff -Z output.json expected_output.json \
&& echo -e "\n\nComparing expected text output..." \
&& diff -Z output.txt expected_output.txt \
&& coverage report \
; } || { echo 'The test for idf_size has failed. Please examine the artifacts.' ; exit 1; }

@@ -16,6 +16,7 @@
from __future__ import print_function
import sys
import collections
try:
import idf_size
@@ -40,4 +41,8 @@ if __name__ == "__main__":
# This used to crash with a division by zero error but now it just prints nan% due to
# zero lengths
print(idf_size.get_summary(idf_size.MemRegions('esp32'), {"iram0_0_seg": {"length":0}, "dram0_0_seg": {"length":0}}, {}), end="")
MemRegNames = collections.namedtuple('MemRegNames', ['iram_names', 'dram_names', 'diram_names', 'used_iram_names',
'used_dram_names', 'used_diram_names'])
mem_reg = MemRegNames(set(), set(), set(), set(), set(), set())
print(idf_size.get_summary(mem_reg, {"iram0_0_seg": {"origin":0,"length":0}, "dram0_0_seg":
{"origin":0, "length":0}}, {}), end="")