ldgen: implement common fragment parsing

Renz Christian Bagaporo 2019-03-19 14:35:47 +08:00
parent e349329d86
commit 7dcef2c33c
4 changed files with 249 additions and 199 deletions
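For illustration, the grammar added in this commit accepts fragment files whose keys can carry if/elif/else blocks that are expanded against the project configuration while the file is parsed. The sketch below drives the new FragmentFile/SDKConfig API on such a file; the fragment contents, object and scheme names, the config option, and the Kconfig/sdkconfig paths are assumptions made for the example, not part of this change.

import tempfile

from fragments import FragmentFile
from sdkconfig import SDKConfig

# Hypothetical fragment text; the archive, object, scheme and option names
# are illustrative only.
EXAMPLE_FRAGMENT = """\
[mapping:my_component]
archive: libmy_component.a
entries:
    if PERFORMANCE_LEVEL = 3:
        croutine (noflash)
    else:
        * (default)
"""

with tempfile.NamedTemporaryFile("w+", suffix=".lf") as f:
    f.write(EXAMPLE_FRAGMENT)
    f.flush()
    # SDKConfig is constructed the same way ldgen_main.py calls it: Kconfig
    # file, sdkconfig file and environment definitions (paths assumed to exist).
    sdkconfig = SDKConfig("Kconfig", "sdkconfig", [])
    fragment_file = FragmentFile(f.name, sdkconfig)
    # Conditionals are already resolved at this point; each fragment holds
    # only the entries whose conditions evaluated to true.
    for fragment in fragment_file.fragments:
        print(fragment.name, fragment.entries)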


@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import os
from sdkconfig import SDKConfig
@@ -23,49 +21,179 @@ from pyparsing import restOfLine
from pyparsing import alphanums
from pyparsing import Word
from pyparsing import alphas
from pyparsing import ParseBaseException
from pyparsing import ParseFatalException
from pyparsing import Suppress
from pyparsing import Group
from pyparsing import Literal
from pyparsing import ZeroOrMore
from pyparsing import Optional
from pyparsing import originalTextFor
from common import LdGenFailure
from pyparsing import Forward
from pyparsing import indentedBlock
from collections import namedtuple
import abc
class FragmentFileModel():
KeyGrammar = namedtuple("KeyGrammar", "grammar min max required")
class FragmentFile():
"""
Fragment file internal representation. Parses and stores instances of the fragment definitions
contained within the file.
"""
def __init__(self, fragment_file):
def __init__(self, fragment_file, sdkconfig):
try:
fragment_file = open(fragment_file, "r")
except TypeError:
pass
path = os.path.realpath(fragment_file.name)
sections = Sections.get_fragment_grammar()
scheme = Scheme.get_fragment_grammar()
mapping = Mapping.get_fragment_grammar()
indent_stack = [1]
# Each fragment file is composed of sections, scheme or mapping fragments. The grammar
# for each of those objects is defined in the respective classes
parser = OneOrMore(sections | scheme | mapping)
class parse_ctx:
fragment = None # current fragment
key = "" # current key
keys = list() # list of keys parsed
key_grammar = None # current key grammar
# Set any text beginning with # as comment
parser.ignore("#" + restOfLine)
@staticmethod
def reset():
parse_ctx.fragment_instance = None
parse_ctx.key = ""
parse_ctx.keys = list()
parse_ctx.key_grammar = None
try:
self.fragments = parser.parseFile(fragment_file, parseAll=True)
except ParseBaseException as e:
# the actual parse error is kind of useless for normal users, so just point to the location of
# the error
raise LdGenFailure("Parse error in linker fragment %s: error at line %d col %d (char %d)" % (
fragment_file.name, e.lineno, e.column, e.loc))
def fragment_type_parse_action(toks):
parse_ctx.reset()
parse_ctx.fragment = FRAGMENT_TYPES[toks[0]]() # create instance of the fragment
return None
def expand_conditionals(toks, stmts):
try:
stmt = toks["value"]
stmts.append(stmt)
except KeyError:
try:
conditions = toks["conditional"]
for condition in conditions:
try:
_toks = condition[1]
_cond = condition[0]
if sdkconfig.evaluate_expression(_cond):
expand_conditionals(_toks, stmts)
break
except IndexError:
expand_conditionals(condition[0], stmts)
except KeyError:
for tok in toks:
expand_conditionals(tok, stmts)
def key_body_parsed(pstr, loc, toks):
stmts = list()
expand_conditionals(toks, stmts)
if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
(parse_ctx.key_grammar.min, parse_ctx.key))
if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
(parse_ctx.key_grammar.max, parse_ctx.key))
try:
parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
except Exception as e:
raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, e.message))
return None
key = Word(alphanums + "_") + Suppress(":")
key_stmt = Forward()
condition_block = indentedBlock(key_stmt, indent_stack)
key_stmts = OneOrMore(condition_block)
key_body = Suppress(key) + key_stmts
key_body.setParseAction(key_body_parsed)
condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName("condition")
if_condition = Group(Suppress("if") + condition + Suppress(":") + condition_block)
elif_condition = Group(Suppress("elif") + condition + Suppress(":") + condition_block)
else_condition = Group(Suppress("else") + Suppress(":") + condition_block)
conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName("conditional")
def key_parse_action(pstr, loc, toks):
key = toks[0]
if key in parse_ctx.keys:
raise ParseFatalException(pstr, loc, "duplicate key '%s' value definition" % parse_ctx.key)
parse_ctx.key = key
parse_ctx.keys.append(key)
try:
parse_ctx.key_grammar = parse_ctx.fragment.get_key_grammars()[key]
key_grammar = parse_ctx.key_grammar.grammar
except KeyError:
raise ParseFatalException(pstr, loc, "key '%s' is not supported by fragment" % key)
except Exception as e:
raise ParseFatalException(pstr, loc, "unable to parse key '%s'; %s" % (key, e.message))
key_stmt << (conditional | Group(key_grammar).setResultsName("value"))
return None
def name_parse_action(pstr, loc, toks):
parse_ctx.fragment.name = toks[0]
key.setParseAction(key_parse_action)
ftype = Word(alphas).setParseAction(fragment_type_parse_action)
fid = Suppress(":") + Word(alphanums + "_.").setResultsName("name")
fid.setParseAction(name_parse_action)
header = Suppress("[") + ftype + fid + Suppress("]")
def fragment_parse_action(pstr, loc, toks):
key_grammars = parse_ctx.fragment.get_key_grammars()
required_keys = set([k for (k,v) in key_grammars.items() if v.required])
present_keys = required_keys.intersection(set(parse_ctx.keys))
if present_keys != required_keys:
raise ParseFatalException(pstr, loc, "required keys %s for fragment not found" %
list(required_keys - present_keys))
return parse_ctx.fragment
fragment_stmt = Forward()
fragment_block = indentedBlock(fragment_stmt, indent_stack)
fragment_if_condition = Group(Suppress("if") + condition + Suppress(":") + fragment_block)
fragment_elif_condition = Group(Suppress("elif") + condition + Suppress(":") + fragment_block)
fragment_else_condition = Group(Suppress("else") + Suppress(":") + fragment_block)
fragment_conditional = (fragment_if_condition + Optional(OneOrMore(fragment_elif_condition)) +
Optional(fragment_else_condition)).setResultsName("conditional")
fragment = (header + OneOrMore(indentedBlock(key_body, indent_stack, False))).setResultsName("value")
fragment.setParseAction(fragment_parse_action)
fragment.ignore("#" + restOfLine)
fragment_stmt << (Group(fragment) | Group(fragment_conditional))
def fragment_stmt_parsed(pstr, loc, toks):
stmts = list()
expand_conditionals(toks, stmts)
return stmts
parser = ZeroOrMore(fragment_stmt)
parser.setParseAction(fragment_stmt_parsed)
self.fragments = parser.parseFile(fragment_file, parseAll=True)
for fragment in self.fragments:
fragment.path = path
class Fragment:
class Fragment():
__metaclass__ = abc.ABCMeta
"""
Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragments and performs processing
such as checking the validity of the fragment name and getting the entry values.
@@ -74,22 +202,20 @@ class Fragment:
IDENTIFIER = Word(alphas + "_", alphanums + "_")
ENTITY = Word(alphanums + ".-_$")
def __init__(self, name, entries):
self.path = None
self.name = name
self.entries = entries
@abc.abstractmethod
def set_key_value(self, key, parse_results):
pass
@abc.abstractmethod
def get_key_grammars(self):
pass
class Sections(Fragment):
def __init__(self, name, entries):
Fragment.__init__(self, name, entries)
self._process_entries()
def _process_entries(self):
# Quietly ignore duplicate entries
self.entries = set(self.entries)
self.entries = list(self.entries)
grammars = {
"entries": KeyGrammar(Word(alphanums + "+.").setResultsName("section"), 1, None, True)
}
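# Illustrative only, not part of this change: with the KeyGrammar above,
# "entries" needs at least one value and has no upper bound, so a minimal
# sections fragment could look like
#
#   [sections:text]
#   entries:
#       .text+
#       .literal+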
"""
Utility function that returns a list of sections given a sections fragment entry,
@@ -110,20 +236,14 @@ class Sections(Fragment):
else:
return (sections_entry, None)
@staticmethod
def get_fragment_grammar():
name = Fragment.IDENTIFIER
header = Suppress("[") + Suppress("sections") + Suppress(":") + name.setResultsName("name") + Suppress("]")
entry = Word(alphanums + "+" + ".")
entries = Suppress("entries") + Suppress(":") + Group(OneOrMore(entry)).setResultsName("entries")
def set_key_value(self, key, parse_results):
if key == "entries":
self.entries = set()
for result in parse_results:
self.entries.add(result["section"])
sections = Group(header + entries)
sections.setParseAction(lambda t: Sections(t[0].name, t[0].entries))
sections.ignore("#" + restOfLine)
return sections
def get_key_grammars(self):
return self.__class__.grammars
class Scheme(Fragment):
@@ -131,38 +251,19 @@ class Scheme(Fragment):
Encapsulates a scheme fragment, which defines what target input sections are placed under.
"""
def __init__(self, name, items):
Fragment.__init__(self, name, items)
self._process_entries()
grammars = {
"entries": KeyGrammar(Fragment.IDENTIFIER.setResultsName("sections") + Suppress("->") +
Fragment.IDENTIFIER.setResultsName("target"), 1, None, True)
}
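# Illustrative only, not part of this change: each value under "entries" is a
# "sections -> target" pair, so a scheme fragment could look like
#
#   [scheme:noflash]
#   entries:
#       text -> iram0_text
#       rodata -> dram0_data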
def _process_entries(self):
processed = set()
def set_key_value(self, key, parse_results):
if key == "entries":
self.entries = set()
for result in parse_results:
self.entries.add((result["sections"], result["target"]))
# Store entries as a set of tuples. Quietly ignores duplicate entries.
for entry in self.entries:
processed.add((entry.sections, entry.target))
self.entries = processed
@staticmethod
def get_fragment_grammar():
name = Fragment.IDENTIFIER
header = Suppress("[") + Suppress("scheme") + Suppress(":") + name.setResultsName("name") + Suppress("]")
# Scheme entry in the form 'sections -> target'
sections = Fragment.IDENTIFIER
target = Fragment.IDENTIFIER
entry = Group(sections.setResultsName("sections") + Suppress("->") + target.setResultsName("target"))
entries = Suppress("entries") + Suppress(":") + Group(OneOrMore(entry)).setResultsName("entries")
scheme = Group(header + entries)
scheme.setParseAction(lambda t: Scheme(t[0].name, t[0].entries))
scheme.ignore("#" + restOfLine)
return scheme
def get_key_grammars(self):
return self.__class__.grammars
class Mapping(Fragment):
@@ -170,67 +271,37 @@ class Mapping(Fragment):
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entities are placed under.
"""
# Name of the default condition entry
DEFAULT_CONDITION = "default"
MAPPING_ALL_OBJECTS = "*"
def __init__(self, archive, entries):
self.archive = archive
def set_key_value(self, key, parse_results):
if key == "archive":
self.archive = parse_results[0]["archive"]
elif key == "entries":
self.entries = set()
# Generate name from archive value by replacing all non-alphanumeric
# characters with underscore
name = Mapping.get_mapping_name_from_archive(self.archive)
Fragment.__init__(self, name, entries)
self._process_entries()
def _create_mappings_set(self, mappings):
mapping_set = set()
for mapping in mappings:
obj = mapping.object
symbol = mapping.symbol
scheme = mapping.scheme
if symbol == "":
for result in parse_results:
obj = None
symbol = None
scheme = None
# Quietly handle duplicate definitions under the same condition
mapping_set.add((obj, symbol, scheme))
try:
obj = result["object"]
except KeyError:
pass
return mapping_set
try:
symbol = result["symbol"]
except KeyError:
pass
def _process_entries(self):
processed = []
try:
scheme = result["scheme"]
except KeyError:
pass
for normal_group in self.entries[0]:
# Get the original string of the condition
condition = next(iter(normal_group.condition.asList())).strip()
mappings = self._create_mappings_set(normal_group[1])
processed.append((condition, mappings))
default_group = self.entries[1]
if len(default_group) > 1:
mappings = self._create_mappings_set(default_group[1])
else:
mappings = self._create_mappings_set(default_group[0])
processed.append(("default", mappings))
self.entries = processed
@staticmethod
def get_mapping_name_from_archive(archive):
return re.sub(r"[^0-9a-zA-Z]+", "_", archive)
@staticmethod
def get_fragment_grammar():
# Match header [mapping]
header = Suppress("[") + Suppress("mapping") + Suppress("]")
self.entries.add((obj, symbol, scheme))
def get_key_grammars(self):
# There are three possible patterns for mapping entries:
# obj:symbol (scheme)
# obj (scheme)
@@ -239,33 +310,22 @@ class Mapping(Fragment):
symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")")
pattern1 = Group(obj + symbol + scheme)
pattern2 = Group(obj + scheme)
pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme)
pattern1 = obj + symbol + scheme
pattern2 = obj + scheme
pattern3 = Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme
mapping_entry = pattern1 | pattern2 | pattern3
entry = pattern1 | pattern2 | pattern3
# To simplify parsing, classify condition-mapping entry groups into two types: normal and default
# A normal grouping is one with a non-default condition. The default grouping is one which contains the
# default condition
mapping_entries = Group(ZeroOrMore(mapping_entry)).setResultsName("mappings")
grammars = {
"archive": KeyGrammar(Fragment.ENTITY.setResultsName("archive"), 1, 1, True),
"entries": KeyGrammar(entry, 1, None, True)
}
normal_condition = Suppress(":") + originalTextFor(SDKConfig.get_expression_grammar())
default_condition = Optional(Suppress(":") + Literal(Mapping.DEFAULT_CONDITION))
return grammars
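# Illustrative only, not part of this change: the three mapping entry
# patterns above correspond to values such as
#
#   entries:
#       croutine (noflash)                          # obj (scheme)
#       event_groups:xEventGroupCreate (noflash)    # obj:symbol (scheme)
#       * (default)                                 # * (scheme)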
normal_group = Group(normal_condition.setResultsName("condition") + mapping_entries)
default_group = Group(default_condition + mapping_entries).setResultsName("default_group")
normal_groups = Group(ZeroOrMore(normal_group)).setResultsName("normal_groups")
# Any mapping fragment definition can have zero or more normal groups and only one default group as the last entry.
archive = Suppress("archive") + Suppress(":") + Fragment.ENTITY.setResultsName("archive")
entries = Suppress("entries") + Suppress(":") + (normal_groups + default_group).setResultsName("entries")
mapping = Group(header + archive + entries)
mapping.setParseAction(lambda t: Mapping(t[0].archive, t[0].entries))
mapping.ignore("#" + restOfLine)
return mapping
FRAGMENT_TYPES = {
"sections": Sections,
"scheme": Scheme,
"mapping": Mapping
}
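# Sketch (assumption, not part of this commit): fragment_type_parse_action
# instantiates fragments by looking up the header type in this registry, so a
# new fragment type would be a Fragment subclass registered here, e.g.
#
#   class Custom(Fragment):
#       grammars = {"entries": KeyGrammar(Fragment.ENTITY, 1, None, True)}
#
#       def set_key_value(self, key, parse_results):
#           self.entries = [r[0] for r in parse_results]
#
#       def get_key_grammars(self):
#           return self.__class__.grammars
#
#   FRAGMENT_TYPES["custom"] = Custom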


@@ -321,7 +321,7 @@ class GenerationModel:
return scheme_dictionary
def generate_rules(self, sdkconfig, sections_infos):
def generate_rules(self, sections_infos):
placement_rules = collections.defaultdict(list)
scheme_dictionary = self._build_scheme_dictionary()
@@ -334,33 +334,18 @@ class GenerationModel:
# Generate rules based on mapping fragments
for mapping in self.mappings.values():
for (condition, entries) in mapping.entries:
condition_true = False
mapping_rules = list()
archive = mapping.archive
for (obj, symbol, scheme_name) in mapping.entries:
try:
if not (obj == Mapping.MAPPING_ALL_OBJECTS and symbol is None and
scheme_name == GenerationModel.DEFAULT_SCHEME):
self._add_mapping_rules(archive, obj, symbol, scheme_name, scheme_dictionary, mapping_rules)
except KeyError:
message = GenerationException.UNDEFINED_REFERENCE + " to scheme '" + scheme_name + "'."
raise GenerationException(message, mapping)
# Only non-default conditions are evaluated against the sdkconfig model
if condition != Mapping.DEFAULT_CONDITION:
try:
condition_true = sdkconfig.evaluate_expression(condition)
except Exception as e:
raise GenerationException(e.message, mapping)
else:
condition_true = True
if condition_true:
mapping_rules = list()
archive = mapping.archive
for (obj, symbol, scheme_name) in entries:
try:
self._add_mapping_rules(archive, obj, symbol, scheme_name, scheme_dictionary, mapping_rules)
except KeyError:
message = GenerationException.UNDEFINED_REFERENCE + " to scheme '" + scheme_name + "'."
raise GenerationException(message, mapping)
all_mapping_rules[mapping.name] = mapping_rules
break # Exit on first condition that evaluates to true
all_mapping_rules[mapping.name] = mapping_rules
# Detect rule conflicts
for mapping_rules in all_mapping_rules.items():
@@ -397,8 +382,7 @@ class GenerationModel:
if intersections and rule_a.maps_same_entities_as(rule_b):
rules_string = str([str(rule_a), str(rule_b)])
message = "Rules " + rules_string + " map sections " + str(list(intersections)) + " into multiple targets."
mapping = self.mappings[Mapping.get_mapping_name_from_archive(archive)]
raise GenerationException(message, mapping)
raise GenerationException(message)
def _create_extra_rules(self, rules):
# This function generates extra rules for symbol specific rules. The reason for generating extra rules is to isolate,
@@ -523,7 +507,7 @@ class TemplateModel:
# Does not match marker syntax
self.members.append(line)
def fill(self, mapping_rules, sdkconfig):
def fill(self, mapping_rules):
for member in self.members:
target = None
try:


@@ -19,10 +19,11 @@ import argparse
import sys
import tempfile
from fragments import FragmentFileModel
from fragments import FragmentFile
from sdkconfig import SDKConfig
from generation import GenerationModel, TemplateModel, SectionsInfo
from common import LdGenFailure
from pyparsing import ParseException, ParseFatalException
def main():
@@ -52,13 +53,11 @@ def main():
argparser.add_argument(
"--config", "-c",
help="Project configuration",
type=argparse.FileType("r"))
help="Project configuration")
argparser.add_argument(
"--kconfig", "-k",
help="IDF Kconfig file",
type=argparse.FileType("r"))
help="IDF Kconfig file")
argparser.add_argument(
"--env", "-e",
@@ -89,15 +88,22 @@ def main():
generation_model = GenerationModel()
sdkconfig = SDKConfig(kconfig_file, config_file, args.env)
for fragment_file in fragment_files:
fragment_file = FragmentFileModel(fragment_file)
try:
fragment_file = FragmentFile(fragment_file, sdkconfig)
except (ParseException, ParseFatalException) as e:
# ParseException is raised on incorrect grammar
# ParseFatalException is raised on correct grammar, but inconsistent contents (ex. duplicate
# keys, key unsupported by fragment, unexpected number of values, etc.)
raise LdGenFailure("failed to parse %s\n%s" % (fragment_file.name, str(e)))
generation_model.add_fragments_from_file(fragment_file)
sdkconfig = SDKConfig(kconfig_file, config_file, args.env)
mapping_rules = generation_model.generate_rules(sdkconfig, sections_infos)
mapping_rules = generation_model.generate_rules(sections_infos)
script_model = TemplateModel(input_file)
script_model.fill(mapping_rules, sdkconfig)
script_model.fill(mapping_rules)
with tempfile.TemporaryFile("w+") as output:
script_model.write(output)


@@ -15,7 +15,7 @@
#
import os
from pyparsing import Word, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc
from pyparsing import Word, alphanums, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc
import sys
try:
@@ -34,11 +34,11 @@ class SDKConfig:
"""
# A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
IDENTIFIER = Word(printables.upper())
IDENTIFIER = Word(alphanums.upper() + "_")
HEX = Combine("0x" + Word(hexnums)).setParseAction(lambda t:int(t[0], 16))
DECIMAL = Combine(Optional(Literal("+") | Literal("-")) + Word(nums)).setParseAction(lambda t:int(t[0]))
LITERAL = Word(printables)
LITERAL = Word(printables.replace(":", ""))
QUOTED_LITERAL = quotedString.setParseAction(removeQuotes)
VALUE = HEX | DECIMAL | LITERAL | QUOTED_LITERAL
@@ -53,8 +53,8 @@ class SDKConfig:
value = " ".join(value.split())
os.environ[name] = value
self.config = kconfiglib.Kconfig(kconfig_file.name)
self.config.load_config(sdkconfig_file.name)
self.config = kconfiglib.Kconfig(kconfig_file)
self.config.load_config(sdkconfig_file)
def evaluate_expression(self, expression):
result = self.config.eval_string(expression)
@@ -64,7 +64,7 @@
elif result == 2: # y
return True
else: # m
raise Exception("Unsupported config expression result.")
raise Exception("unsupported config expression result")
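# Illustrative usage, with assumed file names and config option:
#
#   sdkconfig = SDKConfig("Kconfig", "sdkconfig", [])
#   sdkconfig.evaluate_expression("PERFORMANCE_LEVEL = 3")   # -> True or False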
@staticmethod
def get_expression_grammar():