Mirror of https://github.com/espressif/esp-idf.git
refactor: move ldgen into a separate package
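The commit turns the flat modules under tools/ldgen/ into an ldgen package (tools/ldgen/ldgen/), so ldgen.py and the tests switch from top-level imports to package imports, as the hunks below show. A minimal sketch of the change in import style; the usage line and the 'libfoo.a'/'foo' names are illustrative only and not taken from the commit:

    # Before the move: modules were imported straight from tools/ldgen
    # from entity import Entity
    # from fragments import FragmentFile

    # After the move: the same modules live in the tools/ldgen/ldgen package
    from ldgen.entity import Entity
    from ldgen.fragments import FragmentFile

    # Hypothetical usage: an Entity naming an archive and an object file
    ent = Entity('libfoo.a', 'foo')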
@@ -3033,8 +3033,6 @@ tools/ldgen/output_commands.py
 tools/ldgen/samples/template.ld
 tools/ldgen/sdkconfig.py
 tools/ldgen/test/data/linker_script.ld
-tools/ldgen/test/test_entity.py
-tools/ldgen/test/test_output_commands.py
 tools/mass_mfg/mfg_gen.py
 tools/mkdfu.py
 tools/mkuf2.py
@@ -233,13 +233,14 @@ tools/kconfig_new/test/confgen/test_confgen.py
 tools/kconfig_new/test/confserver/test_confserver.py
 tools/kconfig_new/test/gen_kconfig_doc/test_kconfig_out.py
 tools/kconfig_new/test/gen_kconfig_doc/test_target_visibility.py
-tools/ldgen/fragments.py
-tools/ldgen/generation.py
 tools/ldgen/ldgen.py
-tools/ldgen/ldgen_common.py
-tools/ldgen/linker_script.py
-tools/ldgen/output_commands.py
-tools/ldgen/sdkconfig.py
+tools/ldgen/ldgen/entity.py
+tools/ldgen/ldgen/fragments.py
+tools/ldgen/ldgen/generation.py
+tools/ldgen/ldgen/ldgen_common.py
+tools/ldgen/ldgen/linker_script.py
+tools/ldgen/ldgen/output_commands.py
+tools/ldgen/ldgen/sdkconfig.py
 tools/ldgen/test/test_entity.py
 tools/ldgen/test/test_fragments.py
 tools/ldgen/test/test_generation.py
@@ -13,13 +13,13 @@ import sys
 import tempfile
 from io import StringIO

-from entity import EntityDB
-from fragments import FragmentFile
-from generation import Generation
-from ldgen_common import LdGenFailure
-from linker_script import LinkerScript
+from ldgen.entity import EntityDB
+from ldgen.fragments import FragmentFile
+from ldgen.generation import Generation
+from ldgen.ldgen_common import LdGenFailure
+from ldgen.linker_script import LinkerScript
+from ldgen.sdkconfig import SDKConfig
 from pyparsing import ParseException, ParseFatalException
-from sdkconfig import SDKConfig

 try:
     import confgen
@@ -1,17 +1,6 @@
 #
-# Copyright 2021 Espressif Systems (Shanghai) CO LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
 #

 import collections
@@ -25,7 +14,7 @@ from pyparsing import (Group, Literal, OneOrMore, ParseException, SkipTo, Suppre


 @total_ordering
-class Entity():
+class Entity:
     """
     An entity refers to a library, object, symbol whose input
     sections can be placed or excluded from placement.
@@ -60,7 +49,7 @@ class Entity():
         else:
             raise ValueError("Invalid arguments '(%s, %s, %s)'" % (archive, obj, symbol))

         self.archive = archive
         self.obj = obj
         self.symbol = symbol

@@ -127,7 +116,8 @@ class EntityDB():
         archive_path = (Literal('In archive').suppress() +
                         White().suppress() +
                         # trim the colon and line ending characters from archive_path
-                        restOfLine.setResultsName('archive_path').setParseAction(lambda s, loc, toks: s.rstrip(':\n\r ')))
+                        restOfLine.setResultsName('archive_path').setParseAction(
+                            lambda s, loc, toks: s.rstrip(':\n\r ')))
         parser = archive_path

         results = None
@@ -154,11 +144,10 @@ class EntityDB():
         section_entry = Suppress(Word(nums)) + SkipTo(' ') + Suppress(restOfLine) + \
             Suppress(ZeroOrMore(Word(alphas) + Literal(',')) + Word(alphas))

-        content = Group(object_line + section_start + section_header + Group(OneOrMore(section_entry)).setResultsName('sections'))
+        content = Group(
+            object_line + section_start + section_header + Group(OneOrMore(section_entry)).setResultsName('sections'))
         parser = Group(ZeroOrMore(content)).setResultsName('contents')

-        results = None
-
         try:
             results = parser.parseString(info.content, parseAll=True)
         except ParseException as p:
@@ -192,7 +181,8 @@ class EntityDB():

     def _match_obj(self, archive, obj):
         objs = self.get_objects(archive)
-        match_objs = fnmatch.filter(objs, obj + '.o') + fnmatch.filter(objs, obj + '.*.obj') + fnmatch.filter(objs, obj + '.obj')
+        match_objs = fnmatch.filter(objs, obj + '.o') + fnmatch.filter(objs, obj + '.*.obj') + fnmatch.filter(objs,
+                                                                                                              obj + '.obj')

         if len(match_objs) > 1:
             raise ValueError("Multiple matches for object: '%s: %s': %s" % (archive, obj, str(match_objs)))
@@ -8,11 +8,12 @@ import re
 from collections import namedtuple
 from enum import Enum

-from entity import Entity
 from pyparsing import (Combine, Forward, Group, Keyword, Literal, OneOrMore, Optional, Or, ParseFatalException,
                        Suppress, Word, ZeroOrMore, alphanums, alphas, delimitedList, indentedBlock, nums,
                        originalTextFor, restOfLine)
-from sdkconfig import SDKConfig
+
+from .entity import Entity
+from .sdkconfig import SDKConfig


 class FragmentFile():
@@ -70,38 +71,6 @@ class FragmentFile():
             for tok in toks:
                 expand_conditionals(tok, stmts)

-        def key_body_parsed(pstr, loc, toks):
-            stmts = list()
-            expand_conditionals(toks, stmts)
-
-            if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
-                raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
-                                          (parse_ctx.key_grammar.min, parse_ctx.key))
-
-            if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
-                raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
-                                          (parse_ctx.key_grammar.max, parse_ctx.key))
-
-            try:
-                parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
-            except Exception as e:
-                raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, str(e)))
-            return None
-
-        key = Word(alphanums + '_') + Suppress(':')
-        key_stmt = Forward()
-
-        condition_block = indentedBlock(key_stmt, indent_stack)
-        key_stmts = OneOrMore(condition_block)
-        key_body = Suppress(key) + key_stmts
-        key_body.setParseAction(key_body_parsed)
-
-        condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName('condition')
-        if_condition = Group(Suppress('if') + condition + Suppress(':') + condition_block)
-        elif_condition = Group(Suppress('elif') + condition + Suppress(':') + condition_block)
-        else_condition = Group(Suppress('else') + Suppress(':') + condition_block)
-        conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName('conditional')
-
         def key_parse_action(pstr, loc, toks):
             key = toks[0]

@@ -123,11 +92,42 @@ class FragmentFile():

             return None

+        def key_body_parsed(pstr, loc, toks):
+            stmts = list()
+            expand_conditionals(toks, stmts)
+
+            if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
+                raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
+                                          (parse_ctx.key_grammar.min, parse_ctx.key))
+
+            if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
+                raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
+                                          (parse_ctx.key_grammar.max, parse_ctx.key))
+
+            try:
+                parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
+            except Exception as e:
+                raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, str(e)))
+            return None
+
+        key = (Word(alphanums + '_') + Suppress(':')).setParseAction(key_parse_action)
+        key_stmt = Forward()
+
+        condition_block = indentedBlock(key_stmt, indent_stack)
+        key_stmts = OneOrMore(condition_block)
+        key_body = Suppress(key) + key_stmts
+        key_body.setParseAction(key_body_parsed)
+
+        condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName('condition')
+        if_condition = Group(Suppress('if') + condition + Suppress(':') + condition_block)
+        elif_condition = Group(Suppress('elif') + condition + Suppress(':') + condition_block)
+        else_condition = Group(Suppress('else') + Suppress(':') + condition_block)
+        conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName(
+            'conditional')
+
         def name_parse_action(pstr, loc, toks):
             parse_ctx.fragment.name = toks[0]

-        key.setParseAction(key_parse_action)
-
         ftype = Word(alphas).setParseAction(fragment_type_parse_action)
         fid = Suppress(':') + Word(alphanums + '_.').setResultsName('name')
         fid.setParseAction(name_parse_action)
@@ -135,7 +135,7 @@ class FragmentFile():

         def fragment_parse_action(pstr, loc, toks):
             key_grammars = parse_ctx.fragment.get_key_grammars()
-            required_keys = set([k for (k,v) in key_grammars.items() if v.required])
+            required_keys = set([k for (k, v) in key_grammars.items() if v.required])
             present_keys = required_keys.intersection(set(parse_ctx.keys))
             if present_keys != required_keys:
                 raise ParseFatalException(pstr, loc, 'required keys %s for fragment not found' %
@@ -155,7 +155,8 @@ class FragmentFile():
         fragment.setParseAction(fragment_parse_action)
         fragment.ignore('#' + restOfLine)

-        deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName('value')
+        deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName(
+            'value')

         fragment_stmt << (Group(deprecated_mapping) | Group(fragment) | Group(fragment_conditional))

@@ -164,8 +165,7 @@ class FragmentFile():
             expand_conditionals(toks, stmts)
             return stmts

-        parser = ZeroOrMore(fragment_stmt)
-        parser.setParseAction(fragment_stmt_parsed)
+        parser = ZeroOrMore(fragment_stmt).setParseAction(fragment_stmt_parsed)

         self.fragments = parser.parseFile(fragment_file, parseAll=True)

@@ -173,7 +173,7 @@ class FragmentFile():
             fragment.path = path


-class Fragment():
+class Fragment:
     """
     Base class for a fragment that can be parsed from a fragment file. All fragments
     share the common grammar:
@@ -242,6 +242,7 @@ class Sections(Fragment):
     Utility function that returns a list of sections given a sections fragment entry,
     with the '+' notation and symbol concatenation handled automatically.
     """
+
     @staticmethod
     def get_section_data_from_entry(sections_entry, symbol=None):
         if not symbol:
@@ -504,7 +505,8 @@ class Mapping(Fragment):
                          Optional(Suppress(';') + delimitedList(section_target_flags).setResultsName('sections_target_flags')))

         grammars = {
-            'archive': Fragment.KeyValue(Or([Fragment.ENTITY, Word(Entity.ALL)]).setResultsName('archive'), 1, 1, True),
+            'archive': Fragment.KeyValue(Or([Fragment.ENTITY, Word(Entity.ALL)]).setResultsName('archive'), 1, 1,
+                                         True),
             'entries': Fragment.KeyValue(entry, 0, None, True)
         }

@@ -570,12 +572,13 @@ class DeprecatedMapping():
         fragment.entries = set()
         condition_true = False
         for entries in toks[0].entries[0]:
            condition = next(iter(entries.condition.asList())).strip()
            condition_val = sdkconfig.evaluate_expression(condition)

            if condition_val:
                for entry in entries[1]:
-                    fragment.entries.add((entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme))
+                    fragment.entries.add(
+                        (entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme))
                condition_true = True
                break

@@ -591,7 +594,7 @@ class DeprecatedMapping():
            fragment.entries.add(('*', None, 'default'))

        dep_warning = str(ParseFatalException(pstr, loc,
                          'Warning: Deprecated old-style mapping fragment parsed in file %s.' % fragment_file))

        print(dep_warning)
        return fragment
@@ -8,13 +8,13 @@ import fnmatch
 import itertools
 from collections import namedtuple

-from entity import Entity
-from fragments import Mapping, Scheme, Sections
-from ldgen_common import LdGenFailure
-from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
+from .entity import Entity
+from .fragments import Mapping, Scheme, Sections
+from .ldgen_common import LdGenFailure
+from .output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress


-class Placement():
+class Placement:
     """
     A Placement is an assignment of an entity's input sections to a target
     in the output linker script - a precursor to the input section description.
@@ -134,12 +134,12 @@ class EntityNode():

     def add_child(self, entity):
         child_specificity = self.entity.specificity.value + 1
-        assert(child_specificity <= Entity.Specificity.SYMBOL.value)
+        assert (child_specificity <= Entity.Specificity.SYMBOL.value)
         name = entity[Entity.Specificity(child_specificity)]
-        assert(name and name != Entity.ALL)
+        assert (name and name != Entity.ALL)

         child = [c for c in self.children if c.name == name]
-        assert(len(child) <= 1)
+        assert (len(child) <= 1)

         if not child:
             child = self.child_t(self, name)
@@ -174,7 +174,7 @@ class EntityNode():
         for sections in self.get_output_sections():
             placement = self.placements[sections]
             if placement.is_significant():
-                assert(placement.node == self)
+                assert (placement.node == self)

                 keep = False
                 sort = None
@@ -202,7 +202,8 @@ class EntityNode():
                 placement_sections = frozenset(placement.sections)
                 command_sections = sections if sections == placement_sections else placement_sections

-                command = InputSectionDesc(placement.node.entity, command_sections, [e.node.entity for e in placement.exclusions], keep, sort)
+                command = InputSectionDesc(placement.node.entity, command_sections,
+                                           [e.node.entity for e in placement.exclusions], keep, sort)
                 commands[placement.target].append(command)

         # Generate commands for intermediate, non-explicit exclusion placements here, so that they can be enclosed by
@@ -248,6 +249,7 @@ class SymbolNode(EntityNode):
     Entities at depth=3. Represents entities with archive, object
     and symbol specified.
     """
+
     def __init__(self, parent, name):
         EntityNode.__init__(self, parent, name)
         self.entity = Entity(self.parent.parent.name, self.parent.name)
@@ -270,6 +272,7 @@ class ObjectNode(EntityNode):
     An intermediate placement on this node is created, if one does not exist,
     and is the one excluded from its basis placement.
     """
+
     def __init__(self, parent, name):
         EntityNode.__init__(self, parent, name)
         self.child_t = SymbolNode
@@ -334,6 +337,7 @@ class ArchiveNode(EntityNode):
     """
     Entities at depth=1. Represents entities with archive specified.
     """
+
     def __init__(self, parent, name):
         EntityNode.__init__(self, parent, name)
         self.child_t = ObjectNode
@@ -345,6 +349,7 @@ class RootNode(EntityNode):
     Single entity at depth=0. Represents entities with no specific members
     specified.
     """
+
     def __init__(self):
         EntityNode.__init__(self, None, Entity.ALL)
         self.child_t = ArchiveNode
@@ -433,9 +438,9 @@ class Generation:
                    entity = Entity(archive, obj, symbol)

                    # Check the entity exists
-                    if (self.check_mappings and
-                            entity.specificity.value > Entity.Specificity.ARCHIVE.value and
-                            mapping.name not in self.check_mapping_exceptions):
+                    if (self.check_mappings
+                            and entity.specificity.value > Entity.Specificity.ARCHIVE.value
+                            and mapping.name not in self.check_mapping_exceptions):
                        if not entities.check_exists(entity):
                            message = "'%s' not found" % str(entity)
                            raise GenerationException(message, mapping)
@@ -445,9 +450,8 @@ class Generation:
                        # Check if all section->target defined in the current
                        # scheme.
                        for (s, t, f) in flags:
-                            if (t not in scheme_dictionary[scheme_name].keys() or
-                                    s not in [_s.name for _s in scheme_dictionary[scheme_name][t]]):
-
+                            if (t not in scheme_dictionary[scheme_name].keys()
+                                    or s not in [_s.name for _s in scheme_dictionary[scheme_name][t]]):
                                message = "%s->%s not defined in scheme '%s'" % (s, t, scheme_name)
                                raise GenerationException(message, mapping)
                        else:
@@ -517,8 +521,6 @@ class Generation:

     def add_fragments_from_file(self, fragment_file):
         for fragment in fragment_file.fragments:
-            dict_to_append_to = None
-
             if isinstance(fragment, Mapping) and fragment.deprecated and fragment.name in self.mappings.keys():
                 self.mappings[fragment.name].entries |= fragment.entries
             else:
@@ -533,7 +535,8 @@ class Generation:
                if fragment.name in dict_to_append_to.keys():
                    stored = dict_to_append_to[fragment.name].path
                    new = fragment.path
-                    message = "Duplicate definition of fragment '%s' found in %s and %s." % (fragment.name, stored, new)
+                    message = "Duplicate definition of fragment '%s' found in %s and %s." % (
+                        fragment.name, stored, new)
                    raise GenerationException(message)

                dict_to_append_to[fragment.name] = fragment
tools/ldgen/ldgen/ldgen_common.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+#
+# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
+#
+
+
+class LdGenFailure(RuntimeError):
+    """
+    Parent class for any ldgen runtime failure which is due to input data
+    """
+
+    def __init__(self, message):
+        super(LdGenFailure, self).__init__(message)
@@ -6,10 +6,11 @@
 import collections
 import os

-from fragments import Fragment
-from generation import GenerationException
 from pyparsing import ParseException, Suppress, White

+from .fragments import Fragment
+from .generation import GenerationException
+

 class LinkerScript:
     """
@@ -1,20 +1,9 @@
 #
-# Copyright 2021 Espressif Systems (Shanghai) CO LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
 #

-from entity import Entity
+from .entity import Entity

 # Contains classes for output section commands referred to in
 # https://www.acrc.bris.ac.uk/acrc/RedHat/rhel-ld-en-4/sections.html#OUTPUT-SECTION-DESCRIPTION.
@@ -76,7 +65,7 @@ class InputSectionDesc():
     """

     def __init__(self, entity, sections, exclusions=None, keep=False, sort=None):
-        assert(entity.specificity != Entity.Specificity.SYMBOL)
+        assert (entity.specificity != Entity.Specificity.SYMBOL)

         self.entity = entity
         self.sections = set(sections)
@@ -84,8 +73,8 @@ class InputSectionDesc():
         self.exclusions = set()

         if exclusions:
-            assert(not [e for e in exclusions if e.specificity == Entity.Specificity.SYMBOL or
-                        e.specificity == Entity.Specificity.NONE])
+            assert (not [e for e in exclusions if e.specificity == Entity.Specificity.SYMBOL or
+                         e.specificity == Entity.Specificity.NONE])
             self.exclusions = set(exclusions)
         else:
             self.exclusions = set()
@@ -1,17 +1,6 @@
 #
-# Copyright 2021 Espressif Systems (Shanghai) CO LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
 #

 import kconfiglib
@@ -28,8 +17,8 @@ class SDKConfig:
     # A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
     IDENTIFIER = Word(alphanums.upper() + '_')

-    HEX = Combine('0x' + Word(hexnums)).setParseAction(lambda t:int(t[0], 16))
-    DECIMAL = Combine(Optional(Literal('+') | Literal('-')) + Word(nums)).setParseAction(lambda t:int(t[0]))
+    HEX = Combine('0x' + Word(hexnums)).setParseAction(lambda t: int(t[0], 16))
+    DECIMAL = Combine(Optional(Literal('+') | Literal('-')) + Word(nums)).setParseAction(lambda t: int(t[0]))
     LITERAL = Word(printables.replace(':', ''))
     QUOTED_LITERAL = quotedString.setParseAction(removeQuotes)

@@ -66,8 +55,8 @@ class SDKConfig:
        condition = Group(Optional('(').suppress() + test + Optional(')').suppress())

        grammar = infixNotation(condition, [
            ('!', 1, opAssoc.RIGHT),
            ('&&', 2, opAssoc.LEFT),
            ('||', 2, opAssoc.LEFT)])

        return grammar
@@ -1,23 +0,0 @@
-#
-# Copyright 2021 Espressif Systems (Shanghai) CO LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-class LdGenFailure(RuntimeError):
-    """
-    Parent class for any ldgen runtime failure which is due to input data
-    """
-    def __init__(self, message):
-        super(LdGenFailure, self).__init__(message)
@@ -1,29 +1,18 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright 2018-2020 Espressif Systems (Shanghai) CO LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# SPDX-FileCopyrightText: 2018-2021 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
 #

 import sys
 import unittest

 try:
-    from entity import Entity, EntityDB
+    from ldgen.entity import Entity, EntityDB
 except ImportError:
     sys.path.append('../')
-    from entity import Entity, EntityDB
+    from ldgen.entity import Entity, EntityDB


 class EntityTest(unittest.TestCase):
@@ -13,12 +13,12 @@ from io import StringIO
 from pyparsing import ParseException, ParseFatalException, Word, alphanums

 try:
-    from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
-    from sdkconfig import SDKConfig
+    from ldgen.fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
+    from ldgen.sdkconfig import SDKConfig
 except ImportError:
     sys.path.append('../')
-    from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
-    from sdkconfig import SDKConfig
+    from ldgen.fragments import FRAGMENT_TYPES, Fragment, FragmentFile, Mapping
+    from ldgen.sdkconfig import SDKConfig


 class SampleFragment(Fragment):
@@ -12,18 +12,18 @@ import tempfile
 import unittest

 try:
-    from generation import Generation, GenerationException
+    from ldgen.generation import Generation, GenerationException
 except ImportError:
     sys.path.append('../')
-    from generation import Generation, GenerationException
+    from ldgen.generation import Generation, GenerationException

 from io import StringIO

-from entity import Entity, EntityDB
-from fragments import FragmentFile
-from linker_script import LinkerScript
-from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
-from sdkconfig import SDKConfig
+from ldgen.entity import Entity, EntityDB
+from ldgen.fragments import FragmentFile
+from ldgen.linker_script import LinkerScript
+from ldgen.output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
+from ldgen.sdkconfig import SDKConfig

 ROOT = Entity('*')

@@ -1,30 +1,20 @@
 #!/usr/bin/env python
 #
-# Copyright 2021 Espressif Systems (Shanghai) CO LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
 #

 import sys
 import unittest

 try:
-    from output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
+    from ldgen.entity import Entity
+    from ldgen.output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress
 except ImportError:
     sys.path.append('../')
-    from output_commands import InputSectionDesc, SymbolAtAddress, AlignAtAddress
+    from ldgen.entity import Entity
+    from ldgen.output_commands import AlignAtAddress, InputSectionDesc, SymbolAtAddress

-from entity import Entity

 SECTIONS = ['.text', '.text.*', '.literal', '.literal.*']
