Merge branch 'feature/gdbinit-files-generation-update' into 'master'

gdbinit files generation update

Closes IDF-11667 and IDF-11692

See merge request espressif/esp-idf!41549
This commit is contained in:
Alexey Lapshin
2025-09-20 17:35:00 +04:00
6 changed files with 83 additions and 83 deletions

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
#
# SPDX-License-Identifier: Apache-2.0
#
@@ -12,26 +12,27 @@ from typing import Any
try:
from esp_coredump import CoreDump
except ImportError:
raise ModuleNotFoundError('No module named "esp_coredump" please install esp_coredump by running '
'"python -m pip install esp-coredump"')
raise ModuleNotFoundError(
'No module named "esp_coredump" please install esp_coredump by running "python -m pip install esp-coredump"'
)
from esp_coredump.cli_ext import parser
def get_prefix_map_gdbinit_path(prog_path): # type: (str) -> Any
def get_prefix_map_gdbinit_path(prog_path: str) -> Any:
build_dir = os.path.abspath(os.path.dirname(prog_path))
desc_path = os.path.abspath(os.path.join(build_dir, 'project_description.json'))
if not os.path.isfile(desc_path):
logging.warning('%s does not exist. Please build the app with "idf.py build"', desc_path)
return ''
with open(desc_path, 'r', encoding='utf-8') as f:
with open(desc_path, encoding='utf-8') as f:
project_desc = json.load(f)
return project_desc.get('debug_prefix_map_gdbinit')
return project_desc['gdbinit_files']['02_prefix_map']
def main(): # type: () -> None
def main() -> None:
args = parser.parse_args()
if args.debug == 0:

View File

@@ -45,13 +45,10 @@ function(__generate_prefix_map compile_options_var)
get_filename_component(compiler_sysroot "${compiler_sysroot}/.." REALPATH)
list(APPEND compile_options "-fdebug-prefix-map=${compiler_sysroot}=/TOOLCHAIN")
string(APPEND gdbinit_file_lines "set substitute-path /TOOLCHAIN ${compiler_sysroot}\n")
file(WRITE "${BUILD_DIR}/prefix_map_gdbinit" "${gdbinit_file_lines}") # TODO IDF-11667
idf_build_set_property(DEBUG_PREFIX_MAP_GDBINIT "${gdbinit_path}")
else()
set(gdbinit_file_lines "# There is no prefix map defined for the project.\n")
endif()
# Write prefix_map_gdbinit file even if it is empty.
# Write prefix_map file even if it is empty.
file(MAKE_DIRECTORY ${gdbinit_dir})
file(WRITE "${gdbinit_path}" "${gdbinit_file_lines}")
idf_build_set_property(GDBINIT_FILES_PREFIX_MAP "${gdbinit_path}")

View File

@@ -351,7 +351,6 @@ function(__project_info test_components)
include(${sdkconfig_cmake})
idf_build_get_property(COMPONENT_KCONFIGS KCONFIGS)
idf_build_get_property(COMPONENT_KCONFIGS_PROJBUILD KCONFIG_PROJBUILDS)
idf_build_get_property(debug_prefix_map_gdbinit DEBUG_PREFIX_MAP_GDBINIT)
__generate_gdbinit()
idf_build_get_property(gdbinit_files_prefix_map GDBINIT_FILES_PREFIX_MAP)

View File

@@ -29,7 +29,6 @@
"build_component_paths" : ${build_component_paths_json},
"build_component_info" : ${build_component_info_json},
"all_component_info" : ${all_component_info_json},
"debug_prefix_map_gdbinit": "${debug_prefix_map_gdbinit}",
"gdbinit_files": {
"01_symbols": "${gdbinit_files_symbols}",
"02_prefix_map": "${gdbinit_files_prefix_map}",

View File

@@ -10,11 +10,6 @@ import threading
import time
from threading import Thread
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
from click import INT
from click.core import Context
@@ -31,7 +26,7 @@ from idf_py_actions.tools import get_sdkconfig_value
from idf_py_actions.tools import yellow_print
def chip_rev_to_int(chip_rev: Optional[str]) -> Union[int, None]:
def chip_rev_to_int(chip_rev: str | None) -> int | None:
# The chip rev will be derived from the elf file if none are returned.
# The chip rev must be supplied for coredump files generated with idf versions less than 5.1 in order to load
# rom elf file.
@@ -43,11 +38,11 @@ def chip_rev_to_int(chip_rev: Optional[str]) -> Union[int, None]:
return major * 100 + minor
def action_extensions(base_actions: Dict, project_path: str) -> Dict:
def action_extensions(base_actions: dict, project_path: str) -> dict:
OPENOCD_OUT_FILE = 'openocd_out.txt'
GDBGUI_OUT_FILE = 'gdbgui_out.txt'
# Internal dictionary of currently active processes, threads and their output files
processes: Dict = {'threads_to_join': [], 'allow_hints': True}
processes: dict = {'threads_to_join': [], 'allow_hints': True}
def _print_hints(file_name: str) -> None:
if not processes['allow_hints']:
@@ -61,7 +56,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
sys.stdout.flush()
print(hint, file=sys.stderr)
def _check_openocd_errors(fail_if_openocd_failed: Dict, target: str, ctx: Context) -> None:
def _check_openocd_errors(fail_if_openocd_failed: dict, target: str, ctx: Context) -> None:
if fail_if_openocd_failed:
if 'openocd' in processes and processes['openocd'] is not None:
p = processes['openocd']
@@ -69,9 +64,9 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
# watch OpenOCD (for 5x500ms) to check if it hasn't terminated or outputs an error
for _ in range(5):
if p.poll() is not None:
print('OpenOCD exited with {}'.format(p.poll()))
print(f'OpenOCD exited with {p.poll()}')
break
with open(name, 'r', encoding='utf-8') as f:
with open(name, encoding='utf-8') as f:
content = f.read()
if re.search(r'Listening on port \d+ for gdb connections', content):
# expect OpenOCD has started successfully - stop watching
@@ -79,10 +74,10 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
time.sleep(0.5)
# OpenOCD exited or is not listening -> print full log and terminate
with open(name, 'r', encoding='utf-8') as f:
with open(name, encoding='utf-8') as f:
print(f.read())
raise FatalError('Action "{}" failed due to errors in OpenOCD'.format(target), ctx)
raise FatalError(f'Action "{target}" failed due to errors in OpenOCD', ctx)
def _terminate_async_target(target: str) -> None:
if target in processes and processes[target] is not None:
@@ -103,16 +98,16 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
_print_hints(processes[target + '_outfile_name'])
except Exception as e:
print(e)
print('Failed to close/kill {}'.format(target))
print(f'Failed to close/kill {target}')
processes[target] = None # to indicate this has ended
def _get_espcoredump_instance(
ctx: Context,
args: PropertyDict,
gdb_timeout_sec: Optional[int] = None,
core: Optional[str] = None,
chip_rev: Optional[str] = None,
save_core: Optional[str] = None,
gdb_timeout_sec: int | None = None,
core: str | None = None,
chip_rev: str | None = None,
save_core: str | None = None,
) -> CoreDump:
ensure_build_directory(args, ctx.info_name)
project_desc = get_project_desc(args, ctx)
@@ -130,7 +125,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
espcoredump_kwargs['chip_rev'] = chip_rev_to_int(chip_rev)
# for reproducible builds
extra_gdbinit_file = project_desc.get('debug_prefix_map_gdbinit', None)
extra_gdbinit_file = project_desc['gdbinit_files']['02_prefix_map']
if extra_gdbinit_file:
espcoredump_kwargs['extra_gdbinit_file'] = extra_gdbinit_file
@@ -202,13 +197,13 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
name = processes[target + '_outfile_name']
pos = 0
while True:
with open(name, 'r', encoding='utf-8') as f:
with open(name, encoding='utf-8') as f:
f.seek(pos)
for line in f:
print(line.rstrip())
pos = f.tell()
if p.poll() is not None:
print('"{}" exited with {}'.format(target, p.poll()))
print(f'"{target}" exited with {p.poll()}')
break
time.sleep(0.5)
except KeyboardInterrupt:
@@ -220,12 +215,12 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
desc_path = os.path.join(args.build_dir, 'project_description.json')
if not os.path.exists(desc_path):
ensure_build_directory(args, ctx.info_name)
with open(desc_path, 'r', encoding='utf-8') as f:
with open(desc_path, encoding='utf-8') as f:
project_desc = json.load(f)
return project_desc
def openocd(
action: str, ctx: Context, args: PropertyDict, openocd_scripts: Optional[str], openocd_commands: str
action: str, ctx: Context, args: PropertyDict, openocd_scripts: str | None, openocd_commands: str
) -> None:
"""
Execute openocd as external tool
@@ -239,11 +234,11 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
openocd_arguments = project_desc.get('debug_arguments_openocd', '')
print(
'Note: OpenOCD cfg not found (via env variable OPENOCD_COMMANDS nor as a --openocd-commands argument)\n'
'OpenOCD arguments default to: "{}"'.format(openocd_arguments)
f'OpenOCD arguments default to: "{openocd_arguments}"'
)
# script directory is taken from the environment by OpenOCD, update only if command line arguments to override
if openocd_scripts is not None:
openocd_arguments += ' -s {}'.format(openocd_scripts)
openocd_arguments += f' -s {openocd_scripts}'
local_dir = project_desc['build_dir']
args = ['openocd'] + shlex.split(openocd_arguments)
openocd_out_name = os.path.join(local_dir, OPENOCD_OUT_FILE)
@@ -259,11 +254,9 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
processes['openocd'] = process
processes['openocd_outfile'] = openocd_out
processes['openocd_outfile_name'] = openocd_out_name
print('OpenOCD started as a background task {}'.format(process.pid))
print(f'OpenOCD started as a background task {process.pid}')
def get_gdb_args(
project_desc: Dict[str, Any], gdb_x: Tuple, gdb_ex: Tuple, gdb_commands: Optional[str]
) -> List[str]:
def get_gdb_args(project_desc: dict[str, Any], gdb_x: tuple, gdb_ex: tuple, gdb_commands: str | None) -> list[str]:
# check if the application was built and ELF file is in place.
app_elf = os.path.join(project_desc.get('build_dir', ''), project_desc.get('app_elf', ''))
if not os.path.exists(app_elf):
@@ -292,7 +285,10 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
gdb_args.append(f'-x={gdb_x_list[gdb_x_index]}')
gdb_x_list.pop(gdb_x_index)
continue
if name == 'connect' and gdb_x_list: # TODO IDF-11692
# If the user provides a gdbinit file with name not in the "gdbinit_files" list,
# we assume the connection logic is defined within it.
# Otherwise, the configuration may be invalid.
if name == 'connect' and gdb_x_list:
continue
gdb_args.append(f'-x={path}')
# append user-defined gdbinit files
@@ -309,7 +305,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
return gdb_args
def _get_gdbgui_version(ctx: Context) -> Tuple[int, ...]:
def _get_gdbgui_version(ctx: Context) -> tuple[int, ...]:
subprocess_success = False
try:
completed_process = subprocess.run(
@@ -337,10 +333,10 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
action: str,
ctx: Context,
args: PropertyDict,
gdbgui_port: Optional[str],
gdbinit: Tuple,
ex: Tuple,
gdb_commands: Optional[str],
gdbgui_port: str | None,
gdbinit: tuple,
ex: tuple,
gdb_commands: str | None,
require_openocd: bool,
) -> None:
"""
@@ -388,10 +384,10 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
processes['gdbgui'] = process
processes['gdbgui_outfile'] = gdbgui_out
processes['gdbgui_outfile_name'] = gdbgui_out_name
print('gdbgui started as a background task {}'.format(process.pid))
print(f'gdbgui started as a background task {process.pid}')
_check_openocd_errors(fail_if_openocd_failed, action, ctx)
def global_callback(ctx: Context, global_args: PropertyDict, tasks: List) -> None:
def global_callback(ctx: Context, global_args: PropertyDict, tasks: list) -> None:
def move_to_front(task_name: str) -> None:
for index, task in enumerate(tasks):
if task.name == task_name:
@@ -422,8 +418,8 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
action: str,
ctx: Context,
args: PropertyDict,
gdbinit: Tuple,
ex: Tuple,
gdbinit: tuple,
ex: tuple,
gdb_commands: str,
require_openocd: bool,
) -> None:
@@ -437,10 +433,10 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
ctx: Context,
args: PropertyDict,
batch: bool,
gdb_tui: Optional[int],
gdbinit: Tuple,
ex: Tuple,
gdb_commands: Optional[str],
gdb_tui: int | None,
gdbinit: tuple,
ex: tuple,
gdb_commands: str | None,
require_openocd: bool,
) -> None:
"""
@@ -484,9 +480,9 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
ctx: Context,
args: PropertyDict,
gdb_timeout_sec: int,
core: Optional[str] = None,
chip_rev: Optional[str] = None,
save_core: Optional[str] = None,
core: str | None = None,
chip_rev: str | None = None,
save_core: str | None = None,
) -> None:
espcoredump = _get_espcoredump_instance(
ctx=ctx, args=args, gdb_timeout_sec=gdb_timeout_sec, core=core, chip_rev=chip_rev, save_core=save_core
@@ -498,9 +494,9 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict:
action: str,
ctx: Context,
args: PropertyDict,
core: Optional[str] = None,
chip_rev: Optional[str] = None,
save_core: Optional[str] = None,
core: str | None = None,
chip_rev: str | None = None,
save_core: str | None = None,
) -> None:
espcoredump = _get_espcoredump_instance(ctx=ctx, args=args, core=core, chip_rev=chip_rev, save_core=save_core)

View File

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# This test checks the behavior of reproducible builds option.
import logging
@@ -8,17 +8,18 @@ import subprocess
from pathlib import Path
import pytest
from test_build_system_helpers import append_to_file
from test_build_system_helpers import bin_files_differ
from test_build_system_helpers import BOOTLOADER_BINS
from test_build_system_helpers import IdfPyFunc
from test_build_system_helpers import append_to_file
from test_build_system_helpers import bin_files_differ
@pytest.mark.parametrize(
'app_name', [
'app_name',
[
pytest.param('blink', marks=[pytest.mark.test_app_copy('examples/get-started/blink')]),
pytest.param('blecent', marks=[pytest.mark.test_app_copy('examples/bluetooth/nimble/blecent')]),
]
],
)
def test_reproducible_builds(app_name: str, idf_py: IdfPyFunc, test_app_copy: Path) -> None:
append_to_file(test_app_copy / 'sdkconfig', 'CONFIG_APP_REPRODUCIBLE_BUILD=y')
@@ -29,11 +30,8 @@ def test_reproducible_builds(app_name: str, idf_py: IdfPyFunc, test_app_copy: Pa
idf_py('-B', str(build_first), 'build')
elf_file = build_first / f'{app_name}.elf'
logging.info(f'Checking that various paths are not included in the ELF file')
strings_output = subprocess.check_output(
['xtensa-esp32-elf-strings', str(elf_file)],
encoding='utf-8'
)
logging.info('Checking that various paths are not included in the ELF file')
strings_output = subprocess.check_output(['xtensa-esp32-elf-strings', str(elf_file)], encoding='utf-8')
idf_path = os.environ['IDF_PATH']
assert str(idf_path) not in strings_output, f'{idf_path} found in {elf_file}'
assert str(test_app_copy) not in strings_output, f'{test_app_copy} found in {elf_file}'
@@ -45,7 +43,7 @@ def test_reproducible_builds(app_name: str, idf_py: IdfPyFunc, test_app_copy: Pa
logging.info(f'Building in {build_second} directory')
idf_py('-B', str(build_second), 'build')
logging.info(f'Comparing build artifacts')
logging.info('Comparing build artifacts')
artifacts_to_check = [
f'build/{app_name}.map',
f'build/{app_name}.elf',
@@ -58,15 +56,25 @@ def test_reproducible_builds(app_name: str, idf_py: IdfPyFunc, test_app_copy: Pa
assert not bin_files_differ(path_first, path_second), f'{path_first} and {path_second} differ'
logging.info(f'Checking that GDB works with CONFIG_APP_REPRODUCIBLE_BUILD=y')
gdb_output = subprocess.check_output([
'xtensa-esp32-elf-gdb',
'--batch', '--quiet',
'-x', f'{build_first}/prefix_map_gdbinit',
'-ex', 'set logging enabled',
'-ex', 'set pagination off',
'-ex', 'list app_main',
str(elf_file)
], encoding='utf-8', stderr=subprocess.STDOUT, cwd=str(build_first))
logging.info('Checking that GDB works with CONFIG_APP_REPRODUCIBLE_BUILD=y')
gdb_output = subprocess.check_output(
[
'xtensa-esp32-elf-gdb',
'--batch',
'--quiet',
'-x',
f'{build_first}/gdbinit/prefix_map',
'-ex',
'set logging enabled',
'-ex',
'set pagination off',
'-ex',
'list app_main',
str(elf_file),
],
encoding='utf-8',
stderr=subprocess.STDOUT,
cwd=str(build_first),
)
assert 'No such file or directory' not in gdb_output, f'GDB failed to find app_main in {elf_file}:\n{gdb_output}'