Mirror of https://github.com/espressif/esp-idf.git, synced 2025-08-02 20:24:32 +02:00

Merge branch 'feature/refactor_espcoredump' into 'master'

Feature: Refactor espcoredump

See merge request espressif/esp-idf!11525
@@ -3,7 +3,7 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.3.0
+    rev: v3.4.0
     hooks:
       - id: trailing-whitespace
         # note: whitespace exclusions use multiline regex, see https://pre-commit.com/#regular-expressions
@@ -26,11 +26,17 @@ repos:
         files: 'tools/ci/executable-list.txt'
       - id: mixed-line-ending
         args: ['-f=lf']
+      - id: double-quote-string-fixer
   - repo: https://gitlab.com/pycqa/flake8
     rev: 3.8.4
     hooks:
       - id: flake8
         args: ['--config=.flake8', '--tee', '--benchmark']
+  - repo: https://github.com/pycqa/isort
+    rev: 5.6.4
+    hooks:
+      - id: isort
+        name: isort (python)
   - repo: local
     hooks:
       - id: check-executables
@@ -505,7 +505,7 @@ variable-naming-style=snake_case
 max-args=5

 # Maximum number of attributes for a class (see R0902).
-max-attributes=7
+max-attributes=12

 # Maximum number of boolean expressions in an if statement (see R0916).
 max-bool-expr=5
components/espcoredump/corefile/__init__.py (new file, 68 lines)
@@ -0,0 +1,68 @@
#
# Copyright 2021 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

__version__ = '0.4-dev'

from abc import abstractmethod


class ESPCoreDumpError(RuntimeError):
    pass


class ESPCoreDumpLoaderError(ESPCoreDumpError):
    pass


class _TargetMethodsBase(object):
    @staticmethod
    @abstractmethod
    def tcb_is_sane(tcb_addr, tcb_size):
        """
        Check whether the TCB address is valid
        """
        return False

    @staticmethod
    @abstractmethod
    def stack_is_sane(sp):
        """
        Check whether the stack address is valid
        """
        return False

    @staticmethod
    @abstractmethod
    def addr_is_fake(addr):
        """
        Check whether the address lies in the fake address area
        """
        return False


class _ArchMethodsBase(object):
    @staticmethod
    @abstractmethod
    def get_registers_from_stack(data, grows_down):
        """
        Returns a list of registers (in GDB format) from the stack frame
        """
        return [], {}

    @staticmethod
    @abstractmethod
    def build_prstatus_data(tcb_addr, task_regs):
        return b''
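The two private base classes above are the extension points of the refactored package: loader.py (further down) selects concrete implementations from the chip and architecture recorded in the core dump header, and xtensa.py provides the real ESP32 ones. As a rough, hypothetical sketch only, assuming the package is importable as ``corefile`` (the class name and address window below are invented for illustration and are not part of this commit), a new target port would subclass it like this:

# Hypothetical sketch, not part of the commit; the RAM window is a placeholder.
from corefile import _TargetMethodsBase


class _TargetMethodsSomeChip(_TargetMethodsBase):
    @staticmethod
    def tcb_is_sane(tcb_addr, tcb_size):
        # Accept TCBs only inside a (made-up) internal RAM window
        return 0x10000000 <= tcb_addr and (tcb_addr + tcb_size) <= 0x10080000

    @staticmethod
    def stack_is_sane(sp):
        # Stack pointers must fall inside the same window
        return 0x10000000 <= sp < 0x10080000

    @staticmethod
    def addr_is_fake(addr):
        # Anything outside the addressable range is treated as fake/corrupted
        return addr >= 0x80000000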
components/espcoredump/corefile/elf.py (new file, 373 lines)
@@ -0,0 +1,373 @@
#
# Copyright 2021 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import hashlib
import os

from construct import (AlignedStruct, Bytes, Const, GreedyRange, Int16ul, Int32ul, Padding, Pointer, Sequence, Struct,
                       this)

# The following structs are based on the spec
# https://refspecs.linuxfoundation.org/elf/elf.pdf
# and on the source code
# IDF_PATH/components/espcoredump/include_core_dump/elf.h

ElfIdentification = Struct(
    'EI_MAG' / Const(b'\x7fELF'),
    'EI_CLASS' / Const(b'\x01'),  # ELFCLASS32
    'EI_DATA' / Const(b'\x01'),  # ELFDATA2LSB
    'EI_VERSION' / Const(b'\x01'),  # EV_CURRENT
    Padding(9),
)

ElfHeader = Struct(
    'e_ident' / ElfIdentification,
    'e_type' / Int16ul,
    'e_machine' / Int16ul,
    'e_version' / Int32ul,
    'e_entry' / Int32ul,
    'e_phoff' / Int32ul,
    'e_shoff' / Int32ul,
    'e_flags' / Int32ul,
    'e_ehsize' / Int16ul,
    'e_phentsize' / Int16ul,
    'e_phnum' / Int16ul,
    'e_shentsize' / Int16ul,
    'e_shnum' / Int16ul,
    'e_shstrndx' / Int16ul,
)

SectionHeader = Struct(
    'sh_name' / Int32ul,
    'sh_type' / Int32ul,
    'sh_flags' / Int32ul,
    'sh_addr' / Int32ul,
    'sh_offset' / Int32ul,
    'sh_size' / Int32ul,
    'sh_link' / Int32ul,
    'sh_info' / Int32ul,
    'sh_addralign' / Int32ul,
    'sh_entsize' / Int32ul,
)

ProgramHeader = Struct(
    'p_type' / Int32ul,
    'p_offset' / Int32ul,
    'p_vaddr' / Int32ul,
    'p_paddr' / Int32ul,
    'p_filesz' / Int32ul,
    'p_memsz' / Int32ul,
    'p_flags' / Int32ul,
    'p_align' / Int32ul,
)

ElfHeaderTables = Struct(
    'elf_header' / ElfHeader,
    'program_headers' / Pointer(this.elf_header.e_phoff, ProgramHeader[this.elf_header.e_phnum]),
    'section_headers' / Pointer(this.elf_header.e_shoff, SectionHeader[this.elf_header.e_shnum]),
)

NoteSection = AlignedStruct(
    4,
    'namesz' / Int32ul,
    'descsz' / Int32ul,
    'type' / Int32ul,
    'name' / Bytes(this.namesz),
    'desc' / Bytes(this.descsz),
)

NoteSections = GreedyRange(NoteSection)


class ElfFile(object):
    """
    ELF object representing a single ELF file
    """
    SHN_UNDEF = 0x00
    SHT_PROGBITS = 0x01
    SHT_STRTAB = 0x03
    SHT_NOBITS = 0x08

    PT_LOAD = 0x01
    PT_NOTE = 0x04

    ET_CORE = 0x04

    EV_CURRENT = 0x01

    def __init__(self, elf_path=None, e_type=None, e_machine=None):
        self.e_type = e_type
        self.e_machine = e_machine

        self._struct = None  # construct Struct
        self._model = None  # construct Container
        self._section_names = []  # type: list[str]

        self.sections = []  # type: list[ElfSection]
        self.load_segments = []  # type: list[ElfSegment]
        self.note_segments = []  # type: list[ElfNoteSegment]

        if elf_path and os.path.isfile(elf_path):
            self.read_elf(elf_path)

    def read_elf(self, elf_path):  # type: (str) -> None
        """
        Read an ELF file and populate ``self._model``, ``self.sections``,
        ``self.load_segments`` and ``self.note_segments``

        :param elf_path: elf file path
        :return: None
        """
        with open(elf_path, 'rb') as fr:
            elf_bytes = fr.read()
        header_tables = ElfHeaderTables.parse(elf_bytes)
        self.e_type = header_tables.elf_header.e_type
        self.e_machine = header_tables.elf_header.e_machine

        self._struct = self._generate_struct_from_headers(header_tables)
        self._model = self._struct.parse(elf_bytes)

        if 'string_table' in self._model:
            self._section_names = self._parse_string_table(self._model.string_table)

        self.load_segments = [ElfSegment(seg.ph.p_vaddr,
                                         seg.data,
                                         seg.ph.p_flags) for seg in self._model.load_segments]
        self.note_segments = [ElfNoteSegment(seg.ph.p_vaddr,
                                             seg.data,
                                             seg.ph.p_flags) for seg in self._model.note_segments]
        self.sections = [ElfSection(self._section_names[sec.sh.sh_name],
                                    sec.sh.sh_addr,
                                    sec.data,
                                    sec.sh.sh_flags) for sec in self._model.sections]

    @staticmethod
    def _parse_string_table(byte_str):  # type: (bytes) -> dict
        name = ''
        index = 0
        res = {}
        for i, c in enumerate(byte_str):
            if c in [0x00, '\x00']:  # a workaround: in Python 2 bytes is actually str
                res[index] = name
                name = ''
                index = i + 1
                continue
            if isinstance(c, int):
                name += chr(c)
            else:
                name += c
        return res

    def _generate_struct_from_headers(self, header_tables):
        """
        Generate a ``construct`` Struct for this file

        :param header_tables: contains elf_header, program_headers, section_headers
        :return: Struct of the whole file
        """
        elf_header = header_tables.elf_header
        program_headers = header_tables.program_headers
        section_headers = header_tables.section_headers
        assert program_headers or section_headers

        string_table_sh = None
        load_segment_subcons = []
        note_segment_subcons = []
        # Point back to the program headers so that each segment knows its own header
        for i, ph in enumerate(program_headers):
            args = [
                'ph' / Pointer(elf_header.e_phoff + i * ProgramHeader.sizeof(), ProgramHeader),
                'data' / Pointer(ph.p_offset, Bytes(ph.p_filesz)),
            ]
            if ph.p_vaddr == 0 and ph.p_type == self.PT_NOTE:
                args.append('note_secs' / Pointer(ph.p_offset, NoteSections))
                note_segment_subcons.append(Struct(*args))
            elif ph.p_vaddr != 0:
                load_segment_subcons.append(Struct(*args))

        section_subcons = []
        for i, sh in enumerate(section_headers):
            if sh.sh_type == self.SHT_STRTAB and i == elf_header.e_shstrndx:
                string_table_sh = sh
            elif sh.sh_addr != 0 and sh.sh_type == self.SHT_PROGBITS:
                section_subcons.append(Struct(
                    'sh' / Pointer(elf_header.e_shoff + i * SectionHeader.sizeof(), SectionHeader),
                    'data' / Pointer(sh.sh_offset, Bytes(sh.sh_size)),
                ))

        args = [
            'elf_header' / ElfHeader,
            'load_segments' / Sequence(*load_segment_subcons),
            'note_segments' / Sequence(*note_segment_subcons),
            'sections' / Sequence(*section_subcons),
        ]
        if string_table_sh is not None:
            args.append('string_table' / Pointer(string_table_sh.sh_offset, Bytes(string_table_sh.sh_size)))

        return Struct(*args)

    @property
    def sha256(self):
        """
        :return: SHA256 hash of the input ELF file
        """
        sha256 = hashlib.sha256()
        sha256.update(self._struct.build(self._model))
        return sha256.digest()


class ElfSection(object):
    SHF_WRITE = 0x01
    SHF_ALLOC = 0x02
    SHF_EXECINSTR = 0x04
    SHF_MASKPROC = 0xf0000000

    def __init__(self, name, addr, data, flags):
        self.name = name
        self.addr = addr
        self.data = data
        self.flags = flags

    def attr_str(self):
        if self.flags & self.SHF_MASKPROC:
            return 'MS'

        res = 'R'
        res += 'W' if self.flags & self.SHF_WRITE else ' '
        res += 'X' if self.flags & self.SHF_EXECINSTR else ' '
        res += 'A' if self.flags & self.SHF_ALLOC else ' '
        return res

    def __repr__(self):
        return '{:>32} [Addr] 0x{:>08X}, [Size] 0x{:>08X} {:>4}' \
            .format(self.name, self.addr, len(self.data), self.attr_str())


class ElfSegment(object):
    PF_X = 0x01
    PF_W = 0x02
    PF_R = 0x04

    def __init__(self, addr, data, flags):
        self.addr = addr
        self.data = data
        self.flags = flags
        self.type = ElfFile.PT_LOAD

    def attr_str(self):
        res = ''
        res += 'R' if self.flags & self.PF_R else ' '
        res += 'W' if self.flags & self.PF_W else ' '
        res += 'E' if self.flags & self.PF_X else ' '
        return res

    @staticmethod
    def _type_str():
        return 'LOAD'

    def __repr__(self):
        return '{:>8} Addr 0x{:>08X}, Size 0x{:>08X} Flags {:4}' \
            .format(self._type_str(), self.addr, len(self.data), self.attr_str())


class ElfNoteSegment(ElfSegment):
    def __init__(self, addr, data, flags):
        super(ElfNoteSegment, self).__init__(addr, data, flags)
        self.type = ElfFile.PT_NOTE
        self.note_secs = NoteSections.parse(self.data)

    @staticmethod
    def _type_str():
        return 'NOTE'


TASK_STATUS_CORRECT = 0x00
TASK_STATUS_TCB_CORRUPTED = 0x01
TASK_STATUS_STACK_CORRUPTED = 0x02

EspTaskStatus = Struct(
    'task_index' / Int32ul,
    'task_flags' / Int32ul,
    'task_tcb_addr' / Int32ul,
    'task_stack_start' / Int32ul,
    'task_stack_len' / Int32ul,
    'task_name' / Bytes(16),
)


class ESPCoreDumpElfFile(ElfFile):
    PT_INFO = 8266
    PT_TASK_INFO = 678
    PT_EXTRA_INFO = 677

    CURR_TASK_MARKER = 0xdeadbeef

    # ELF file machine type
    EM_XTENSA = 0x5E

    def __init__(self, elf_path=None, e_type=None, e_machine=None):
        _e_type = e_type or self.ET_CORE
        _e_machine = e_machine or self.EM_XTENSA
        super(ESPCoreDumpElfFile, self).__init__(elf_path, _e_type, _e_machine)

    def add_segment(self, addr, data, seg_type, flags):
        if seg_type != self.PT_NOTE:
            self.load_segments.append(ElfSegment(addr, data, flags))
        else:
            self.note_segments.append(ElfNoteSegment(addr, data, flags))

    def dump(self, output_path):  # type: (str) -> None
        """
        Dump the in-memory segments into an ELF file

        :param output_path: output file path
        :return: None
        """
        res = b''
        res += ElfHeader.build({
            'e_type': self.e_type,
            'e_machine': self.e_machine,
            'e_version': self.EV_CURRENT,
            'e_entry': 0,
            'e_phoff': ElfHeader.sizeof(),
            'e_shoff': 0,
            'e_flags': 0,
            'e_ehsize': ElfHeader.sizeof(),
            'e_phentsize': ProgramHeader.sizeof(),
            'e_phnum': len(self.load_segments) + len(self.note_segments),
            'e_shentsize': 0,
            'e_shnum': 0,
            'e_shstrndx': self.SHN_UNDEF,
        })

        offset = ElfHeader.sizeof() + (len(self.load_segments) + len(self.note_segments)) * ProgramHeader.sizeof()
        _segments = self.load_segments + self.note_segments
        for seg in _segments:
            res += ProgramHeader.build({
                'p_type': seg.type,
                'p_offset': offset,
                'p_vaddr': seg.addr,
                'p_paddr': seg.addr,
                'p_filesz': len(seg.data),
                'p_memsz': len(seg.data),
                'p_flags': seg.flags,
                'p_align': 0,
            })
            offset += len(seg.data)

        for seg in _segments:
            res += seg.data

        with open(output_path, 'wb') as fw:
            fw.write(res)
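A minimal usage sketch of the API above, assuming the package is importable as ``corefile`` and that 'app.elf' and 'fake_core.elf' are placeholder paths: ElfFile parses an existing image through the generated construct Struct, while ESPCoreDumpElfFile assembles a new core file from in-memory segments.

# Illustrative only; file names and the segment payload are placeholders.
from corefile.elf import ElfFile, ElfSegment, ESPCoreDumpElfFile

exe = ElfFile('app.elf')            # parse an existing executable
for sec in exe.sections:            # each section carries name, addr, data, flags
    print(sec)

core = ESPCoreDumpElfFile()         # defaults to ET_CORE / EM_XTENSA
core.add_segment(0x3ffb0000,        # one dummy PT_LOAD segment
                 b'\x00' * 16,
                 ElfFile.PT_LOAD,
                 ElfSegment.PF_R | ElfSegment.PF_W)
core.dump('fake_core.elf')          # ELF header + program headers + segment data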
components/espcoredump/corefile/gdb.py (new file, 134 lines)
@@ -0,0 +1,134 @@
#
# Copyright 2021 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import logging
import re
import time

from . import ESPCoreDumpError

try:
    import typing
except ImportError:
    pass

from pygdbmi.gdbcontroller import DEFAULT_GDB_TIMEOUT_SEC, GdbController


class EspGDB(object):
    def __init__(self, gdb_path, gdb_cmds, core_filename, prog_filename, timeout_sec=DEFAULT_GDB_TIMEOUT_SEC):
        # type: (str, typing.List[str], str, str, int) -> None
        """
        Start GDB and initialize a GdbController instance
        """
        gdb_args = ['--quiet',  # inhibit dumping info at start-up
                    '--nx',  # do not execute commands from any .gdbinit file
                    '--nw',  # do not use a window interface
                    '--interpreter=mi2',  # use GDB/MI v2
                    '--core=%s' % core_filename]  # core file
        for c in gdb_cmds:
            if c:
                gdb_args += ['-ex', c]
        gdb_args.append(prog_filename)
        self.p = GdbController(gdb_path=gdb_path, gdb_args=gdb_args)
        self.timeout = timeout_sec

        # Consume initial output by issuing a dummy command
        self._gdbmi_run_cmd_get_responses(cmd='-data-list-register-values x pc',
                                          resp_message=None, resp_type='console', multiple=True,
                                          done_message='done', done_type='result')

    def __del__(self):
        try:
            self.p.exit()
        except IndexError:
            logging.warning('Attempt to terminate the GDB process failed, because it is already terminated. Skip')

    def _gdbmi_run_cmd_get_responses(self, cmd, resp_message, resp_type, multiple=True,
                                     done_message=None, done_type=None):
        # type: (str, typing.Optional[str], str, bool, typing.Optional[str], typing.Optional[str]) -> list
        self.p.write(cmd, read_response=False)
        t_end = time.time() + self.timeout
        filtered_response_list = []
        all_responses = []
        while time.time() < t_end:
            more_responses = self.p.get_gdb_response(timeout_sec=0, raise_error_on_timeout=False)
            filtered_response_list += filter(lambda rsp: rsp['message'] == resp_message and rsp['type'] == resp_type,
                                             more_responses)
            all_responses += more_responses
            if filtered_response_list and not multiple:
                break
            if done_message and done_type and self._gdbmi_filter_responses(more_responses, done_message, done_type):
                break
        if not filtered_response_list and not multiple:
            raise ESPCoreDumpError("Couldn't find response with message '{}', type '{}' in responses '{}'".format(
                resp_message, resp_type, str(all_responses)
            ))
        return filtered_response_list

    def _gdbmi_run_cmd_get_one_response(self, cmd, resp_message, resp_type):
        # type: (str, typing.Optional[str], str) -> dict
        return self._gdbmi_run_cmd_get_responses(cmd, resp_message, resp_type, multiple=False)[0]

    def _gdbmi_data_evaluate_expression(self, expr):  # type: (str) -> str
        """ Get the value of an expression, similar to the 'print' command """
        return self._gdbmi_run_cmd_get_one_response("-data-evaluate-expression \"%s\"" % expr,
                                                    'done', 'result')['payload']['value']

    def get_freertos_task_name(self, tcb_addr):  # type: (int) -> str
        """ Get the FreeRTOS task name, given the TCB address """
        try:
            val = self._gdbmi_data_evaluate_expression('(char*)((TCB_t *)0x%x)->pcTaskName' % tcb_addr)
        except (ESPCoreDumpError, KeyError):
            # KeyError is raised when "value" is not in "payload"
            return ''

        # The value is of the form '0x12345678 "task_name"', extract the actual name
        result = re.search(r"\"([^']*)\"$", val)
        if result:
            return result.group(1)
        return ''

    def run_cmd(self, gdb_cmd):  # type: (str) -> str
        """ Execute a generic GDB console command via MI2 """
        filtered_responses = self._gdbmi_run_cmd_get_responses(cmd="-interpreter-exec console \"%s\"" % gdb_cmd,
                                                               resp_message=None, resp_type='console', multiple=True,
                                                               done_message='done', done_type='result')
        return ''.join([x['payload'] for x in filtered_responses]) \
            .replace('\\n', '\n') \
            .replace('\\t', '\t') \
            .rstrip('\n')

    def get_thread_info(self):  # type: () -> (typing.List[dict], str)
        """ Get information about all threads known to GDB, and the current thread ID """
        result = self._gdbmi_run_cmd_get_one_response('-thread-info', 'done', 'result')['payload']
        current_thread_id = result['current-thread-id']
        threads = result['threads']
        return threads, current_thread_id

    def switch_thread(self, thr_id):  # type: (int) -> None
        """ Tell GDB to switch to a specific thread, given its ID """
        self._gdbmi_run_cmd_get_one_response('-thread-select %s' % thr_id, 'done', 'result')

    @staticmethod
    def _gdbmi_filter_responses(responses, resp_message, resp_type):
        return list(filter(lambda rsp: rsp['message'] == resp_message and rsp['type'] == resp_type, responses))

    @staticmethod
    def gdb2freertos_thread_id(gdb_target_id):  # type: (str) -> int
        """ Convert a GDB 'target ID' to the FreeRTOS TCB address """
        return int(gdb_target_id.replace('process ', ''), 0)
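A sketch of how EspGDB can be driven, assuming the package is importable as ``corefile``, the Xtensa toolchain GDB name is available, and the file names are placeholders: the class wraps pygdbmi's GdbController and exposes a small synchronous helper API on top of GDB/MI.

# Illustrative only; the GDB binary name and file paths are assumptions.
from corefile.gdb import EspGDB

gdb = EspGDB('xtensa-esp32-elf-gdb', [], 'core.elf', 'app.elf', timeout_sec=3)

threads, cur_id = gdb.get_thread_info()            # GDB/MI '-thread-info'
for t in threads:
    tcb = EspGDB.gdb2freertos_thread_id(t['target-id'])
    print(t['id'], hex(tcb), gdb.get_freertos_task_name(tcb))

gdb.switch_thread(cur_id)                          # select the current (crashed) thread
print(gdb.run_cmd('bt'))                           # run a plain console command via MI2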
components/espcoredump/corefile/loader.py (new file, 572 lines)
@@ -0,0 +1,572 @@
#
# Copyright 2021 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import base64
import binascii
import hashlib
import logging
import os
import subprocess
import sys
import tempfile

from construct import AlignedStruct, Bytes, GreedyRange, Int32ul, Padding, Struct, abs_, this

from . import ESPCoreDumpLoaderError, _ArchMethodsBase, _TargetMethodsBase
from .elf import (TASK_STATUS_CORRECT, TASK_STATUS_TCB_CORRUPTED, ElfFile, ElfSegment, ESPCoreDumpElfFile,
                  EspTaskStatus, NoteSection)
from .xtensa import _ArchMethodsXtensa, _TargetMethodsESP32

IDF_PATH = os.getenv('IDF_PATH')
PARTTOOL_PY = os.path.join(IDF_PATH, 'components', 'partition_table', 'parttool.py')
ESPTOOL_PY = os.path.join(IDF_PATH, 'components', 'esptool_py', 'esptool', 'esptool.py')

# The following structs are based on the source code
# components/espcoredump/include_core_dump/esp_core_dump_priv.h

EspCoreDumpV1Header = Struct(
    'tot_len' / Int32ul,
    'ver' / Int32ul,
    'task_num' / Int32ul,
    'tcbsz' / Int32ul,
)

EspCoreDumpV2Header = Struct(
    'tot_len' / Int32ul,
    'ver' / Int32ul,
    'task_num' / Int32ul,
    'tcbsz' / Int32ul,
    'segs_num' / Int32ul,
)

CRC = Int32ul
SHA256 = Bytes(32)

TaskHeader = Struct(
    'tcb_addr' / Int32ul,
    'stack_top' / Int32ul,
    'stack_end' / Int32ul,
)

MemSegmentHeader = Struct(
    'mem_start' / Int32ul,
    'mem_sz' / Int32ul,
    'data' / Bytes(this.mem_sz),
)


class EspCoreDumpVersion(object):
    """Core dump version class
    """
    # This class contains all version-dependent params
    ESP32 = 0
    ESP32S2 = 2

    XTENSA_CHIPS = [ESP32, ESP32S2]

    ESP_COREDUMP_TARGETS = XTENSA_CHIPS

    def __init__(self, version=None):
        """Constructor for core dump version
        """
        super(EspCoreDumpVersion, self).__init__()
        if version is None:
            self.version = 0
        else:
            self.set_version(version)

    @staticmethod
    def make_dump_ver(major, minor):
        return ((major & 0xFF) << 8) | ((minor & 0xFF) << 0)

    def set_version(self, version):
        self.version = version

    @property
    def chip_ver(self):
        return (self.version & 0xFFFF0000) >> 16

    @property
    def dump_ver(self):
        return self.version & 0x0000FFFF

    @property
    def major(self):
        return (self.version & 0x0000FF00) >> 8

    @property
    def minor(self):
        return self.version & 0x000000FF


class EspCoreDumpLoader(EspCoreDumpVersion):
    # "legacy" stands for core dumps v0.1 (before IDF v4.1)
    BIN_V1 = EspCoreDumpVersion.make_dump_ver(0, 1)
    BIN_V2 = EspCoreDumpVersion.make_dump_ver(0, 2)
    ELF_CRC32 = EspCoreDumpVersion.make_dump_ver(1, 0)
    ELF_SHA256 = EspCoreDumpVersion.make_dump_ver(1, 1)

    def __init__(self):
        super(EspCoreDumpLoader, self).__init__()
        self.core_src_file = None
        self.core_src_struct = None
        self.core_src = None

        self.core_elf_file = None

        self.header = None
        self.header_struct = EspCoreDumpV1Header
        self.checksum_struct = CRC

        # These two method classes will be assigned in ``_reload_coredump``
        self.target_method_cls = _TargetMethodsBase
        self.arch_method_cls = _ArchMethodsBase

        self._temp_files = []

    def __del__(self):
        if self.core_src_file:
            self.core_src_file.close()
        if self.core_elf_file:
            self.core_elf_file.close()
        for f in self._temp_files:
            try:
                os.remove(f)
            except OSError:
                pass

    def _create_temp_file(self):
        t = tempfile.NamedTemporaryFile('wb', delete=False)
        self._temp_files.append(t.name)
        return t

    def _reload_coredump(self):
        with open(self.core_src_file.name, 'rb') as fr:
            coredump_bytes = fr.read()

        _header = EspCoreDumpV1Header.parse(coredump_bytes)  # first we use the V1 format to get the version
        self.set_version(_header.ver)
        if self.dump_ver == self.ELF_CRC32:
            self.checksum_struct = CRC
            self.header_struct = EspCoreDumpV2Header
        elif self.dump_ver == self.ELF_SHA256:
            self.checksum_struct = SHA256
            self.header_struct = EspCoreDumpV2Header
        elif self.dump_ver == self.BIN_V1:
            self.checksum_struct = CRC
            self.header_struct = EspCoreDumpV1Header
        elif self.dump_ver == self.BIN_V2:
            self.checksum_struct = CRC
            self.header_struct = EspCoreDumpV2Header
        else:
            raise ESPCoreDumpLoaderError('Core dump version "0x%x" is not supported!' % self.dump_ver)

        self.core_src_struct = Struct(
            'header' / self.header_struct,
            'data' / Bytes(this.header.tot_len - self.header_struct.sizeof() - self.checksum_struct.sizeof()),
            'checksum' / self.checksum_struct,
        )
        self.core_src = self.core_src_struct.parse(coredump_bytes)

        # Reload the header if the header struct changed after parsing
        if self.header_struct != EspCoreDumpV1Header:
            self.header = EspCoreDumpV2Header.parse(coredump_bytes)

        if self.chip_ver in self.ESP_COREDUMP_TARGETS:
            if self.chip_ver == self.ESP32:
                self.target_method_cls = _TargetMethodsESP32

            if self.chip_ver in self.XTENSA_CHIPS:
                self.arch_method_cls = _ArchMethodsXtensa
        else:
            raise ESPCoreDumpLoaderError('Core dump chip "0x%x" is not supported!' % self.chip_ver)

    def _validate_dump_file(self):
        if self.chip_ver not in self.ESP_COREDUMP_TARGETS:
            raise ESPCoreDumpLoaderError('Invalid core dump chip version: "{}", should be <= "0x{:X}"'
                                         .format(self.chip_ver, self.ESP32S2))

        if self.checksum_struct == CRC:
            self._crc_validate()
        elif self.checksum_struct == SHA256:
            self._sha256_validate()

    def _crc_validate(self):
        data_crc = binascii.crc32(EspCoreDumpV2Header.build(self.core_src.header) + self.core_src.data) & 0xffffffff
        if data_crc != self.core_src.checksum:
            raise ESPCoreDumpLoaderError('Invalid core dump CRC %x, should be %x' % (data_crc, self.core_src.checksum))

    def _sha256_validate(self):
        data_sha256 = hashlib.sha256(EspCoreDumpV2Header.build(self.core_src.header) + self.core_src.data)
        data_sha256_str = data_sha256.hexdigest()
        sha256_str = binascii.hexlify(self.core_src.checksum).decode('ascii')
        if data_sha256_str != sha256_str:
            raise ESPCoreDumpLoaderError('Invalid core dump SHA256 "{}", should be "{}"'
                                         .format(data_sha256_str, sha256_str))

    def create_corefile(self, exe_name=None):  # type: (str) -> None
        """
        Creates the core dump ELF file
        """
        self._validate_dump_file()
        self.core_elf_file = self._create_temp_file()

        if self.dump_ver in [self.ELF_CRC32,
                             self.ELF_SHA256]:
            self._extract_elf_corefile(exe_name)
        elif self.dump_ver in [self.BIN_V1,
                               self.BIN_V2]:
            self._extract_bin_corefile()
        else:
            raise NotImplementedError

    def _extract_elf_corefile(self, exe_name=None):
        """
        Reads and parses the ELF-format core dump image
        """
        self.core_elf_file.write(self.core_src.data)
        # Needs to be closed before reading. Otherwise the result will be wrong
        self.core_elf_file.close()

        core_elf = ESPCoreDumpElfFile(self.core_elf_file.name)

        # Read note segments from the core file which belong to tasks (TCB or stack)
        for seg in core_elf.note_segments:
            for note_sec in seg.note_secs:
                # Check for the version info note
                if note_sec.name == 'ESP_CORE_DUMP_INFO' \
                        and note_sec.type == ESPCoreDumpElfFile.PT_INFO \
                        and exe_name:
                    exe_elf = ElfFile(exe_name)
                    app_sha256 = binascii.hexlify(exe_elf.sha256)
                    coredump_sha256_struct = Struct(
                        'ver' / Int32ul,
                        'sha256' / Bytes(64)  # SHA256 as hex string
                    )
                    coredump_sha256 = coredump_sha256_struct.parse(note_sec.desc[:coredump_sha256_struct.sizeof()])
                    if coredump_sha256.sha256 != app_sha256:
                        raise ESPCoreDumpLoaderError(
                            'Invalid application image for coredump: coredump SHA256({}) != app SHA256({}).'
                            .format(coredump_sha256, app_sha256))
                    if coredump_sha256.ver != self.version:
                        raise ESPCoreDumpLoaderError(
                            'Invalid application image for coredump: coredump SHA256 version({}) != app SHA256 version({}).'
                            .format(coredump_sha256.ver, self.version))

    @staticmethod
    def _get_aligned_size(size, align_with=4):
        if size % align_with:
            return align_with * (size // align_with + 1)
        return size

    @staticmethod
    def _build_note_section(name, sec_type, desc):
        name = bytearray(name, encoding='ascii') + b'\0'
        return NoteSection.build({
            'namesz': len(name),
            'descsz': len(desc),
            'type': sec_type,
            'name': name,
            'desc': desc,
        })

    def _extract_bin_corefile(self):
        """
        Creates a core dump ELF file from the binary-format core dump image
        """
        tcbsz_aligned = self._get_aligned_size(self.header.tcbsz)

        coredump_data_struct = Struct(
            'tasks' / GreedyRange(
                AlignedStruct(
                    4,
                    'task_header' / TaskHeader,
                    'tcb' / Bytes(self.header.tcbsz),
                    'stack' / Bytes(abs_(this.task_header.stack_top - this.task_header.stack_end)),
                )
            ),
            'mem_seg_headers' / MemSegmentHeader[self.core_src.header.segs_num]
        )

        core_elf = ESPCoreDumpElfFile()
        notes = b''
        core_dump_info_notes = b''
        task_info_notes = b''

        coredump_data = coredump_data_struct.parse(self.core_src.data)
        for i, task in enumerate(coredump_data.tasks):
            stack_len_aligned = self._get_aligned_size(abs(task.task_header.stack_top - task.task_header.stack_end))
            task_status_kwargs = {
                'task_index': i,
                'task_flags': TASK_STATUS_CORRECT,
                'task_tcb_addr': task.task_header.tcb_addr,
                'task_stack_start': min(task.task_header.stack_top, task.task_header.stack_end),
                'task_stack_len': stack_len_aligned,
                'task_name': Padding(16).build({})  # currently we don't have the task_name, keep it as padding
            }

            # Write TCB
            try:
                if self.target_method_cls.tcb_is_sane(task.task_header.tcb_addr, tcbsz_aligned):
                    core_elf.add_segment(task.task_header.tcb_addr,
                                         task.tcb,
                                         ElfFile.PT_LOAD,
                                         ElfSegment.PF_R | ElfSegment.PF_W)
                elif task.task_header.tcb_addr and self.target_method_cls.addr_is_fake(task.task_header.tcb_addr):
                    task_status_kwargs['task_flags'] |= TASK_STATUS_TCB_CORRUPTED
            except ESPCoreDumpLoaderError as e:
                logging.warning('Skip TCB {} bytes @ 0x{:x}. (Reason: {})'
                                .format(tcbsz_aligned, task.task_header.tcb_addr, e))

            # Write stack
            try:
                if self.target_method_cls.stack_is_sane(task_status_kwargs['task_stack_start']):
                    core_elf.add_segment(task_status_kwargs['task_stack_start'],
                                         task.stack,
                                         ElfFile.PT_LOAD,
                                         ElfSegment.PF_R | ElfSegment.PF_W)
                elif task_status_kwargs['task_stack_start'] \
                        and self.target_method_cls.addr_is_fake(task_status_kwargs['task_stack_start']):
                    task_status_kwargs['task_flags'] |= TASK_STATUS_TCB_CORRUPTED
                    core_elf.add_segment(task_status_kwargs['task_stack_start'],
                                         task.stack,
                                         ElfFile.PT_LOAD,
                                         ElfSegment.PF_R | ElfSegment.PF_W)
            except ESPCoreDumpLoaderError as e:
                logging.warning('Skip task\'s ({:x}) stack {} bytes @ 0x{:x}. (Reason: {})'
                                .format(task_status_kwargs['task_tcb_addr'],
                                        task_status_kwargs['task_stack_len'],
                                        task_status_kwargs['task_stack_start'],
                                        e))

            try:
                logging.debug('Stack start_end: 0x{:x} @ 0x{:x}'
                              .format(task.task_header.stack_top, task.task_header.stack_end))
                task_regs, extra_regs = self.arch_method_cls.get_registers_from_stack(
                    task.stack,
                    task.task_header.stack_end > task.task_header.stack_top
                )
            except Exception as e:
                raise ESPCoreDumpLoaderError(str(e))

            task_info_notes += self._build_note_section('TASK_INFO',
                                                        ESPCoreDumpElfFile.PT_TASK_INFO,
                                                        EspTaskStatus.build(task_status_kwargs))
            notes += self._build_note_section('CORE',
                                              ElfFile.PT_LOAD,
                                              self.arch_method_cls.build_prstatus_data(task.task_header.tcb_addr,
                                                                                       task_regs))

            if extra_regs and len(core_dump_info_notes) == 0:
                # actually there will be only one such note - for the crashed task
                core_dump_info_notes += self._build_note_section('ESP_CORE_DUMP_INFO',
                                                                 ESPCoreDumpElfFile.PT_INFO,
                                                                 Int32ul.build(self.header.ver))

                exc_regs = []
                for reg_id in extra_regs:
                    exc_regs.extend([reg_id, extra_regs[reg_id]])
                _regs = [task.task_header.tcb_addr] + exc_regs
                core_dump_info_notes += self._build_note_section(
                    'EXTRA_INFO',
                    ESPCoreDumpElfFile.PT_EXTRA_INFO,
                    Int32ul[1 + len(exc_regs)].build(_regs)
                )

        if self.dump_ver == self.BIN_V2:
            for header in coredump_data.mem_seg_headers:
                logging.debug('Read memory segment {} bytes @ 0x{:x}'.format(header.mem_sz, header.mem_start))
                core_elf.add_segment(header.mem_start, header.data, ElfFile.PT_LOAD, ElfSegment.PF_R | ElfSegment.PF_W)

        # add notes
        try:
            core_elf.add_segment(0, notes, ElfFile.PT_NOTE, 0)
        except ESPCoreDumpLoaderError as e:
            logging.warning('Skip NOTES segment {:d} bytes @ 0x{:x}. (Reason: {})'.format(len(notes), 0, e))
        # add core dump info notes
        try:
            core_elf.add_segment(0, core_dump_info_notes, ElfFile.PT_NOTE, 0)
        except ESPCoreDumpLoaderError as e:
            logging.warning('Skip core dump info NOTES segment {:d} bytes @ 0x{:x}. (Reason: {})'
                            .format(len(core_dump_info_notes), 0, e))
        try:
            core_elf.add_segment(0, task_info_notes, ElfFile.PT_NOTE, 0)
        except ESPCoreDumpLoaderError as e:
            logging.warning('Skip failed tasks info NOTES segment {:d} bytes @ 0x{:x}. (Reason: {})'
                            .format(len(task_info_notes), 0, e))
        # dump core ELF
        core_elf.e_type = ElfFile.ET_CORE
        core_elf.e_machine = ESPCoreDumpElfFile.EM_XTENSA
        core_elf.dump(self.core_elf_file.name)


class ESPCoreDumpFlashLoader(EspCoreDumpLoader):
    ESP_COREDUMP_PART_TABLE_OFF = 0x8000

    def __init__(self, offset, target='esp32', port=None, baud=None):
        super(ESPCoreDumpFlashLoader, self).__init__()
        self.port = port
        self.baud = baud
        self.target = target

        self._get_coredump(offset)
        self._reload_coredump()

    def _get_coredump(self, off):
        """
        Loads the core dump from flash using esptool (if an offset is set) or parttool
        """
        try:
            if off:
                logging.info('Invoke esptool to read image.')
                self._invoke_esptool(off=off)
            else:
                logging.info('Invoke parttool to read image.')
                self._invoke_parttool()
        except subprocess.CalledProcessError as e:
            if e.output:
                logging.info(e.output)
            logging.error('Error during the subprocess execution')
        else:
            # Needs to be closed before reading. Otherwise the result will be wrong
            self.core_src_file.close()

    def _invoke_esptool(self, off=None):
        """
        Loads the core dump from flash using esptool
        """
        tool_args = [sys.executable, ESPTOOL_PY, '-c', self.target]
        if self.port:
            tool_args.extend(['-p', self.port])
        if self.baud:
            tool_args.extend(['-b', str(self.baud)])

        self.core_src_file = self._create_temp_file()
        try:
            (part_offset, part_size) = self._get_core_dump_partition_info()
            if not off:
                off = part_offset  # set default offset if not specified
                logging.warning('The core dump image offset is not specified. Use partition offset: %d.', part_offset)
            if part_offset != off:
                logging.warning('Predefined image offset: %d does not match core dump partition offset: %d', off,
                                part_offset)

            # Here we use the V1 format to locate the size
            tool_args.extend(['read_flash', str(off), str(EspCoreDumpV1Header.sizeof())])
            tool_args.append(self.core_src_file.name)

            # read the core dump length
            et_out = subprocess.check_output(tool_args)
            if et_out:
                logging.info(et_out.decode('utf-8'))

            header = EspCoreDumpV1Header.parse(open(self.core_src_file.name, 'rb').read())
            if not header or not 0 < header.tot_len <= part_size:
                logging.error('Incorrect size of core dump image: {}, use partition size instead: {}'
                              .format(header.tot_len, part_size))
                coredump_len = part_size
            else:
                coredump_len = header.tot_len
            # set the actual size of the core dump image and read it from flash
            tool_args[-2] = str(coredump_len)
            et_out = subprocess.check_output(tool_args)
            if et_out:
                logging.info(et_out.decode('utf-8'))
        except subprocess.CalledProcessError as e:
            logging.error('esptool script execution failed with err %d', e.returncode)
            logging.debug("Command ran: '%s'", e.cmd)
            logging.debug('Command out:')
            logging.debug(e.output)
            raise e

    def _invoke_parttool(self):
        """
        Loads the core dump from flash using parttool
        """
        tool_args = [sys.executable, PARTTOOL_PY]
        if self.port:
            tool_args.extend(['--port', self.port])
        tool_args.extend(['read_partition', '--partition-type', 'data', '--partition-subtype', 'coredump', '--output'])

        self.core_src_file = self._create_temp_file()
        try:
            tool_args.append(self.core_src_file.name)
            # read the core dump partition
            et_out = subprocess.check_output(tool_args)
            if et_out:
                logging.info(et_out.decode('utf-8'))
        except subprocess.CalledProcessError as e:
            logging.error('parttool script execution failed with err %d', e.returncode)
            logging.debug("Command ran: '%s'", e.cmd)
            logging.debug('Command out:')
            logging.debug(e.output)
            raise e

    def _get_core_dump_partition_info(self, part_off=None):
        """
        Get core dump partition info using parttool
        """
        logging.info('Retrieving core dump partition offset and size...')
        if not part_off:
            part_off = self.ESP_COREDUMP_PART_TABLE_OFF
        try:
            tool_args = [sys.executable, PARTTOOL_PY, '-q', '--partition-table-offset', str(part_off)]
            if self.port:
                tool_args.extend(['--port', self.port])
            invoke_args = tool_args + ['get_partition_info', '--partition-type', 'data',
                                       '--partition-subtype', 'coredump',
                                       '--info', 'offset', 'size']
            res = subprocess.check_output(invoke_args).strip()
            (offset_str, size_str) = res.rsplit(b'\n')[-1].split(b' ')
            size = int(size_str, 16)
            offset = int(offset_str, 16)
            logging.info('Core dump partition offset=%d, size=%d', offset, size)
        except subprocess.CalledProcessError as e:
            logging.error('parttool get partition info failed with err %d', e.returncode)
            logging.debug("Command ran: '%s'", e.cmd)
            logging.debug('Command out:')
            logging.debug(e.output)
            logging.error('Check if the coredump partition exists in partition table.')
            raise e
        return offset, size


class ESPCoreDumpFileLoader(EspCoreDumpLoader):
    def __init__(self, path, is_b64=False):
        super(ESPCoreDumpFileLoader, self).__init__()
        self.is_b64 = is_b64

        self._get_coredump(path)
        self._reload_coredump()

    def _get_coredump(self, path):
        """
        Loads the core dump from a raw binary or base64-encoded file
        """
        logging.debug('Load core dump from "%s", %s format', path, 'b64' if self.is_b64 else 'raw')
        if not self.is_b64:
            self.core_src_file = open(path, mode='rb')
        else:
            self.core_src_file = self._create_temp_file()
            with open(path, 'rb') as fb64:
                while True:
                    line = fb64.readline()
                    if len(line) == 0:
                        break
                    data = base64.standard_b64decode(line.rstrip(b'\r\n'))
                    self.core_src_file.write(data)
                self.core_src_file.flush()
                self.core_src_file.seek(0)
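Tying the loaders together, a minimal sketch assuming the package is importable as ``corefile``, IDF_PATH is exported before import (loader.py derives the tool paths from it), and the file names are placeholders: ESPCoreDumpFileLoader converts a base64 dump captured from the UART monitor into a standard ELF core file, which can then be opened with GDB or with the EspGDB wrapper above.

# Illustrative only; 'coredump.b64' and 'app.elf' are placeholders.
from corefile.loader import ESPCoreDumpFileLoader

loader = ESPCoreDumpFileLoader('coredump.b64', is_b64=True)  # decode and parse the header
loader.create_corefile(exe_name='app.elf')                   # validate checksum, emit ELF core
print('Core file written to', loader.core_elf_file.name)     # temporary file holding the core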
components/espcoredump/corefile/xtensa.py (new file, 281 lines)
@@ -0,0 +1,281 @@
|
|||||||
|
#
|
||||||
|
# Copyright 2021 Espressif Systems (Shanghai) PTE LTD
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
from construct import Int16ul, Int32ul, Int64ul, Struct
|
||||||
|
|
||||||
|
from . import ESPCoreDumpLoaderError, _ArchMethodsBase, _TargetMethodsBase
|
||||||
|
|
||||||
|
INVALID_CAUSE_VALUE = 0xFFFF
|
||||||
|
XCHAL_EXCCAUSE_NUM = 64
|
||||||
|
|
||||||
|
|
||||||
|
# Exception cause dictionary to get translation of exccause register
|
||||||
|
# From 4.4.1.5 table 4-64 Exception Causes of Xtensa
|
||||||
|
# Instruction Set Architecture (ISA) Reference Manual
|
||||||
|
|
||||||
|
XTENSA_EXCEPTION_CAUSE_DICT = {
|
||||||
|
0: ('IllegalInstructionCause', 'Illegal instruction'),
|
||||||
|
1: ('SyscallCause', 'SYSCALL instruction'),
|
||||||
|
2: ('InstructionFetchErrorCause',
|
||||||
|
'Processor internal physical address or data error during instruction fetch. (See EXCVADDR for more information)'),
|
||||||
|
3: ('LoadStoreErrorCause',
|
||||||
|
'Processor internal physical address or data error during load or store. (See EXCVADDR for more information)'),
|
||||||
|
4: ('Level1InterruptCause', 'Level-1 interrupt as indicated by set level-1 bits in the INTERRUPT register'),
|
||||||
|
5: ('AllocaCause', 'MOVSP instruction, if caller`s registers are not in the register file'),
|
||||||
|
6: ('IntegerDivideByZeroCause', 'QUOS: QUOU, REMS: or REMU divisor operand is zero'),
|
||||||
|
8: ('PrivilegedCause', 'Attempt to execute a privileged operation when CRING ? 0'),
|
||||||
|
9: ('LoadStoreAlignmentCause', 'Load or store to an unaligned address. (See EXCVADDR for more information)'),
|
||||||
|
12: ('InstrPIFDataErrorCause', 'PIF data error during instruction fetch. (See EXCVADDR for more information)'),
|
||||||
|
13: ('LoadStorePIFDataErrorCause',
|
||||||
|
'Synchronous PIF data error during LoadStore access. (See EXCVADDR for more information)'),
|
||||||
|
14: ('InstrPIFAddrErrorCause', 'PIF address error during instruction fetch. (See EXCVADDR for more information)'),
|
||||||
|
15: ('LoadStorePIFAddrErrorCause',
|
||||||
|
'Synchronous PIF address error during LoadStore access. (See EXCVADDR for more information)'),
|
||||||
|
16: ('InstTLBMissCause', 'Error during Instruction TLB refill. (See EXCVADDR for more information)'),
|
||||||
|
17: ('InstTLBMultiHitCause', 'Multiple instruction TLB entries matched. (See EXCVADDR for more information)'),
|
||||||
|
18: ('InstFetchPrivilegeCause',
|
||||||
|
'An instruction fetch referenced a virtual address at a ring level less than CRING. (See EXCVADDR for more information)'),
|
||||||
|
20: ('InstFetchProhibitedCause',
|
||||||
|
'An instruction fetch referenced a page mapped with an attribute that does not permit instruction fetch (EXCVADDR).'),
|
||||||
|
24: ('LoadStoreTLBMissCause', 'Error during TLB refill for a load or store. (See EXCVADDR for more information)'),
|
||||||
|
25: ('LoadStoreTLBMultiHitCause',
|
||||||
|
'Multiple TLB entries matched for a load or store. (See EXCVADDR for more information)'),
|
||||||
|
26: ('LoadStorePrivilegeCause',
|
||||||
|
'A load or store referenced a virtual address at a ring level less than CRING. (See EXCVADDR for more information)'),
|
||||||
|
28: ('LoadProhibitedCause',
|
||||||
|
'A load referenced a page mapped with an attribute that does not permit loads. (See EXCVADDR for more information)'),
|
||||||
|
29: ('StoreProhibitedCause',
|
||||||
|
'A store referenced a page mapped with an attribute that does not permit stores [Region Protection Option or MMU Option].'),
|
||||||
|
32: ('Coprocessor0Disabled', 'Coprocessor 0 instruction when cp0 disabled'),
|
||||||
|
33: ('Coprocessor1Disabled', 'Coprocessor 1 instruction when cp1 disabled'),
|
||||||
|
34: ('Coprocessor2Disabled', 'Coprocessor 2 instruction when cp2 disabled'),
|
||||||
|
35: ('Coprocessor3Disabled', 'Coprocessor 3 instruction when cp3 disabled'),
|
||||||
|
36: ('Coprocessor4Disabled', 'Coprocessor 4 instruction when cp4 disabled'),
|
||||||
|
37: ('Coprocessor5Disabled', 'Coprocessor 5 instruction when cp5 disabled'),
|
||||||
|
38: ('Coprocessor6Disabled', 'Coprocessor 6 instruction when cp6 disabled'),
|
||||||
|
39: ('Coprocessor7Disabled', 'Coprocessor 7 instruction when cp7 disabled'),
|
||||||
|
INVALID_CAUSE_VALUE: (
|
||||||
|
'InvalidCauseRegister', 'Invalid EXCCAUSE register value or current task is broken and was skipped'),
|
||||||
|
# ESP panic pseudo reasons
|
||||||
|
XCHAL_EXCCAUSE_NUM + 0: ('UnknownException', 'Unknown exception'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 1: ('DebugException', 'Unhandled debug exception'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 2: ('DoubleException', 'Double exception'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 3: ('KernelException', 'Unhandled kernel exception'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 4: ('CoprocessorException', 'Coprocessor exception'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 5: ('InterruptWDTTimoutCPU0', 'Interrupt wdt timeout on CPU0'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 6: ('InterruptWDTTimoutCPU1', 'Interrupt wdt timeout on CPU1'),
|
||||||
|
XCHAL_EXCCAUSE_NUM + 7: ('CacheError', 'Cache disabled but cached memory region accessed'),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class XtensaRegisters(object):
|
||||||
|
# extra regs IDs used in EXTRA_INFO note
|
||||||
|
EXCCAUSE_IDX = 0
|
||||||
|
EXCVADDR_IDX = 1
|
||||||
|
EPS2_IDX = 2
|
||||||
|
EPS3_IDX = 3
|
||||||
|
EPS4_IDX = 4
|
||||||
|
EPS5_IDX = 5
|
||||||
|
EPS6_IDX = 6
|
||||||
|
EPS7_IDX = 7
|
||||||
|
EPC1_IDX = 8
|
||||||
|
EPC2_IDX = 9
|
||||||
|
EPC3_IDX = 10
|
||||||
|
EPC4_IDX = 11
|
||||||
|
EPC5_IDX = 12
|
||||||
|
EPC6_IDX = 13
|
||||||
|
EPC7_IDX = 14
|
||||||
|
|
||||||
|
@property
|
||||||
|
def registers(self):
|
||||||
|
return {k: v for k, v in self.__class__.__dict__.items()
|
||||||
|
if not k.startswith('__') and isinstance(v, int)}
|
||||||
|
|
||||||
|
|
||||||
|
# Following structs are based on source code
|
||||||
|
# IDF_PATH/components/espcoredump/src/core_dump_port.c
|
||||||
|
XtensaPrStatus = Struct(
|
||||||
|
'si_signo' / Int32ul,
|
||||||
|
'si_code' / Int32ul,
|
||||||
|
'si_errno' / Int32ul,
|
||||||
|
'pr_cursig' / Int16ul,
|
||||||
|
'pr_pad0' / Int16ul,
|
||||||
|
'pr_sigpend' / Int32ul,
|
||||||
|
'pr_sighold' / Int32ul,
|
||||||
|
'pr_pid' / Int32ul,
|
||||||
|
'pr_ppid' / Int32ul,
|
||||||
|
'pr_pgrp' / Int32ul,
|
||||||
|
'pr_sid' / Int32ul,
|
||||||
|
'pr_utime' / Int64ul,
|
||||||
|
'pr_stime' / Int64ul,
|
||||||
|
'pr_cutime' / Int64ul,
|
||||||
|
'pr_cstime' / Int64ul,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def print_exc_regs_info(extra_info):
    """
    Print the register info by parsing extra_info
    :param extra_info: extra info data (flat sequence of words)
    :return: None
    """
    exccause = extra_info[1 + 2 * XtensaRegisters.EXCCAUSE_IDX + 1]
    exccause_str = XTENSA_EXCEPTION_CAUSE_DICT.get(exccause)
    if not exccause_str:
        exccause_str = ('Invalid EXCCAUSE code', 'Invalid EXCCAUSE description or not found.')
    print('exccause 0x%x (%s)' % (exccause, exccause_str[0]))
    print('excvaddr 0x%x' % extra_info[1 + 2 * XtensaRegisters.EXCVADDR_IDX + 1])
    print('epc1 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC1_IDX + 1])
    print('epc2 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC2_IDX + 1])
    print('epc3 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC3_IDX + 1])
    print('epc4 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC4_IDX + 1])
    print('epc5 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC5_IDX + 1])
    print('epc6 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC6_IDX + 1])
    print('epc7 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPC7_IDX + 1])
    print('eps2 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPS2_IDX + 1])
    print('eps3 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPS3_IDX + 1])
    print('eps4 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPS4_IDX + 1])
    print('eps5 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPS5_IDX + 1])
    print('eps6 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPS6_IDX + 1])
    print('eps7 0x%x' % extra_info[1 + 2 * XtensaRegisters.EPS7_IDX + 1])


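# The indexing in print_exc_regs_info() above relies on an assumed EXTRA_INFO
# layout (not spelled out in the original file): a flat word list of the form
# [crashed_task_addr, reg_id_0, reg_val_0, reg_id_1, reg_val_1, ...], so the
# value of slot IDX sits at 1 + 2 * IDX + 1.
def _example_extra_info_value(extra_info, idx):
    return extra_info[1 + 2 * idx + 1]   # skip the leading word and the id word

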
# from "gdb/xtensa-tdep.h"
|
||||||
|
# typedef struct
|
||||||
|
# {
|
||||||
|
# 0 xtensa_elf_greg_t pc;
|
||||||
|
# 1 xtensa_elf_greg_t ps;
|
||||||
|
# 2 xtensa_elf_greg_t lbeg;
|
||||||
|
# 3 xtensa_elf_greg_t lend;
|
||||||
|
# 4 xtensa_elf_greg_t lcount;
|
||||||
|
# 5 xtensa_elf_greg_t sar;
|
||||||
|
# 6 xtensa_elf_greg_t windowstart;
|
||||||
|
# 7 xtensa_elf_greg_t windowbase;
|
||||||
|
# 8..63 xtensa_elf_greg_t reserved[8+48];
|
||||||
|
# 64 xtensa_elf_greg_t ar[64];
|
||||||
|
# } xtensa_elf_gregset_t;
|
||||||
|
REG_PC_IDX = 0
|
||||||
|
REG_PS_IDX = 1
|
||||||
|
REG_LB_IDX = 2
|
||||||
|
REG_LE_IDX = 3
|
||||||
|
REG_LC_IDX = 4
|
||||||
|
REG_SAR_IDX = 5
|
||||||
|
# REG_WS_IDX = 6
|
||||||
|
# REG_WB_IDX = 7
|
||||||
|
REG_AR_START_IDX = 64
|
||||||
|
# REG_AR_NUM = 64
|
||||||
|
# FIXME: acc to xtensa_elf_gregset_t number of regs must be 128,
|
||||||
|
# but gdb complains when it less then 129
|
||||||
|
REG_NUM = 129
|
||||||
|
|
||||||
|
# XT_SOL_EXIT = 0
|
||||||
|
XT_SOL_PC = 1
|
||||||
|
XT_SOL_PS = 2
|
||||||
|
# XT_SOL_NEXT = 3
|
||||||
|
XT_SOL_AR_START = 4
|
||||||
|
XT_SOL_AR_NUM = 4
|
||||||
|
# XT_SOL_FRMSZ = 8
|
||||||
|
|
||||||
|
XT_STK_EXIT = 0
|
||||||
|
XT_STK_PC = 1
|
||||||
|
XT_STK_PS = 2
|
||||||
|
XT_STK_AR_START = 3
|
||||||
|
XT_STK_AR_NUM = 16
|
||||||
|
XT_STK_SAR = 19
|
||||||
|
XT_STK_EXCCAUSE = 20
|
||||||
|
XT_STK_EXCVADDR = 21
|
||||||
|
XT_STK_LBEG = 22
|
||||||
|
XT_STK_LEND = 23
|
||||||
|
XT_STK_LCOUNT = 24
|
||||||
|
XT_STK_FRMSZ = 25
|
||||||
|
|
||||||
|
|
||||||
|
class _TargetMethodsESP32(_TargetMethodsBase):
    @staticmethod
    def tcb_is_sane(tcb_addr, tcb_size):
        # TCBs must lie entirely within ESP32 internal DRAM (0x3ffae000..0x3fffffff)
        return not (tcb_addr < 0x3ffae000 or (tcb_addr + tcb_size) > 0x40000000)

    @staticmethod
    def stack_is_sane(sp):
        # stack pointers are expected in the same internal DRAM range
        return not (sp < 0x3ffae010 or sp > 0x3fffffff)

    @staticmethod
    def addr_is_fake(addr):
        # values outside any mapped ESP32 address range cannot be real pointers
        return (0x20000000 <= addr < 0x3f3fffff) or addr >= 0x80000000


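# Quick check of the ESP32 sanity bounds above (illustrative, not part of the
# original file); the example addresses are taken from the memory map shown in
# the expected test output further below.
def _example_esp32_sanity():
    assert _TargetMethodsESP32.tcb_is_sane(0x3ffb6260, 0x17c)       # .coredump.tasks.data, in DRAM
    assert _TargetMethodsESP32.stack_is_sane(0x3ffb4cf0)            # heap/stack area in DRAM
    assert not _TargetMethodsESP32.tcb_is_sane(0x3f400020, 0x17c)   # .flash.rodata, outside DRAM

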
class _ArchMethodsXtensa(_ArchMethodsBase):
    @staticmethod
    def get_registers_from_stack(data, grows_down):
        # pre-fill every EXTRA_INFO slot (EXCCAUSE, EXCVADDR, EPC/EPS) with zero,
        # keyed by the numeric slot index
        extra_regs = {v: 0 for v in XtensaRegisters().registers.values()}
        regs = [0] * REG_NUM
        # TODO: support for growing up stacks
        if not grows_down:
            raise ESPCoreDumpLoaderError('Growing up stacks are not supported for now!')
        ex_struct = Struct(
            'stack' / Int32ul[XT_STK_FRMSZ]
        )
        if len(data) < ex_struct.sizeof():
            raise ESPCoreDumpLoaderError('Too small stack to keep frame: %d bytes!' % len(data))

        stack = ex_struct.parse(data).stack
        # Stack frame type indicator is always the first item
        rc = stack[XT_STK_EXIT]
        if rc != 0:
            # exception frame (XT_STK_*), saved by the interrupt/exception entry code
            regs[REG_PC_IDX] = stack[XT_STK_PC]
            regs[REG_PS_IDX] = stack[XT_STK_PS]
            for i in range(XT_STK_AR_NUM):
                regs[REG_AR_START_IDX + i] = stack[XT_STK_AR_START + i]
            regs[REG_SAR_IDX] = stack[XT_STK_SAR]
            regs[REG_LB_IDX] = stack[XT_STK_LBEG]
            regs[REG_LE_IDX] = stack[XT_STK_LEND]
            regs[REG_LC_IDX] = stack[XT_STK_LCOUNT]
            # FIXME: crashed and some running tasks (e.g. prvIdleTask) have EXCM bit set
            # and GDB can not unwind callstack properly (it implies not windowed call0)
            if regs[REG_PS_IDX] & (1 << 5):
                regs[REG_PS_IDX] &= ~(1 << 4)
            if stack[XT_STK_EXCCAUSE] in XTENSA_EXCEPTION_CAUSE_DICT:
                extra_regs[XtensaRegisters.EXCCAUSE_IDX] = stack[XT_STK_EXCCAUSE]
            else:
                extra_regs[XtensaRegisters.EXCCAUSE_IDX] = INVALID_CAUSE_VALUE
            extra_regs[XtensaRegisters.EXCVADDR_IDX] = stack[XT_STK_EXCVADDR]
        else:
            # solicited frame (XT_SOL_*), saved by a voluntary context switch
            regs[REG_PC_IDX] = stack[XT_SOL_PC]
            regs[REG_PS_IDX] = stack[XT_SOL_PS]
            for i in range(XT_SOL_AR_NUM):
                regs[REG_AR_START_IDX + i] = stack[XT_SOL_AR_START + i]
            # nxt = stack[XT_SOL_NEXT]
        return regs, extra_regs

    @staticmethod
    def build_prstatus_data(tcb_addr, task_regs):
        return XtensaPrStatus.build({
            'si_signo': 0,
            'si_code': 0,
            'si_errno': 0,
            'pr_cursig': 0,  # TODO: set sig only for current/failed task
            'pr_pad0': 0,
            'pr_sigpend': 0,
            'pr_sighold': 0,
            'pr_pid': tcb_addr,
            'pr_ppid': 0,
            'pr_pgrp': 0,
            'pr_sid': 0,
            'pr_utime': 0,
            'pr_stime': 0,
            'pr_cutime': 0,
            'pr_cstime': 0,
        }) + Int32ul[len(task_regs)].build(task_regs)

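
# End-to-end sketch of how the pieces above fit together (illustrative only;
# the exception cause, PC/PS values and TCB address are made-up examples):
def _example_task_prstatus_note():
    frame = [0] * XT_STK_FRMSZ
    frame[XT_STK_EXIT] = 1          # non-zero -> exception (XT_STK_*) frame
    frame[XT_STK_PC] = 0x400d1234
    frame[XT_STK_PS] = 0x00060030
    frame[XT_STK_EXCCAUSE] = 29     # unknown causes are mapped to INVALID_CAUSE_VALUE
    data = Int32ul[XT_STK_FRMSZ].build(frame)
    regs, extra_regs = _ArchMethodsXtensa.get_registers_from_stack(data, grows_down=True)
    note = _ArchMethodsXtensa.build_prstatus_data(0x3ffb6260, regs)
    assert len(note) == XtensaPrStatus.sizeof() + 4 * REG_NUM
    return note, extra_regs
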
File diff suppressed because it is too large
@@ -151,12 +151,12 @@ Name Address Size Attrs
 .rtc.force_fast 0x3ff80000 0x0 RW
 .rtc_noinit 0x50000200 0x0 RW
 .rtc.force_slow 0x50000200 0x0 RW
-.iram0.vectors 0x40080000 0x404 R XA
+.iram0.vectors 0x40080000 0x403 R XA
 .iram0.text 0x40080404 0xa970 RWXA
 .dram0.data 0x3ffb0000 0x3474 RW A
 .noinit 0x3ffb3474 0x0 RW
 .flash.rodata 0x3f400020 0x6e4c RW A
-.flash.text 0x400d0020 0x188f0 R XA
+.flash.text 0x400d0020 0x188ef R XA
 .iram0.text_end 0x4008ad74 0x0 RW
 .dram0.heap_start 0x3ffb4cf0 0x0 RW
 .coredump.tasks.data 0x3ffb6260 0x17c RW
@@ -14,28 +14,38 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
 import os
+import sys
 import unittest
 
 try:
-    import espcoredump
+    from corefile.elf import ESPCoreDumpElfFile
+    from corefile.loader import ESPCoreDumpFileLoader, ESPCoreDumpLoaderError
 except ImportError:
     idf_path = os.getenv('IDF_PATH')
     if idf_path:
         sys.path.insert(0, os.path.join(idf_path, 'components', 'espcoredump'))
-    import espcoredump
+    else:
+        sys.path.insert(0, '..')
+    from corefile.elf import ESPCoreDumpElfFile
+    from corefile.loader import ESPCoreDumpFileLoader, ESPCoreDumpLoaderError
 
 
+class TestESPCoreDumpElfFile(unittest.TestCase):
+    def test_read_elf(self):
+        elf = ESPCoreDumpElfFile('core.elf')
+        assert elf.load_segments
+        assert elf.note_segments
+
+
 class TestESPCoreDumpFileLoader(unittest.TestCase):
-    def testESPCoreDumpFileLoaderWithoutB64(self):
-        loader = espcoredump.ESPCoreDumpFileLoader(path='coredump.b64', b64=False)
-        loader.cleanup()
+    def test_load_wrong_encode_core_bin(self):
+        with self.assertRaises(ESPCoreDumpLoaderError):
+            ESPCoreDumpFileLoader(path='coredump.b64', is_b64=False)
 
     def test_create_corefile(self):
-        loader = espcoredump.ESPCoreDumpFileLoader(path='coredump.b64', b64=True)
+        loader = ESPCoreDumpFileLoader(path='coredump.b64', is_b64=True)
         loader.create_corefile()
-        loader.cleanup()
 
 
 if __name__ == '__main__':
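
For reference, a minimal usage sketch of the refactored loader API exercised by the updated test above; only calls visible in this diff are used, and the file names are the test fixtures:

from corefile.elf import ESPCoreDumpElfFile
from corefile.loader import ESPCoreDumpFileLoader

loader = ESPCoreDumpFileLoader(path='coredump.b64', is_b64=True)  # base64-encoded dump read from flash
loader.create_corefile()                                          # convert it into an ELF core file

elf = ESPCoreDumpElfFile('core.elf')                              # open an existing ELF core file
assert elf.load_segments and elf.note_segments                    # loadable segments and note segments are present
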
@@ -2,10 +2,10 @@
 
 { coverage debug sys \
 && coverage erase \
-&& coverage run -a --source=espcoredump ../espcoredump.py --gdb-timeout-sec 5 info_corefile -m -t b64 -c coredump.b64 -s core.elf test.elf &> output \
+&& coverage run -a --source=corefile ../espcoredump.py --gdb-timeout-sec 5 info_corefile -m -t b64 -c coredump.b64 -s core.elf test.elf &> output \
 && diff expected_output output \
-&& coverage run -a --source=espcoredump ../espcoredump.py --gdb-timeout-sec 5 info_corefile -m -t elf -c core.elf test.elf &> output2 \
+&& coverage run -a --source=corefile ../espcoredump.py --gdb-timeout-sec 5 info_corefile -m -t elf -c core.elf test.elf &> output2 \
 && diff expected_output output2 \
-&& coverage run -a --source=espcoredump ./test_espcoredump.py \
-&& coverage report \
+&& coverage run -a --source=corefile ./test_espcoredump.py \
+&& coverage report ../corefile/elf.py ../corefile/gdb.py ../corefile/loader.py ../corefile/xtensa.py ../espcoredump.py \
 ; } || { echo 'The test for espcoredump has failed!'; exit 1; }
@@ -16,7 +16,7 @@ gdbgui==0.13.2.0
 # 0.13.2.1 supports Python 3.6+ only
 # Windows is not supported since 0.14.0.0. See https://github.com/cs01/gdbgui/issues/348
 pygdbmi<=0.9.0.2
-# The pygdbmi required max version 0.9.0.2 since 0.9.0.3 is not copatible with latest gdbgui (>=0.13.2.0)
+# The pygdbmi required max version 0.9.0.2 since 0.9.0.3 is not compatible with latest gdbgui (>=0.13.2.0)
 python-socketio<5; python_version<="2.7"
 
 kconfiglib==13.7.1
@@ -26,6 +26,10 @@ reedsolo>=1.5.3,<=1.5.4
 bitstring>=3.1.6
 ecdsa>=0.16.0
 
+# espcoredump requirements
+# This is the last version supports both 2.7 and 3.4
+construct==2.10.54
+
 # windows-curses are required in Windows command line but cannot be installed in MSYS2. A requirement like
 # "windows-curses; sys_platform == 'win32'" would want to install the package on both of them. There is no environment
 # marker for detecting MSYS2. So instead, a dummy custom package is used with "windows-curses" dependency for Windows
@@ -155,7 +155,7 @@ a15 0x0 0
 
 ======================= ALL MEMORY REGIONS ========================
 Name Address Size Attrs
-.text 0x400074 0x134 R XA
+.text 0x400074 0x133 R XA
 .eh_frame 0x4001a8 0x4 R A
 .ctors 0x4011ac 0x8 RW A
 .dtors 0x4011b4 0x8 RW A