mirror of
https://github.com/espressif/esp-idf.git
synced 2025-10-02 10:00:57 +02:00
Merge branch 'change/remove_legacy_unit_test_app' into 'master'
change(tools): removed the old legacy unit-test-app Closes IDF-13565 See merge request espressif/esp-idf!41859
This commit is contained in:
@@ -247,8 +247,6 @@
|
|||||||
|
|
||||||
/tools/tools.json @esp-idf-codeowners/tools @esp-idf-codeowners/toolchain @esp-idf-codeowners/debugging
|
/tools/tools.json @esp-idf-codeowners/tools @esp-idf-codeowners/toolchain @esp-idf-codeowners/debugging
|
||||||
|
|
||||||
/tools/unit-test-app/ @esp-idf-codeowners/system @esp-idf-codeowners/tools
|
|
||||||
|
|
||||||
# sort-order-reset
|
# sort-order-reset
|
||||||
|
|
||||||
/components/**/test_apps/**/*.py @esp-idf-codeowners/ci @esp-idf-codeowners/tools
|
/components/**/test_apps/**/*.py @esp-idf-codeowners/ci @esp-idf-codeowners/tools
|
||||||
|
@@ -160,28 +160,19 @@ Some old ways of disabling unit tests for targets, that have obvious disadvantag
|
|||||||
|
|
||||||
But please avoid using ``#else`` macro. When new target is added, the test case will fail at building stage, so that the maintainer will be aware of this, and choose one of the implementations explicitly.
|
But please avoid using ``#else`` macro. When new target is added, the test case will fail at building stage, so that the maintainer will be aware of this, and choose one of the implementations explicitly.
|
||||||
|
|
||||||
Building Unit Test App
|
Building Unit Test Apps
|
||||||
----------------------
|
-----------------------
|
||||||
|
|
||||||
Follow the setup instructions in the top-level esp-idf README. Make sure that ``IDF_PATH`` environment variable is set to point to the path of esp-idf top-level directory.
|
Follow the setup instructions in the top-level esp-idf README. Make sure that ``IDF_PATH`` environment variable is set to point to the path of esp-idf top-level directory.
|
||||||
|
|
||||||
Change into ``tools/unit-test-app`` directory to configure and build it:
|
Change into the test app directory to configure and build it:
|
||||||
|
|
||||||
* ``idf.py menuconfig`` - configure unit test app.
|
* ``idf.py menuconfig`` - configure unit test app.
|
||||||
* ``idf.py -T all build`` - build unit test app with tests for each component having tests in the ``test`` subdirectory.
|
* ``idf.py build`` - build unit test app.
|
||||||
* ``idf.py -T "xxx yyy" build`` - build unit test app with tests for some space-separated specific components (For instance: ``idf.py -T heap build`` - build unit tests only for ``heap`` component directory).
|
|
||||||
* ``idf.py -T all -E "xxx yyy" build`` - build unit test app with all unit tests, except for unit tests of some components (For instance: ``idf.py -T all -E "ulp mbedtls" build`` - build all unit tests excludes ``ulp`` and ``mbedtls`` components).
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Due to inherent limitations of Windows command prompt, following syntax has to be used in order to build unit-test-app with multiple components: ``idf.py -T xxx -T yyy build`` or with escaped quotes: ``idf.py -T \`"xxx yyy\`" build`` in PowerShell or ``idf.py -T \^"ssd1306 hts221\^" build`` in Windows command prompt.
|
|
||||||
|
|
||||||
When the build finishes, it will print instructions for flashing the chip. You can simply run ``idf.py flash`` to flash all build output.
|
When the build finishes, it will print instructions for flashing the chip. You can simply run ``idf.py flash`` to flash all build output.
|
||||||
|
|
||||||
You can also run ``idf.py -T all flash`` or ``idf.py -T xxx flash`` to build and flash. Everything needed will be rebuilt automatically before flashing.
|
|
||||||
|
|
||||||
Use menuconfig to set the serial port for flashing. For more information, see :idf_file:`tools/unit-test-app/README.md`.
|
|
||||||
|
|
||||||
Running Unit Tests
|
Running Unit Tests
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
|
@@ -53,3 +53,12 @@ The minimal supported CMake version has been upgraded to 3.22.1. In case you enc
|
|||||||
If updating the OS is not possible, you can install a recommended CMake version using the following command: ``./tools/idf_tools.py install cmake``.
|
If updating the OS is not possible, you can install a recommended CMake version using the following command: ``./tools/idf_tools.py install cmake``.
|
||||||
|
|
||||||
This affects ESP-IDF users who use system-provided CMake and custom CMake.
|
This affects ESP-IDF users who use system-provided CMake and custom CMake.
|
||||||
|
|
||||||
|
Unit-Test-App
|
||||||
|
-------------
|
||||||
|
|
||||||
|
The legacy unit-test-app has been moved out of ESP-IDF repository as no ESP-IDF component longer uses it for running tests. It is now available at the `component registry <https://components.espressif.com/components/espressif/unit-test-app>`_ and a test app can be created using the ``idf.py create-project-from-example`` command:
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
idf.py create-project-from-example espressif/unit-test-app:unit-test-app
|
||||||
|
@@ -180,8 +180,6 @@ DUT2 (slave) 终端::
|
|||||||
|
|
||||||
你还可以运行 ``idf.py -T all flash`` 或者 ``idf.py -T xxx flash`` 来编译并烧写,所有需要的文件都会在烧写之前自动重新编译。
|
你还可以运行 ``idf.py -T all flash`` 或者 ``idf.py -T xxx flash`` 来编译并烧写,所有需要的文件都会在烧写之前自动重新编译。
|
||||||
|
|
||||||
使用 ``menuconfig`` 可以设置烧写测试程序所使用的串口。更多信息,见 :idf_file:`tools/unit-test-app/README.md`。
|
|
||||||
|
|
||||||
运行单元测试
|
运行单元测试
|
||||||
--------------
|
--------------
|
||||||
|
|
||||||
|
@@ -8,7 +8,6 @@ set(EXTRA_COMPONENT_DIRS "../components")
|
|||||||
# Set the components to include the tests for.
|
# Set the components to include the tests for.
|
||||||
# This can be overridden from CMake cache:
|
# This can be overridden from CMake cache:
|
||||||
# - when invoking CMake directly: cmake -D TEST_COMPONENTS="xxxxx" ..
|
# - when invoking CMake directly: cmake -D TEST_COMPONENTS="xxxxx" ..
|
||||||
# - when using idf.py: idf.py -T xxxxx build
|
|
||||||
#
|
#
|
||||||
set(TEST_COMPONENTS "testable" CACHE STRING "List of components to test")
|
set(TEST_COMPONENTS "testable" CACHE STRING "List of components to test")
|
||||||
|
|
||||||
|
@@ -111,7 +111,6 @@ components_not_formatted_temporary:
|
|||||||
- "/tools/esp_app_trace/test/"
|
- "/tools/esp_app_trace/test/"
|
||||||
- "/tools/mocks/"
|
- "/tools/mocks/"
|
||||||
- "/tools/test_apps/"
|
- "/tools/test_apps/"
|
||||||
- "/tools/unit-test-app/"
|
|
||||||
|
|
||||||
components_not_formatted_permanent:
|
components_not_formatted_permanent:
|
||||||
# Files which are not supposed to be formatted.
|
# Files which are not supposed to be formatted.
|
||||||
|
@@ -341,7 +341,6 @@ if __name__ == '__main__':
|
|||||||
if check_all:
|
if check_all:
|
||||||
check_dirs = {IDF_PATH}
|
check_dirs = {IDF_PATH}
|
||||||
_exclude_dirs = [
|
_exclude_dirs = [
|
||||||
os.path.join(IDF_PATH, 'tools', 'unit-test-app'),
|
|
||||||
os.path.join(IDF_PATH, 'tools', 'test_build_system', 'build_test_app'),
|
os.path.join(IDF_PATH, 'tools', 'test_build_system', 'build_test_app'),
|
||||||
os.path.join(IDF_PATH, 'tools', 'templates', 'sample_project'),
|
os.path.join(IDF_PATH, 'tools', 'templates', 'sample_project'),
|
||||||
]
|
]
|
||||||
|
@@ -54,4 +54,3 @@ tools/templates/sample_component/main.c
|
|||||||
tools/templates/sample_project/CMakeLists.txt
|
tools/templates/sample_project/CMakeLists.txt
|
||||||
tools/templates/sample_project/main/CMakeLists.txt
|
tools/templates/sample_project/main/CMakeLists.txt
|
||||||
tools/templates/sample_project/main/main.c
|
tools/templates/sample_project/main/main.c
|
||||||
tools/unit-test-app/**/*
|
|
||||||
|
@@ -1,8 +0,0 @@
|
|||||||
# The following lines of boilerplate have to be in your project's
|
|
||||||
# CMakeLists in this exact order for cmake to work correctly
|
|
||||||
cmake_minimum_required(VERSION 3.22)
|
|
||||||
|
|
||||||
list(APPEND EXTRA_COMPONENT_DIRS "$ENV{IDF_PATH}/tools/test_apps/components")
|
|
||||||
|
|
||||||
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
|
|
||||||
project(unit-test-app)
|
|
@@ -1,36 +0,0 @@
|
|||||||
# Unit Test App
|
|
||||||
|
|
||||||
ESP-IDF unit tests are run using Unit Test App. The app can be built with the unit tests for a specific component. Unit tests are in `test` subdirectories of respective components.
|
|
||||||
|
|
||||||
# Building Unit Test App
|
|
||||||
|
|
||||||
## CMake
|
|
||||||
|
|
||||||
* Follow the setup instructions in the top-level esp-idf README.
|
|
||||||
* Set IDF_PATH environment variable to point to the path to the esp-idf top-level directory.
|
|
||||||
* Change into `tools/unit-test-app` directory
|
|
||||||
* `idf.py menuconfig` to configure the Unit Test App.
|
|
||||||
* `idf.py -T <component> -T <component> ... build` with `component` set to names of the components to be included in the test app. Or `idf.py -T all build` to build the test app with all the tests for components having `test` subdirectory.
|
|
||||||
* Follow the printed instructions to flash, or run `idf.py -p PORT flash`.
|
|
||||||
* Unit test have a few preset sdkconfigs. It provides command `idf.py ut-clean-config_name` and `idf.py ut-build-config_name` (where `config_name` is the file name under `unit-test-app/configs` folder) to build with preset configs. For example, you can use `idf.py -T all ut-build-default` to build with config file `unit-test-app/configs/default`. Built binary for this config will be copied to `unit-test-app/output/config_name` folder.
|
|
||||||
* You may extract the test cases presented in the built elf file by calling `ElfUnitTestParser.py <your_elf>`.
|
|
||||||
|
|
||||||
# Flash Size
|
|
||||||
|
|
||||||
The unit test partition table assumes a 4MB flash size. When testing `-T all`, this additional factory app partition size is required.
|
|
||||||
|
|
||||||
If building unit tests to run on a smaller flash size, edit `partition_table_unit_tests_app.csv` and use `-T <component> <component> ...` or instead of `-T all` tests don't fit in a smaller factory app partition (exact size will depend on configured options).
|
|
||||||
|
|
||||||
# Running Unit Tests
|
|
||||||
|
|
||||||
The unit test loader will prompt by showing a menu of available tests to run:
|
|
||||||
|
|
||||||
* Type a number to run a single test.
|
|
||||||
* `*` to run all tests.
|
|
||||||
* `[tagname]` to run tests with "tag"
|
|
||||||
* `![tagname]` to run tests without "tag" (`![ignore]` is very useful as it runs all CI-enabled tests.)
|
|
||||||
* `"test name here"` to run test with given name
|
|
||||||
|
|
||||||
# Testing Unit Tests with CI
|
|
||||||
|
|
||||||
For now we prefer to use component-based unit test to test in CI. Running unit-test-app in CI is being deprecated.
|
|
@@ -1,249 +0,0 @@
|
|||||||
import copy
|
|
||||||
import glob
|
|
||||||
import os
|
|
||||||
import os.path
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
|
|
||||||
def action_extensions(base_actions, project_path=os.getcwd()):
|
|
||||||
""" Describes extensions for unit tests. This function expects that actions "all" and "reconfigure" """
|
|
||||||
|
|
||||||
PROJECT_NAME = 'unit-test-app'
|
|
||||||
|
|
||||||
# List of unit-test-app configurations.
|
|
||||||
# Each file in configs/ directory defines a configuration. The format is the
|
|
||||||
# same as sdkconfig file. Configuration is applied on top of sdkconfig.defaults
|
|
||||||
# file from the project directory
|
|
||||||
CONFIG_NAMES = os.listdir(os.path.join(project_path, 'configs'))
|
|
||||||
|
|
||||||
# Build (intermediate) and output (artifact) directories
|
|
||||||
BUILDS_DIR = os.path.join(project_path, 'builds')
|
|
||||||
BINARIES_DIR = os.path.join(project_path, 'output')
|
|
||||||
|
|
||||||
def parse_file_to_dict(path, regex):
|
|
||||||
"""
|
|
||||||
Parse the config file at 'path'
|
|
||||||
|
|
||||||
Returns a dict of name:value.
|
|
||||||
"""
|
|
||||||
compiled_regex = re.compile(regex)
|
|
||||||
result = {}
|
|
||||||
with open(path) as f:
|
|
||||||
for line in f:
|
|
||||||
m = compiled_regex.match(line)
|
|
||||||
if m:
|
|
||||||
result[m.group(1)] = m.group(2)
|
|
||||||
return result
|
|
||||||
|
|
||||||
def parse_config(path):
|
|
||||||
"""
|
|
||||||
Expected format with default regex is "key=value"
|
|
||||||
"""
|
|
||||||
|
|
||||||
return parse_file_to_dict(path, r'^([^=]+)=(.+)$')
|
|
||||||
|
|
||||||
def ut_apply_config(ut_apply_config_name, ctx, args):
|
|
||||||
config_name = re.match(r'ut-apply-config-(.*)', ut_apply_config_name).group(1)
|
|
||||||
# Make sure that define_cache_entry is list
|
|
||||||
args.define_cache_entry = list(args.define_cache_entry)
|
|
||||||
new_cache_values = {}
|
|
||||||
sdkconfig_set = list(filter(lambda s: 'SDKCONFIG=' in s, args.define_cache_entry))
|
|
||||||
sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig')
|
|
||||||
|
|
||||||
if sdkconfig_set:
|
|
||||||
sdkconfig_path = sdkconfig_set[-1].split('=')[1]
|
|
||||||
sdkconfig_path = os.path.abspath(sdkconfig_path)
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.remove(sdkconfig_path)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if config_name in CONFIG_NAMES:
|
|
||||||
# Parse the sdkconfig for components to be included/excluded and tests to be run
|
|
||||||
config_path = os.path.join(project_path, 'configs', config_name)
|
|
||||||
config = parse_config(config_path)
|
|
||||||
|
|
||||||
target = config.get('CONFIG_IDF_TARGET', 'esp32').strip("'").strip('"')
|
|
||||||
|
|
||||||
print('Reconfigure: config %s, target %s' % (config_name, target))
|
|
||||||
|
|
||||||
# Clean up and set idf-target
|
|
||||||
base_actions['actions']['fullclean']['callback']('fullclean', ctx, args)
|
|
||||||
|
|
||||||
new_cache_values['EXCLUDE_COMPONENTS'] = config.get('EXCLUDE_COMPONENTS', "''")
|
|
||||||
new_cache_values['TEST_EXCLUDE_COMPONENTS'] = config.get('TEST_EXCLUDE_COMPONENTS', "''")
|
|
||||||
new_cache_values['TEST_COMPONENTS'] = config.get('TEST_COMPONENTS', "''")
|
|
||||||
new_cache_values['TESTS_ALL'] = int(new_cache_values['TEST_COMPONENTS'] == "''")
|
|
||||||
new_cache_values['IDF_TARGET'] = target
|
|
||||||
new_cache_values['SDKCONFIG_DEFAULTS'] = ';'.join([os.path.join(project_path, 'sdkconfig.defaults'), config_path])
|
|
||||||
|
|
||||||
args.define_cache_entry.extend(['%s=%s' % (k, v) for k, v in new_cache_values.items()])
|
|
||||||
|
|
||||||
reconfigure = base_actions['actions']['reconfigure']['callback']
|
|
||||||
reconfigure(None, ctx, args)
|
|
||||||
|
|
||||||
# This target builds the configuration. It does not currently track dependencies,
|
|
||||||
# but is good enough for CI builds if used together with clean-all-configs.
|
|
||||||
# For local builds, use 'apply-config-NAME' target and then use normal 'all'
|
|
||||||
# and 'flash' targets.
|
|
||||||
def ut_build(ut_build_name, ctx, args):
|
|
||||||
# Create a copy of the passed arguments to prevent arg modifications to accrue if
|
|
||||||
# all configs are being built
|
|
||||||
build_args = copy.copy(args)
|
|
||||||
|
|
||||||
config_name = re.match(r'ut-build-(.*)', ut_build_name).group(1)
|
|
||||||
|
|
||||||
if config_name in CONFIG_NAMES:
|
|
||||||
build_args.build_dir = os.path.join(BUILDS_DIR, config_name)
|
|
||||||
|
|
||||||
src = os.path.join(BUILDS_DIR, config_name)
|
|
||||||
dest = os.path.join(BINARIES_DIR, config_name)
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.makedirs(dest)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Build, tweaking paths to sdkconfig and sdkconfig.defaults
|
|
||||||
ut_apply_config('ut-apply-config-' + config_name, ctx, build_args)
|
|
||||||
|
|
||||||
build_target = base_actions['actions']['all']['callback']
|
|
||||||
|
|
||||||
build_target('all', ctx, build_args)
|
|
||||||
|
|
||||||
# Copy artifacts to the output directory
|
|
||||||
shutil.copyfile(
|
|
||||||
os.path.join(build_args.project_dir, 'sdkconfig'),
|
|
||||||
os.path.join(dest, 'sdkconfig'),
|
|
||||||
)
|
|
||||||
|
|
||||||
binaries = [PROJECT_NAME + x for x in ['.elf', '.bin', '.map']]
|
|
||||||
|
|
||||||
for binary in binaries:
|
|
||||||
shutil.copyfile(os.path.join(src, binary), os.path.join(dest, binary))
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.mkdir(os.path.join(dest, 'bootloader'))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
shutil.copyfile(
|
|
||||||
os.path.join(src, 'bootloader', 'bootloader.bin'),
|
|
||||||
os.path.join(dest, 'bootloader', 'bootloader.bin'),
|
|
||||||
)
|
|
||||||
|
|
||||||
for partition_table in glob.glob(os.path.join(src, 'partition_table', 'partition-table*.bin')):
|
|
||||||
try:
|
|
||||||
os.mkdir(os.path.join(dest, 'partition_table'))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
shutil.copyfile(
|
|
||||||
partition_table,
|
|
||||||
os.path.join(dest, 'partition_table', os.path.basename(partition_table)),
|
|
||||||
)
|
|
||||||
|
|
||||||
shutil.copyfile(
|
|
||||||
os.path.join(src, 'flasher_args.json'),
|
|
||||||
os.path.join(dest, 'flasher_args.json'),
|
|
||||||
)
|
|
||||||
|
|
||||||
binaries = glob.glob(os.path.join(src, '*.bin'))
|
|
||||||
binaries = [os.path.basename(s) for s in binaries]
|
|
||||||
|
|
||||||
for binary in binaries:
|
|
||||||
shutil.copyfile(os.path.join(src, binary), os.path.join(dest, binary))
|
|
||||||
|
|
||||||
def ut_clean(ut_clean_name, ctx, args):
|
|
||||||
config_name = re.match(r'ut-clean-(.*)', ut_clean_name).group(1)
|
|
||||||
if config_name in CONFIG_NAMES:
|
|
||||||
shutil.rmtree(os.path.join(BUILDS_DIR, config_name), ignore_errors=True)
|
|
||||||
shutil.rmtree(os.path.join(BINARIES_DIR, config_name), ignore_errors=True)
|
|
||||||
|
|
||||||
def test_component_callback(ctx, global_args, tasks):
|
|
||||||
""" Convert the values passed to the -T and -E parameter to corresponding cache entry definitions TESTS_ALL and TEST_COMPONENTS """
|
|
||||||
test_components = global_args.test_components
|
|
||||||
test_exclude_components = global_args.test_exclude_components
|
|
||||||
|
|
||||||
cache_entries = {}
|
|
||||||
|
|
||||||
if test_components:
|
|
||||||
if 'all' in test_components:
|
|
||||||
cache_entries['TESTS_ALL'] = 1
|
|
||||||
cache_entries['TEST_COMPONENTS'] = "''"
|
|
||||||
else:
|
|
||||||
cache_entries['TESTS_ALL'] = 0
|
|
||||||
cache_entries['TEST_COMPONENTS'] = ' '.join(test_components)
|
|
||||||
|
|
||||||
if test_exclude_components:
|
|
||||||
cache_entries['TEST_EXCLUDE_COMPONENTS'] = ' '.join(test_exclude_components)
|
|
||||||
|
|
||||||
if cache_entries:
|
|
||||||
global_args.define_cache_entry = list(global_args.define_cache_entry)
|
|
||||||
global_args.define_cache_entry.extend(['%s=%s' % (k, v) for k, v in cache_entries.items()])
|
|
||||||
|
|
||||||
# Add global options
|
|
||||||
extensions = {
|
|
||||||
'global_options': [{
|
|
||||||
'names': ['-T', '--test-components'],
|
|
||||||
'help': 'Specify the components to test.',
|
|
||||||
'scope': 'shared',
|
|
||||||
'multiple': True,
|
|
||||||
}, {
|
|
||||||
'names': ['-E', '--test-exclude-components'],
|
|
||||||
'help': 'Specify the components to exclude from testing.',
|
|
||||||
'scope': 'shared',
|
|
||||||
'multiple': True,
|
|
||||||
}],
|
|
||||||
'global_action_callbacks': [test_component_callback],
|
|
||||||
'actions': {},
|
|
||||||
}
|
|
||||||
|
|
||||||
# This generates per-config targets (clean, build, apply-config).
|
|
||||||
build_all_config_deps = []
|
|
||||||
clean_all_config_deps = []
|
|
||||||
|
|
||||||
for config in CONFIG_NAMES:
|
|
||||||
config_build_action_name = 'ut-build-' + config
|
|
||||||
config_clean_action_name = 'ut-clean-' + config
|
|
||||||
config_apply_config_action_name = 'ut-apply-config-' + config
|
|
||||||
|
|
||||||
extensions['actions'][config_build_action_name] = {
|
|
||||||
'callback':
|
|
||||||
ut_build,
|
|
||||||
'help':
|
|
||||||
'Build unit-test-app with configuration provided in configs/NAME. ' +
|
|
||||||
'Build directory will be builds/%s/, ' % config_build_action_name +
|
|
||||||
'output binaries will be under output/%s/' % config_build_action_name,
|
|
||||||
}
|
|
||||||
|
|
||||||
extensions['actions'][config_clean_action_name] = {
|
|
||||||
'callback': ut_clean,
|
|
||||||
'help': 'Remove build and output directories for configuration %s.' % config_clean_action_name,
|
|
||||||
}
|
|
||||||
|
|
||||||
extensions['actions'][config_apply_config_action_name] = {
|
|
||||||
'callback':
|
|
||||||
ut_apply_config,
|
|
||||||
'help':
|
|
||||||
'Generates configuration based on configs/%s in sdkconfig file.' % config_apply_config_action_name +
|
|
||||||
'After this, normal all/flash targets can be used. Useful for development/debugging.',
|
|
||||||
}
|
|
||||||
|
|
||||||
build_all_config_deps.append(config_build_action_name)
|
|
||||||
clean_all_config_deps.append(config_clean_action_name)
|
|
||||||
|
|
||||||
extensions['actions']['ut-build-all-configs'] = {
|
|
||||||
'callback': ut_build,
|
|
||||||
'help': 'Build all configurations defined in configs/ directory.',
|
|
||||||
'dependencies': build_all_config_deps,
|
|
||||||
}
|
|
||||||
|
|
||||||
extensions['actions']['ut-clean-all-configs'] = {
|
|
||||||
'callback': ut_clean,
|
|
||||||
'help': 'Remove build and output directories for all configurations defined in configs/ directory.',
|
|
||||||
'dependencies': clean_all_config_deps,
|
|
||||||
}
|
|
||||||
|
|
||||||
return extensions
|
|
@@ -1,2 +0,0 @@
|
|||||||
idf_component_register(SRCS "app_main.c"
|
|
||||||
INCLUDE_DIRS "")
|
|
@@ -1,6 +0,0 @@
|
|||||||
#include "test_utils.h"
|
|
||||||
|
|
||||||
void app_main(void)
|
|
||||||
{
|
|
||||||
test_main();
|
|
||||||
}
|
|
@@ -1,17 +0,0 @@
|
|||||||
# Special partition table for unit test app
|
|
||||||
#
|
|
||||||
# Name, Type, SubType, Offset, Size, Flags
|
|
||||||
# Note: if you have increased the bootloader size, make sure to update the offsets to avoid overlap
|
|
||||||
nvs, data, nvs, 0xb000, 0x5000
|
|
||||||
otadata, data, ota, 0x10000, 0x2000
|
|
||||||
phy_init, data, phy, 0x12000, 0x1000
|
|
||||||
factory, 0, 0, 0x20000, 0x260000
|
|
||||||
# these OTA partitions are used for tests, but can't fit real OTA apps in them
|
|
||||||
# (done this way to reduce total flash usage.)
|
|
||||||
ota_0, 0, ota_0, , 64K
|
|
||||||
ota_1, 0, ota_1, , 64K
|
|
||||||
# flash_test partition used for SPI flash tests, WL FAT tests, and SPIFFS tests
|
|
||||||
flash_test, data, fat, , 528K
|
|
||||||
nvs_key, data, nvs_keys, , 0x1000, encrypted
|
|
||||||
|
|
||||||
# Note: still 1MB of a 4MB flash left free for some other purpose
|
|
|
@@ -1,17 +0,0 @@
|
|||||||
# Special partition table for unit test app
|
|
||||||
#
|
|
||||||
# Name, Type, SubType, Offset, Size, Flags
|
|
||||||
# Note: if you have increased the bootloader size, make sure to update the offsets to avoid overlap
|
|
||||||
nvs, data, nvs, 0xb000, 0x5000
|
|
||||||
otadata, data, ota, 0x10000, 0x2000
|
|
||||||
phy_init, data, phy, 0x12000, 0x1000
|
|
||||||
factory, 0, 0, 0x20000, 0x150000
|
|
||||||
# these OTA partitions are used for tests, but can't fit real OTA apps in them
|
|
||||||
# (done this way to reduce total flash usage.)
|
|
||||||
ota_0, 0, ota_0, , 64K
|
|
||||||
ota_1, 0, ota_1, , 64K
|
|
||||||
# flash_test partition used for SPI flash tests, WL FAT tests, and SPIFFS tests
|
|
||||||
flash_test, data, fat, , 320K
|
|
||||||
nvs_key, data, nvs_keys, , 0x1000, encrypted
|
|
||||||
|
|
||||||
# Note: occupied 1.85MB in the 2MB flash
|
|
|
@@ -1,11 +0,0 @@
|
|||||||
# Special partition table for unit test app_update
|
|
||||||
# Name, Type, SubType, Offset, Size, Flags
|
|
||||||
nvs, data, nvs, , 0x4000
|
|
||||||
otadata, data, ota, , 0x2000
|
|
||||||
phy_init, data, phy, , 0x1000
|
|
||||||
factory, 0, 0, , 0xB0000
|
|
||||||
ota_0, 0, ota_0, , 0xB0000
|
|
||||||
ota_1, 0, ota_1, , 0xB0000
|
|
||||||
test, 0, test, , 0xB0000
|
|
||||||
# flash_test partition used for SPI flash tests, WL FAT tests, and SPIFFS tests
|
|
||||||
flash_test, data, fat, , 528K
|
|
|
@@ -1,11 +0,0 @@
|
|||||||
# Special partition table for unit test app_update
|
|
||||||
# Name, Type, SubType, Offset, Size, Flags
|
|
||||||
nvs, data, nvs, , 0x4000
|
|
||||||
otadata, data, ota, , 0x2000
|
|
||||||
phy_init, data, phy, , 0x1000
|
|
||||||
factory, 0, 0, , 0x70000
|
|
||||||
ota_0, 0, ota_0, , 0x70000
|
|
||||||
ota_1, 0, ota_1, , 0x70000
|
|
||||||
test, 0, test, , 0x70000
|
|
||||||
# flash_test partition used for SPI flash tests, WL FAT tests, and SPIFFS tests
|
|
||||||
flash_test, data, fat, , 128K
|
|
|
@@ -1,19 +0,0 @@
|
|||||||
CONFIG_BOOTLOADER_LOG_LEVEL_WARN=y
|
|
||||||
CONFIG_ESPTOOLPY_FLASHSIZE_4MB=y
|
|
||||||
CONFIG_PARTITION_TABLE_CUSTOM=y
|
|
||||||
CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partition_table_unit_test_app.csv"
|
|
||||||
CONFIG_PARTITION_TABLE_FILENAME="partition_table_unit_test_app.csv"
|
|
||||||
CONFIG_PARTITION_TABLE_OFFSET=0x8000
|
|
||||||
CONFIG_FREERTOS_HZ=1000
|
|
||||||
CONFIG_FREERTOS_WATCHPOINT_END_OF_STACK=y
|
|
||||||
CONFIG_HEAP_POISONING_COMPREHENSIVE=y
|
|
||||||
CONFIG_SPI_FLASH_ENABLE_COUNTERS=y
|
|
||||||
CONFIG_ESP_TASK_WDT_INIT=n
|
|
||||||
CONFIG_SPI_FLASH_DANGEROUS_WRITE_FAILS=y
|
|
||||||
CONFIG_COMPILER_STACK_CHECK_MODE_STRONG=y
|
|
||||||
CONFIG_COMPILER_STACK_CHECK=y
|
|
||||||
CONFIG_ADC_DISABLE_DAC=n
|
|
||||||
CONFIG_COMPILER_WARN_WRITE_STRINGS=y
|
|
||||||
CONFIG_SPI_MASTER_IN_IRAM=y
|
|
||||||
CONFIG_EFUSE_VIRTUAL=y
|
|
||||||
CONFIG_UNITY_ENABLE_BACKTRACE_ON_FAIL=y
|
|
@@ -1,4 +0,0 @@
|
|||||||
CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ_240=y
|
|
||||||
CONFIG_XTAL_FREQ_AUTO=y
|
|
||||||
CONFIG_SPI_FLASH_SHARE_SPI1_BUS=y
|
|
||||||
CONFIG_SPIRAM_BANKSWITCH_ENABLE=n
|
|
@@ -1,7 +0,0 @@
|
|||||||
CONFIG_ESP_SYSTEM_MEMPROT=n
|
|
||||||
CONFIG_ESPTOOLPY_FLASHSIZE_2MB=y
|
|
||||||
CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partition_table_unit_test_app_2m.csv"
|
|
||||||
CONFIG_PARTITION_TABLE_FILENAME="partition_table_unit_test_app_2m.csv"
|
|
||||||
|
|
||||||
# 2MB partition table has a FAT partition too small for 4k sectors
|
|
||||||
CONFIG_WL_SECTOR_SIZE_512=y
|
|
@@ -1 +0,0 @@
|
|||||||
CONFIG_ESP_SYSTEM_MEMPROT=n
|
|
@@ -1,2 +0,0 @@
|
|||||||
CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ_240=y
|
|
||||||
CONFIG_ESP_SYSTEM_MEMPROT=n
|
|
@@ -1,2 +0,0 @@
|
|||||||
CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ_240=y
|
|
||||||
CONFIG_ESP_SYSTEM_MEMPROT=n
|
|
@@ -1,163 +0,0 @@
|
|||||||
# This file is used to process section data generated by `objdump -s`
|
|
||||||
import re
|
|
||||||
|
|
||||||
|
|
||||||
class Section(object):
|
|
||||||
"""
|
|
||||||
One Section of section table. contains info about section name, address and raw data
|
|
||||||
"""
|
|
||||||
SECTION_START_PATTERN = re.compile(b'Contents of section (.+?):')
|
|
||||||
DATA_PATTERN = re.compile(b'([0-9a-f]{4,8})')
|
|
||||||
|
|
||||||
def __init__(self, name, start_address, data):
|
|
||||||
self.name = name
|
|
||||||
self.start_address = start_address
|
|
||||||
self.data = data
|
|
||||||
|
|
||||||
def __contains__(self, item):
|
|
||||||
""" check if the section name and address match this section """
|
|
||||||
if (item['section'] == self.name or item['section'] == 'any') \
|
|
||||||
and (self.start_address <= item['address'] < (self.start_address + len(self.data))):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def __getitem__(self, item):
|
|
||||||
"""
|
|
||||||
process slice.
|
|
||||||
convert absolute address to relative address in current section and return slice result
|
|
||||||
"""
|
|
||||||
if isinstance(item, int):
|
|
||||||
return self.data[item - self.start_address]
|
|
||||||
elif isinstance(item, slice):
|
|
||||||
start = item.start if item.start is None else item.start - self.start_address
|
|
||||||
stop = item.stop if item.stop is None else item.stop - self.start_address
|
|
||||||
return self.data[start:stop]
|
|
||||||
return self.data[item]
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return '%s [%08x - %08x]' % (self.name, self.start_address, self.start_address + len(self.data))
|
|
||||||
|
|
||||||
__repr__ = __str__
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def parse_raw_data(cls, raw_data):
|
|
||||||
"""
|
|
||||||
process raw data generated by `objdump -s`, create section and return un-processed lines
|
|
||||||
:param raw_data: lines of raw data generated by `objdump -s`
|
|
||||||
:return: one section, un-processed lines
|
|
||||||
"""
|
|
||||||
name = ''
|
|
||||||
data = ''
|
|
||||||
start_address = 0
|
|
||||||
# first find start line
|
|
||||||
for i, line in enumerate(raw_data):
|
|
||||||
if b'Contents of section ' in line: # do strcmp first to speed up
|
|
||||||
match = cls.SECTION_START_PATTERN.search(line)
|
|
||||||
if match is not None:
|
|
||||||
name = match.group(1)
|
|
||||||
raw_data = raw_data[i + 1:]
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
# do some error handling
|
|
||||||
raw_data = [b''] # add a dummy first data line
|
|
||||||
|
|
||||||
def process_data_line(line_to_process):
|
|
||||||
# first remove the ascii part
|
|
||||||
hex_part = line_to_process.split(b' ')[0]
|
|
||||||
# process rest part
|
|
||||||
data_list = cls.DATA_PATTERN.findall(hex_part)
|
|
||||||
try:
|
|
||||||
_address = int(data_list[0], base=16)
|
|
||||||
except IndexError:
|
|
||||||
_address = -1
|
|
||||||
|
|
||||||
def hex_to_str(hex_data):
|
|
||||||
if len(hex_data) % 2 == 1:
|
|
||||||
hex_data = b'0' + hex_data # append zero at the beginning
|
|
||||||
_length = len(hex_data)
|
|
||||||
return ''.join([chr(int(hex_data[_i:_i + 2], base=16))
|
|
||||||
for _i in range(0, _length, 2)])
|
|
||||||
|
|
||||||
return _address, ''.join([hex_to_str(x) for x in data_list[1:]])
|
|
||||||
|
|
||||||
# handle first line:
|
|
||||||
address, _data = process_data_line(raw_data[0])
|
|
||||||
if address != -1:
|
|
||||||
start_address = address
|
|
||||||
data += _data
|
|
||||||
raw_data = raw_data[1:]
|
|
||||||
for i, line in enumerate(raw_data):
|
|
||||||
address, _data = process_data_line(line)
|
|
||||||
if address == -1:
|
|
||||||
raw_data = raw_data[i:]
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
data += _data
|
|
||||||
else:
|
|
||||||
# do error handling
|
|
||||||
raw_data = []
|
|
||||||
|
|
||||||
section = cls(name, start_address, data) if start_address != -1 else None
|
|
||||||
unprocessed_data = None if len(raw_data) == 0 else raw_data
|
|
||||||
return section, unprocessed_data
|
|
||||||
|
|
||||||
|
|
||||||
class SectionTable(object):
    """Lookup table over the sections of an objdump ``-s`` dump.

    Parses the raw dump line-by-line into ``Section`` objects and offers
    address-based reads of integers and NUL-terminated strings.
    """

    def __init__(self, file_name):
        # Consume the dump file section by section; parse_raw_data hands back
        # whatever it did not process so the loop can continue from there.
        with open(file_name, 'rb') as dump_file:
            remaining = dump_file.readlines()
        self.table = []
        while remaining:
            parsed, remaining = Section.parse_raw_data(remaining)
            self.table.append(parsed)

    def get_unsigned_int(self, section, address, size=4, endian='LE'):
        """Read an unsigned integer out of the section table.

        :param section: section name; use "any" will only match with address
        :param address: start address
        :param size: size in bytes
        :param endian: LE or BE
        :return: int, or None when no section matches
        """
        if address % 4 != 0 or size % 4 != 0:
            print('warning: try to access without 4 bytes aligned')
        key = {'address': address, 'section': section}
        result = None
        for candidate in self.table:
            if key not in candidate:
                continue
            raw = candidate[address:address + size]
            result = 0
            for offset in range(size):
                if endian == 'LE':
                    shift = offset * 8
                elif endian == 'BE':
                    shift = (size - offset - 1) * 8
                else:
                    print('only support LE or BE for parameter endian')
                    assert False
                result += ord(raw[offset]) << shift
            break
        return result

    def get_string(self, section, address):
        """Read a '\0'-terminated string out of the section table.

        :param section: section name; use "any" will only match with address
        :param address: start address
        :return: string (terminator stripped), or None when no section matches
        """
        key = {'address': address, 'section': section}
        for candidate in self.table:
            if key in candidate:
                data = candidate[address:]
                # Truncate at the first NUL; keep the whole tail if none found.
                for pos, ch in enumerate(data):
                    if ch == '\0':
                        data = data[:pos]
                        break
                return data
        return None
|
|
@@ -1,83 +0,0 @@
|
|||||||
# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
|
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
|
||||||
import argparse
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from typing import Dict, List
|
|
||||||
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
try:
|
|
||||||
import CreateSectionTable
|
|
||||||
except ImportError:
|
|
||||||
sys.path.append(os.path.expandvars(os.path.join('$IDF_PATH', 'tools', 'unit-test-app', 'tools')))
|
|
||||||
import CreateSectionTable
|
|
||||||
|
|
||||||
|
|
||||||
def get_target_objdump(idf_target: str) -> str:
    """Return the objdump executable name for the given IDF target.

    Xtensa chips each get a per-chip toolchain prefix, the listed RISC-V
    chips share one, and any unknown target falls back to plain 'objdump'.
    """
    xtensa_targets = ('esp32', 'esp32s2', 'esp32s3')
    riscv_targets = ('esp32c2', 'esp32c3')
    if idf_target in xtensa_targets:
        prefix = 'xtensa-{}-elf-'.format(idf_target)
    elif idf_target in riscv_targets:
        prefix = 'riscv32-esp-elf-'
    else:
        prefix = ''
    return prefix + 'objdump'
|
|
||||||
|
|
||||||
|
|
||||||
def parse_elf_test_cases(elf_file: str, idf_target: str) -> List[Dict]:
    """Extract unit-test-case metadata from an ELF file.

    Runs the target toolchain's objdump over *elf_file*, builds a section
    table from the dump, then reads every ``test_desc`` symbol to recover the
    test name, description and function count embedded in the binary.

    :param elf_file: path of the ELF file to inspect
    :param idf_target: IDF target name (e.g. ``esp32``), selects the objdump
    :return: list of dicts with keys ``name``, ``desc`` and ``function_count``
    :raises Exception: if objdump fails or no test cases are found
    """
    objdump = get_target_objdump(idf_target)

    # NOTE(review): elf_file is interpolated into a shell command (shell=True),
    # so a path with shell metacharacters would break or be executed. Tolerable
    # only because this tool runs on developer-controlled build artifacts.
    try:
        subprocess.check_output('{} -s {} > section_table.tmp'.format(objdump, elf_file), shell=True)
        table = CreateSectionTable.SectionTable('section_table.tmp')
    except subprocess.CalledProcessError as err:
        # Chain the original failure instead of discarding it.
        raise Exception('Can\'t resolve elf file. File not found.') from err
    finally:
        # The temp file may not exist if the command failed early; removing it
        # unconditionally would raise FileNotFoundError and mask the real error.
        if os.path.exists('section_table.tmp'):
            os.remove('section_table.tmp')

    bin_test_cases = []
    try:
        subprocess.check_output('{} -t {} | grep test_desc > case_address.tmp'.format(objdump, elf_file),
                                shell=True)

        with open('case_address.tmp', 'rb') as input_f:
            for line in input_f:
                # process symbol table like: "3ffb4310 l O .dram0.data 00000018 test_desc_33$5010"
                sections = line.split()
                test_addr = int(sections[0], 16)  # symbol address (hex column)
                section = sections[3]             # section the symbol lives in

                # test_desc struct layout: name ptr at +0, desc ptr at +4,
                # function count at +20; the pointed-to strings may be anywhere.
                name_addr = table.get_unsigned_int(section, test_addr, 4)
                desc_addr = table.get_unsigned_int(section, test_addr + 4, 4)
                tc = {
                    'name': table.get_string('any', name_addr),
                    'desc': table.get_string('any', desc_addr),
                    'function_count': table.get_unsigned_int(section, test_addr + 20, 4),
                }
                bin_test_cases.append(tc)
    except subprocess.CalledProcessError as err:
        raise Exception('Test cases not found') from err
    finally:
        if os.path.exists('case_address.tmp'):
            os.remove('case_address.tmp')

    return bin_test_cases
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('elf_file', help='Elf file to parse')
    parser.add_argument('-t', '--idf_target',
                        type=str, default=os.environ.get('IDF_TARGET', ''),
                        help='Target of the elf, e.g. esp32s2')
    parser.add_argument('-o', '--output_file',
                        type=str, default='elf_test_cases.yml',
                        # Fixed help text: it was copy-pasted from --idf_target.
                        help='Output YAML file, default: elf_test_cases.yml')
    args = parser.parse_args()

    # 'assert' is stripped under python -O; fail with a proper CLI error instead.
    if not args.idf_target:
        parser.error('IDF target not set; pass --idf_target or set the IDF_TARGET environment variable')

    test_cases = parse_elf_test_cases(args.elf_file, args.idf_target)
    with open(args.output_file, 'w') as out_file:
        yaml.dump(test_cases, out_file, default_flow_style=False)
|
|
Reference in New Issue
Block a user