Merge branch 'feat/ble_log_v2_with_log_compression_v5.5' into 'release/v5.5'

feat(ble): Support ble log compression(draft)(v5.5)

See merge request espressif/esp-idf!41757
This commit is contained in:
Island
2025-09-24 10:28:17 +08:00
22 changed files with 3854 additions and 1 deletions

View File

@@ -954,6 +954,22 @@ set(bt_priv_requires
esp_gdbstub
)
# Log compression rewrites selected BLE sources at build time; the helper
# CMakeLists under log_compression decides whether the scheme is usable
# and reports back through LOG_COMPRESSION_TARGET / LOG_COMPRESS_*.
if(CONFIG_BLE_COMPRESSED_LOG_ENABLE)
# BT_ROOT_PATH is consumed by the log_compression subdirectory to locate
# the Python scripts and the component sources.
set(BT_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}")
# When log compression is enabled, selected logs are replaced
# by auto-generated macros that emit pre-encoded data.
# This eliminates the original format strings, reducing firmware size and
# removing runtime formatting overhead, so logs are produced faster and
# with less system impact.
add_subdirectory(common/ble_log/extension/log_compression)
# LOG_COMPRESSION_TARGET is non-empty only when the subdirectory managed
# to set up the pipeline (module enabled, Python + packages available).
if(LOG_COMPRESSION_TARGET)
# Compression active: compile the rewritten source list in place of srcs.
set(srcs ${LOG_COMPRESS_SRCS})
set(include_dirs ${LOG_COMPRESS_INCLUDE_DIRS})
else()
# Compression unavailable: keep the original sources but still expose the
# compression include directories (headers may still be referenced while
# the Kconfig option is on).
list(APPEND include_dirs ${LOG_COMPRESS_INCLUDE_DIRS})
endif()
endif()
idf_component_register(SRCS "${srcs}"
INCLUDE_DIRS "${include_dirs}"
PRIV_INCLUDE_DIRS "${priv_include_dirs}"
@@ -961,6 +977,12 @@ idf_component_register(SRCS "${srcs}"
PRIV_REQUIRES "${bt_priv_requires}"
LDFRAGMENTS "${ldscripts}")
# Make the component wait for the log-compression generator target, so the
# rewritten sources exist before they are compiled.
if(CONFIG_BLE_COMPRESSED_LOG_ENABLE AND LOG_COMPRESSION_TARGET)
    add_dependencies(${COMPONENT_LIB} ${LOG_COMPRESSION_TARGET})
endif()
if(CONFIG_BT_ENABLED)
target_compile_options(${COMPONENT_LIB} PRIVATE -Wno-implicit-fallthrough -Wno-unused-const-variable)
if(CONFIG_IDF_TARGET_ESP32)

View File

@@ -166,4 +166,8 @@ if BLE_LOG_ENABLED
help
GPIO number for UART TX
endif
menu "Settings of BLE Log Compression"
source "$IDF_PATH/components/bt/common/ble_log/extension/log_compression/Kconfig.in"
endmenu
endif

View File

@@ -0,0 +1,202 @@
# Modules selected for compression and the source sub-paths they cover.
set(LOG_COMPRESSED_MODULE "")
set(LOG_COMPRESSED_MODULE_CODE_PATH "")
# The Python scripts place the rewritten copies of the sources here.
set(LOG_COMPRESSED_SRCS_DIR "${CMAKE_BINARY_DIR}/ble_log/.compressed_srcs")
# default config value for ble mesh module
# (these are substituted into module_info.yml.in via configure_file below)
set(BLE_MESH_CODE_PATH "")
set(BLE_MESH_LOG_INDEX_HEADER "\"\"")
set(BLE_MESH_TAGS "")
set(BLE_MESH_TAGS_PRESERVE "")
# default config value for host module
set(HOST_CODE_PATH "")
set(HOST_LOG_INDEX_HEADER "\"\"")
set(BLE_HOST_TAGS "")
set(BLE_HOST_TAGS_PRESERVE "")
if(CONFIG_BLE_MESH_COMPRESSED_LOG_ENABLE)
list(APPEND LOG_COMPRESSED_MODULE "BLE_MESH")
# Pre-create an empty log-index header so the first configure/compile can
# resolve the include before the Python generator produces the real one.
if(NOT EXISTS "${CMAKE_BINARY_DIR}/ble_log/include/mesh_log_index.h")
file(WRITE "${CMAKE_BINARY_DIR}/ble_log/include/mesh_log_index.h" "")
endif()
list(APPEND LOG_COMPRESSED_MODULE_CODE_PATH "esp_ble_mesh")
# update config file
set(BLE_MESH_CODE_PATH "esp_ble_mesh")
set(BLE_MESH_LOG_INDEX_HEADER "mesh_log_index.h")
# update BLE_MESH_TAGS and BLE_MESH_TAGS_PRESERVE
include(${CMAKE_CURRENT_LIST_DIR}/cmake/ble_mesh_log_tags.cmake)
endif()
# Host-side compression currently supports the Bluedroid stack only.
if(CONFIG_BLE_HOST_COMPRESSED_LOG_ENABLE AND CONFIG_BT_BLUEDROID_ENABLED)
list(APPEND LOG_COMPRESSED_MODULE "BLE_HOST")
list(APPEND LOG_COMPRESSED_MODULE_CODE_PATH "host/bluedroid/stack")
# Pre-create an empty log-index header (same reason as the mesh module).
if(NOT EXISTS "${CMAKE_BINARY_DIR}/ble_log/include/host_log_index.h")
file(WRITE "${CMAKE_BINARY_DIR}/ble_log/include/host_log_index.h" "")
endif()
# Values substituted into module_info.yml.in via configure_file below.
set(HOST_CODE_PATH "host/bluedroid/stack")
set(HOST_LOG_INDEX_HEADER "host_log_index.h")
# Defines BLE_HOST_TAGS and BLE_HOST_TAGS_PRESERVE from Kconfig choices.
include(${CMAKE_CURRENT_LIST_DIR}/cmake/ble_host_bluedroid_tags.cmake)
endif()
if(LOG_COMPRESSED_MODULE)
# Runtime stub and generated headers become part of the component build.
list(APPEND srcs "common/ble_log/extension/log_compression/ble_log_compression.c")
list(APPEND include_dirs "${CMAKE_BINARY_DIR}/ble_log/include")
# Locate a Python 3 interpreter. The Python3_FIND_STRATEGY knob needs
# CMake >= 3.15; older CMake falls back to the deprecated FindPythonInterp.
if(NOT CMAKE_VERSION VERSION_LESS 3.15.0)
set(Python3_FIND_STRATEGY LOCATION)
find_package(Python3 COMPONENTS Interpreter)
if(Python3_Interpreter_FOUND)
set(BLE_PYTHON_EXECUTABLE ${Python3_EXECUTABLE})
endif()
else()
find_package(PythonInterp 3)
if(PYTHONINTERP_FOUND)
set(BLE_PYTHON_EXECUTABLE ${PYTHON_EXECUTABLE})
endif()
endif()
# BLE_PYTHON_EXECUTABLE stays unset when no interpreter was found; the
# else() branch at the end of this file then disables compression.
if(BLE_PYTHON_EXECUTABLE)
set(PYTHON_SCRIPT ${BT_ROOT_PATH}/common/ble_log/extension/log_compression/scripts/ble_log_compress.py)
# Classification results of the component sources (filled in below).
set(compressed_srcs "")
set(uncompressed_srcs "")
set(compressed_srcs_with_abs_path "")
# Verify that the Python environment provides the extra packages required
# by the compression scripts before rewriting any sources.
execute_process(COMMAND ${BLE_PYTHON_EXECUTABLE}
    ${BT_ROOT_PATH}/common/ble_log/extension/log_compression/scripts/env_check.py
    RESULT_VARIABLE result
    OUTPUT_VARIABLE out
    ERROR_VARIABLE err
)
# Compare the variable by name: "if(NOT ${result} EQUAL 0)" loses an
# operand (and fails to parse) if the variable is ever empty.
if(NOT result EQUAL 0)
    message(WARNING "${err}")
    message(WARNING "Exit this log compression due to failure of environment check")
    # Report "compression disabled" back to the component CMakeLists while
    # still exporting the include directories collected so far.
    set(LOG_COMPRESS_INCLUDE_DIRS ${include_dirs} PARENT_SCOPE)
    set(LOG_COMPRESSION_TARGET "" PARENT_SCOPE)
    return()
endif()
# Generate the per-module configuration consumed by the Python scripts.
set(CONFIG_FILE_PATH "${CMAKE_BINARY_DIR}/ble_log/module_info.yml")
if(NOT EXISTS "${CONFIG_FILE_PATH}")
    # file(WRITE) requires a content argument -- pre-create an empty file
    # (configure_file() below fills in the real content).
    file(WRITE "${CONFIG_FILE_PATH}" "")
endif()
set(YML_IN "${BT_ROOT_PATH}/common/ble_log/extension/log_compression/scripts/configs/module_info.yml.in")
configure_file(${YML_IN} ${CONFIG_FILE_PATH} @ONLY)
# Build a regex matching any .c file under a compressed module's directory;
# "|" temporarily stands in for ";" so the list becomes regex alternation.
string(REPLACE ";" "|" MODULE_CODE_PATH "${LOG_COMPRESSED_MODULE_CODE_PATH}")
set(MATCH_PATTERN "(${MODULE_CODE_PATH}).+\\.c")
# Split the component sources: files under a compressed module's directory
# are redirected to their generated copies; everything else is kept as-is.
# NOTE(review): the module paths are used unescaped inside the regex; fine
# for the fixed paths used here, but confirm if paths ever gain metachars.
foreach(src ${srcs})
if(src MATCHES ${MATCH_PATTERN})
set(dest "${LOG_COMPRESSED_SRCS_DIR}/${src}")
# Pre-create an empty placeholder so the file exists at configure time;
# the Python generator overwrites it with the rewritten source.
file(WRITE "${dest}" "")
list(APPEND compressed_srcs ${src})
list(APPEND compressed_srcs_with_abs_path "${dest}")
else()
list(APPEND uncompressed_srcs ${src})
endif()
endforeach()
# Restore the ";" list separator that was swapped for the regex above.
string(REPLACE "|" ";" LOG_COMPRESSED_MODULE_CODE_PATH "${MODULE_CODE_PATH}")
# Some header files of NIMBLE are not added to include_dirs,
# but rely on relative path searches. This will cause the header
# files to be found due to the change in the source code location
# after using the log compression scheme.
# Therefore, these paths are added to include_dirs here to avoid
# unfinished compilation errors.
if(CONFIG_BT_NIMBLE_ENABLED)
list(APPEND include_dirs
"host/nimble/nimble/nimble/host/src"
"host/nimble/nimble/nimble/host/store/config/src")
endif()
# Generator target that rewrites the placeholder sources.
# NOTE(review): add_custom_target is always considered out of date, so this
# command reruns on every build -- presumably the script skips unchanged
# inputs; confirm. COMMAND has no VERBATIM, so the quoting of the "'...'"
# arguments is platform dependent -- confirm behavior on Windows shells.
add_custom_target(ble_log_compression ALL
COMMAND ${BLE_PYTHON_EXECUTABLE} ${PYTHON_SCRIPT}
compress
--compressed_srcs_path "${LOG_COMPRESSED_SRCS_DIR}"
--build_path "${CMAKE_BINARY_DIR}"
--module "'${LOG_COMPRESSED_MODULE}'"
--bt_path "${BT_ROOT_PATH}"
--srcs "'${compressed_srcs}'"
DEPENDS ${compressed_srcs_with_abs_path} ${PYTHON_SCRIPT}
COMMENT "Log compression is being performed, please wait..."
WORKING_DIRECTORY ${BT_ROOT_PATH}
USES_TERMINAL
)
# If `file` is present in `file_list`, mark its generated copy with the
# given extra compile flags and tie its object to the generator output.
# Reports back to the caller via PROCESSED (PARENT_SCOPE): ON if the file
# was handled here, OFF otherwise.
function(add_flags_if_in_list file file_list compile_flags)
# Reset the caller's flag up front; set(... PARENT_SCOPE) changes only the
# caller's scope, not the function-local view of the variable.
set(PROCESSED OFF PARENT_SCOPE)
foreach(item IN LISTS file_list)
if(item STREQUAL file)
set_source_files_properties("${LOG_COMPRESSED_SRCS_DIR}/${file}"
PROPERTIES
GENERATED TRUE
COMPILE_FLAGS "${compile_flags}"
# NOTE(review): OBJECT_DEPENDS expects a file path; this names the
# custom target in the binary dir -- confirm such a file/stamp exists.
OBJECT_DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/ble_log_compression"
)
set(PROCESSED ON PARENT_SCOPE)
break()
endif()
endforeach()
endfunction()
# Attach per-file compile options to the generated copies, mirroring the
# special cases the regular (uncompressed) build applies to these files.
foreach(src ${compressed_srcs})
    set(PROCESSED OFF)
    if(CONFIG_BT_BLUEDROID_ENABLED)
        # Files that take addresses of packed-struct members.
        set(files_with_compile_flags
            "host/bluedroid/bta/gatt/bta_gattc_act.c"
            "host/bluedroid/bta/gatt/bta_gattc_cache.c"
            "host/bluedroid/btc/profile/std/gatt/btc_gatt_util.c"
            "host/bluedroid/btc/profile/std/gatt/btc_gatts.c")
        add_flags_if_in_list("${src}"
            "${files_with_compile_flags}"
            "-Wno-address-of-packed-member")
        if(NOT CMAKE_BUILD_EARLY_EXPANSION)
            # Force jump tables for large switch statements; the extra
            # switch-conversion flag is GCC-only.
            set(jump_table_opts "-fjump-tables")
            if(NOT (CMAKE_C_COMPILER_ID MATCHES "Clang") )
                set(jump_table_opts "${jump_table_opts} -ftree-switch-conversion")
            endif()
            set(files_with_compile_flags
                "host/bluedroid/bta/hf_ag/bta_ag_cmd.c"
                "host/bluedroid/btc/profile/std/gap/btc_gap_ble.c"
            )
            add_flags_if_in_list("${src}"
                "${files_with_compile_flags}"
                "${jump_table_opts}")
        endif()
        if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER 15.0)
            set(files_with_compile_flags "host/bluedroid/device/controller.c")
            add_flags_if_in_list("${src}"
                "${files_with_compile_flags}"
                "-Wno-unterminated-string-initialization")
        endif()
    endif()
    if(CONFIG_BT_NIMBLE_ENABLED)
        if(CONFIG_BT_NIMBLE_MESH)
            # FATAL_ERROR aborts configuration; plain "ERROR" is not a valid
            # message() mode -- it would print "ERROR..." and carry on.
            message(FATAL_ERROR "The current log compression scheme does not support NIMBLE MESH")
        endif()
    endif()
    if(NOT PROCESSED)
        # No special flags needed: still mark the copy as generated and tie
        # its object to the log-compression output.
        set_source_files_properties("${LOG_COMPRESSED_SRCS_DIR}/${src}"
            PROPERTIES GENERATED TRUE
            OBJECT_DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/ble_log_compression"
        )
    endif()
endforeach()
# Success: export the generator target, the rewritten source list and the
# include directories back to the component CMakeLists.
set(LOG_COMPRESSION_TARGET ble_log_compression PARENT_SCOPE)
set(LOG_COMPRESS_SRCS "${compressed_srcs_with_abs_path};${uncompressed_srcs}" PARENT_SCOPE)
list(APPEND include_dirs "common/ble_log/extension/log_compression/include")
set(LOG_COMPRESS_INCLUDE_DIRS ${include_dirs} PARENT_SCOPE)
else()
    # No usable Python interpreter: disable compression, but still export
    # the include directories so the caller's fallback append works -- this
    # matches what the environment-check failure path does above.
    set(LOG_COMPRESSION_TARGET "" PARENT_SCOPE)
    set(LOG_COMPRESS_INCLUDE_DIRS ${include_dirs} PARENT_SCOPE)
    message(STATUS "Python 3 used for log compressing not found")
endif()
else()
    # No module selected via Kconfig: nothing to do.
    set(LOG_COMPRESSION_TARGET "" PARENT_SCOPE)
    message(STATUS "No module enabled for log compress")
endif()

View File

@@ -0,0 +1,692 @@
config BLE_COMPRESSED_LOG_ENABLE
bool "Enable BLE log compression(Preview, Please read help information)"
default n
help
Compress BLE logs during application build to reduce flash usage
and improve output speed. When enabled, log data from Bluetooth
Low Energy components will be compressed before storage,
optimizing both memory footprint and transmission efficiency.
Note: This library depends on additional Python packages. It will
function correctly only after these dependencies are installed;
refer to:
"components/bt/common/ble_log/extension/log_compression/README.en.md"
for installation instructions.
if BLE_COMPRESSED_LOG_ENABLE
menuconfig BLE_MESH_COMPRESSED_LOG_ENABLE
bool "Enable BLE Mesh log compression(Preview)"
depends on BLE_COMPRESSED_LOG_ENABLE
depends on BLE_MESH
default n
help
Apply compression to ESP-BLE-MESH protocol stack logs. Requires
base BLE compression to be enabled. Specifically optimizes log
storage and transmission for ble mesh.
Note: This library depends on additional Python packages. It will
function correctly only after these dependencies are installed;
refer to:
"components/bt/common/ble_log/extension/log_compression/README.en.md"
for installation instructions.
If the required packages are not installed, the log-compression
mechanism will remain disabled even when this Config is enabled.
config BLE_MESH_COMPRESSED_LOG_BUFFER_LEN
int "BLE Mesh log buffer length"
depends on BLE_MESH_COMPRESSED_LOG_ENABLE
default 400
help
Maximum output length for a single log
if BLE_MESH_COMPRESSED_LOG_ENABLE
menu "Select the stack log tag to be compressed"
config BLE_MESH_STACK_ERR_LOG_COMPRESSION
bool "Compress ERROR log of ESP-BLE-MESH"
default y
help
The error log in the BLE-MESH component will be compressed
config BLE_MESH_STACK_ERR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_MESH_STACK_ERR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_MESH_STACK_WARN_LOG_COMPRESSION
bool "Compress warn log of ESP-BLE-MESH"
default y
help
The warn log in the BLE-MESH component will be compressed
config BLE_MESH_STACK_WARN_LOG_PRESERVE
bool "Keep the original warn log statement"
depends on BLE_MESH_STACK_WARN_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_MESH_STACK_ERR_LOG_PRESERVE
config BLE_MESH_STACK_INFO_LOG_COMPRESSION
bool "Compress info log of ESP-BLE-MESH"
default y
help
The info log in the BLE-MESH component will be compressed
config BLE_MESH_STACK_INFO_LOG_PRESERVE
bool "Keep the original info log statement"
depends on BLE_MESH_STACK_INFO_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_MESH_STACK_ERR_LOG_PRESERVE
config BLE_MESH_STACK_DEBUG_LOG_COMPRESSION
bool "Compress debug log of ESP-BLE-MESH"
default y
help
The debug log in the BLE-MESH component will be compressed
config BLE_MESH_STACK_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_MESH_STACK_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_MESH_STACK_ERR_LOG_PRESERVE
endmenu
menu "Select the net buf log tag to be compressed"
config BLE_MESH_NET_BUF_ERR_LOG_COMPRESSION
bool "Compress ERROR log of ESP-BLE-MESH"
default y
help
The error log in the BLE-MESH component will be compressed
config BLE_MESH_NET_BUF_ERR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_MESH_NET_BUF_ERR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_MESH_NET_BUF_WARN_LOG_COMPRESSION
bool "Compress warn log of ESP-BLE-MESH"
default y
help
The warn log in the BLE-MESH component will be compressed
config BLE_MESH_NET_BUF_WARN_LOG_PRESERVE
bool "Keep the original warn log statement"
depends on BLE_MESH_NET_BUF_WARN_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_MESH_NET_BUF_ERR_LOG_PRESERVE
config BLE_MESH_NET_BUF_INFO_LOG_COMPRESSION
bool "Compress info log of ESP-BLE-MESH"
default y
help
The info log in the BLE-MESH component will be compressed
config BLE_MESH_NET_BUF_INFO_LOG_PRESERVE
bool "Keep the original info log statement"
depends on BLE_MESH_NET_BUF_INFO_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_MESH_NET_BUF_ERR_LOG_PRESERVE
config BLE_MESH_NET_BUF_DEBUG_LOG_COMPRESSION
bool "Compress debug log of ESP-BLE-MESH"
default y
help
The debug log in the BLE-MESH component will be compressed
config BLE_MESH_NET_BUF_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_MESH_NET_BUF_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_MESH_NET_BUF_ERR_LOG_PRESERVE
endmenu
endif
menuconfig BLE_HOST_COMPRESSED_LOG_ENABLE
bool "Enable BLE Host log compression(Preview, only Bluedroid Host for now)"
depends on BLE_COMPRESSED_LOG_ENABLE
depends on BT_BLUEDROID_ENABLED
default n
help
Apply compression to host logs. Requires
base BLE compression to be enabled. Specifically optimizes log
storage and transmission.
Note: This library depends on additional Python packages. It will
function correctly only after these dependencies are installed;
refer to:
"components/bt/common/ble_log/extension/log_compression/README.en.md"
for installation instructions.
config BLE_HOST_COMPRESSED_LOG_BUFFER_LEN
int "Host log buffer length"
depends on BLE_HOST_COMPRESSED_LOG_ENABLE
default 300
help
Maximum output length for a single log
if BLE_HOST_COMPRESSED_LOG_ENABLE
menu "Select the BTM layer log tag to be compressed"
config BLE_BLUEDROID_BTM_ERROR_LOG_COMPRESSION
bool "Compress error log of Bluedroid host"
default y
help
The error log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_BLUEDROID_BTM_ERROR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_BLUEDROID_BTM_WARNING_LOG_COMPRESSION
bool "Compress warning log of Bluedroid host"
default y
help
The warning log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_BTM_WARNING_LOG_PRESERVE
bool "Keep the original warning log statement"
depends on BLE_BLUEDROID_BTM_WARNING_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_BTM_API_LOG_COMPRESSION
bool "Compress api log of Bluedroid host"
default y
help
The api log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_BTM_API_LOG_PRESERVE
bool "Keep the original api log statement"
depends on BLE_BLUEDROID_BTM_API_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_BTM_EVENT_LOG_COMPRESSION
bool "Compress event log of Bluedroid host"
default y
help
The event log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_BTM_EVENT_LOG_PRESERVE
bool "Keep the original event log statement"
depends on BLE_BLUEDROID_BTM_EVENT_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_BTM_DEBUG_LOG_COMPRESSION
bool "Compress debug log of Bluedroid host"
default y
help
The debug log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_BTM_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_BLUEDROID_BTM_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_BTM_VERBOSE_LOG_COMPRESSION
bool "Compress verbose log of Bluedroid host"
default y
help
The verbose log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_BTM_VERBOSE_LOG_PRESERVE
bool "Keep the original verbose log statement"
depends on BLE_BLUEDROID_BTM_VERBOSE_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
endmenu
menu "Select the L2CAP layer log tag to be compressed"
config BLE_BLUEDROID_L2CAP_ERROR_LOG_COMPRESSION
bool "Compress error log of Bluedroid host"
default y
help
The error log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_BLUEDROID_L2CAP_ERROR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_BLUEDROID_L2CAP_WARNING_LOG_COMPRESSION
bool "Compress warning log of Bluedroid host"
default y
help
The warning log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_L2CAP_WARNING_LOG_PRESERVE
bool "Keep the original warning log statement"
depends on BLE_BLUEDROID_L2CAP_WARNING_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_L2CAP_API_LOG_COMPRESSION
bool "Compress api log of Bluedroid host"
default y
help
The api log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_L2CAP_API_LOG_PRESERVE
bool "Keep the original api log statement"
depends on BLE_BLUEDROID_L2CAP_API_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_L2CAP_EVENT_LOG_COMPRESSION
bool "Compress event log of Bluedroid host"
default y
help
The event log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_L2CAP_EVENT_LOG_PRESERVE
bool "Keep the original event log statement"
depends on BLE_BLUEDROID_L2CAP_EVENT_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_L2CAP_DEBUG_LOG_COMPRESSION
bool "Compress debug log of Bluedroid host"
default y
help
The debug log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_L2CAP_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_BLUEDROID_L2CAP_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_L2CAP_VERBOSE_LOG_COMPRESSION
bool "Compress verbose log of Bluedroid host"
default y
help
The verbose log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_L2CAP_VERBOSE_LOG_PRESERVE
bool "Keep the original verbose log statement"
depends on BLE_BLUEDROID_L2CAP_VERBOSE_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
endmenu
menu "Select the GAP layer log tag to be compressed"
config BLE_BLUEDROID_GAP_ERROR_LOG_COMPRESSION
bool "Compress error log of Bluedroid host"
default y
help
The error log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_BLUEDROID_GAP_ERROR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_BLUEDROID_GAP_WARNING_LOG_COMPRESSION
bool "Compress warning log of Bluedroid host"
default y
help
The warning log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GAP_WARNING_LOG_PRESERVE
bool "Keep the original warning log statement"
depends on BLE_BLUEDROID_GAP_WARNING_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GAP_API_LOG_COMPRESSION
bool "Compress api log of Bluedroid host"
default y
help
The api log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GAP_API_LOG_PRESERVE
bool "Keep the original api log statement"
depends on BLE_BLUEDROID_GAP_API_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GAP_EVENT_LOG_COMPRESSION
bool "Compress event log of Bluedroid host"
default y
help
The event log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GAP_EVENT_LOG_PRESERVE
bool "Keep the original event log statement"
depends on BLE_BLUEDROID_GAP_EVENT_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GAP_DEBUG_LOG_COMPRESSION
bool "Compress debug log of Bluedroid host"
default y
help
The debug log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GAP_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_BLUEDROID_GAP_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GAP_VERBOSE_LOG_COMPRESSION
bool "Compress verbose log of Bluedroid host"
default y
help
The verbose log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GAP_VERBOSE_LOG_PRESERVE
bool "Keep the original verbose log statement"
depends on BLE_BLUEDROID_GAP_VERBOSE_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
endmenu
menu "Select the GATT layer log tag to be compressed"
config BLE_BLUEDROID_GATT_ERROR_LOG_COMPRESSION
bool "Compress error log of Bluedroid host"
default y
help
The error log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_BLUEDROID_GATT_ERROR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_BLUEDROID_GATT_WARNING_LOG_COMPRESSION
bool "Compress warning log of Bluedroid host"
default y
help
The warning log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GATT_WARNING_LOG_PRESERVE
bool "Keep the original warning log statement"
depends on BLE_BLUEDROID_GATT_WARNING_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GATT_API_LOG_COMPRESSION
bool "Compress api log of Bluedroid host"
default y
help
The api log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GATT_API_LOG_PRESERVE
bool "Keep the original api log statement"
depends on BLE_BLUEDROID_GATT_API_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GATT_EVENT_LOG_COMPRESSION
bool "Compress event log of Bluedroid host"
default y
help
The event log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GATT_EVENT_LOG_PRESERVE
bool "Keep the original event log statement"
depends on BLE_BLUEDROID_GATT_EVENT_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GATT_DEBUG_LOG_COMPRESSION
bool "Compress debug log of Bluedroid host"
default y
help
The debug log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GATT_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_BLUEDROID_GATT_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_GATT_VERBOSE_LOG_COMPRESSION
bool "Compress verbose log of Bluedroid host"
default y
help
The verbose log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_GATT_VERBOSE_LOG_PRESERVE
bool "Keep the original verbose log statement"
depends on BLE_BLUEDROID_GATT_VERBOSE_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
endmenu
menu "Select the SMP layer log tag to be compressed"
config BLE_BLUEDROID_SMP_ERROR_LOG_COMPRESSION
bool "Compress error log of Bluedroid host"
default y
help
The error log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_BLUEDROID_SMP_ERROR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_BLUEDROID_SMP_WARNING_LOG_COMPRESSION
bool "Compress warning log of Bluedroid host"
default y
help
The warning log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_SMP_WARNING_LOG_PRESERVE
bool "Keep the original warning log statement"
depends on BLE_BLUEDROID_SMP_WARNING_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_SMP_API_LOG_COMPRESSION
bool "Compress api log of Bluedroid host"
default y
help
The api log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_SMP_API_LOG_PRESERVE
bool "Keep the original api log statement"
depends on BLE_BLUEDROID_SMP_API_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_SMP_EVENT_LOG_COMPRESSION
bool "Compress event log of Bluedroid host"
default y
help
The event log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_SMP_EVENT_LOG_PRESERVE
bool "Keep the original event log statement"
depends on BLE_BLUEDROID_SMP_EVENT_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_SMP_DEBUG_LOG_COMPRESSION
bool "Compress debug log of Bluedroid host"
default y
help
The debug log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_SMP_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_BLUEDROID_SMP_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_SMP_VERBOSE_LOG_COMPRESSION
bool "Compress verbose log of Bluedroid host"
default y
help
The verbose log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_SMP_VERBOSE_LOG_PRESERVE
bool "Keep the original verbose log statement"
depends on BLE_BLUEDROID_SMP_VERBOSE_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
endmenu
menu "Select the APPL layer log tag to be compressed"
config BLE_BLUEDROID_APPL_ERROR_LOG_COMPRESSION
bool "Compress error log of Bluedroid host"
default y
help
The error log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
bool "Keep the original error log statement"
depends on BLE_BLUEDROID_APPL_ERROR_LOG_COMPRESSION
default y
help
When this option is enabled, the log data will be output
through both the compressed log interface and the original
UART interface at the same time, meaning that the log
statements will appear on both paths. However, please note
that this dual-output approach introduces additional code
and string constants, which will increase the size of the
firmware binary file. When this option is disabled, the
logs will no longer be printed through the original UART
output path; instead, they will only be output through the
compressed log interface. As the code and strings related
to the original UART output are omitted, the size of the
firmware binary file can be effectively reduced.
config BLE_BLUEDROID_APPL_WARNING_LOG_COMPRESSION
bool "Compress warning log of Bluedroid host"
default y
help
The warning log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_APPL_WARNING_LOG_PRESERVE
bool "Keep the original warning log statement"
depends on BLE_BLUEDROID_APPL_WARNING_LOG_COMPRESSION
default y
help
Please refer to the help information in BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_APPL_API_LOG_COMPRESSION
bool "Compress api log of Bluedroid host"
default y
help
The api log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_APPL_API_LOG_PRESERVE
bool "Keep the original api log statement"
depends on BLE_BLUEDROID_APPL_API_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_APPL_EVENT_LOG_COMPRESSION
bool "Compress event log of Bluedroid host"
default y
help
The event log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_APPL_EVENT_LOG_PRESERVE
bool "Keep the original event log statement"
depends on BLE_BLUEDROID_APPL_EVENT_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_APPL_DEBUG_LOG_COMPRESSION
bool "Compress debug log of Bluedroid host"
default y
help
The debug log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_APPL_DEBUG_LOG_PRESERVE
bool "Keep the original debug log statement"
depends on BLE_BLUEDROID_APPL_DEBUG_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
config BLE_BLUEDROID_APPL_VERBOSE_LOG_COMPRESSION
bool "Compress verbose log of Bluedroid host"
default y
help
The verbose log in the Bluedroid host component will be compressed
config BLE_BLUEDROID_APPL_VERBOSE_LOG_PRESERVE
bool "Keep the original verbose log statement"
depends on BLE_BLUEDROID_APPL_VERBOSE_LOG_COMPRESSION
default n
help
Please refer to the help information in BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
endmenu
endif
endif

View File

@@ -0,0 +1,111 @@
## BLE 日志压缩方案 Preview
### 一、概述
本方案通过在编译阶段扫描 BLE 协议栈相关组件的代码,将日志语句中的格式化字符串和参数转换为纯二进制数据,从而提升日志输出效率并减少协议栈所占用的 Flash 空间。
目前,该方案已支持对 BLE-MESH 和 BLE-HOST-BLUEDROID 组件的日志压缩。
### 二、使用方法
该功能依赖额外的 Python 库,请按以下步骤安装所需环境。
#### 步骤一:验证 ESP-IDF 虚拟环境
请确保在 ESP-IDF 的 Python 虚拟环境中执行后续操作。可通过以下命令验证当前环境是否已激活:
```bash
idf.py --version
```
若提示 `command not found`,则表示未激活 ESP-IDF 虚拟环境。
请参考官方文档配置并激活环境:
[ESP-IDF 环境设置指南](https://docs.espressif.com/projects/esp-idf/zh_CN/latest/esp32/get-started/linux-macos-setup.html#get-started-linux-macos-first-steps)
激活后再次执行 `idf.py --version`,若显示版本信息则表明环境已就绪。
#### 步骤二:检查 Python 版本
在虚拟环境中执行以下命令确认 Python 版本:
```bash
python --version
```
若版本低于 3.8,请根据同一官方文档升级 Python
[Python 升级指南](https://docs.espressif.com/projects/esp-idf/zh_CN/latest/esp32/get-started/linux-macos-setup.html#get-started-linux-macos-first-steps)
#### 步骤三:安装日志压缩依赖
根据您的操作系统和终端环境,选择以下对应的命令安装所需依赖:
- **Linux 或 macOS**
```bash
pip install -r $IDF_PATH/components/bt/common/ble_log/log_compression/scripts/requirements.txt
```
- **Windows (PowerShell)**:
```ps
pip install -r $Env:IDF_PATH\components\bt\common\ble_log\log_compression\scripts\requirements.txt
```
- **Windows (命令提示符 Cmd)**:
```bat
pip install -r %IDF_PATH%\components\bt\common\ble_log\log_compression\scripts\requirements.txt
```
#### 步骤四:清理缓存文件
安装日志压缩所需的依赖后,删除之前构建生成的 build 文件夹(如有)。之后重新构建应用程序即可。
### 步骤五:配置 Menuconfig
使用命令`idf.py menuconfig`打开menuconfig, 以开启`BLE-MESH`组件的日志压缩为例:
通过路径`(Top) -> Component config -> Bluetooth -> Common Options -> BLE Log -> Enable BLE log compression(Preview, Please read help information)->Enable BLE Mesh log compression(Preview)`开启`BLE-MESH`组件的日志压缩功能。
在`Enable BLE Mesh log compression(Preview)`目录中有两个配置项目可供设置:
1. `Mesh log buffer length` 通过该配置项来配置单条日志可能出现的最大长度。
2. `Maximum number of tasks that can generate ble mesh logs` 通过该配置项目用来配置可能输出`BLE-MESH`组件日志的最大Task数目默认值为推荐值不建议修改
### 步骤六:构建应用程序
当开启配置项后,使用命令`idf.py build`构建应用程序,请注意构建过程中是否会有警告日志出现,比如出现以下日志时:
```txt
CMake Warning at esp/esp-idf/components/bt/common/ble_log/log_compression/CMakeLists.txt:46 (message):
tree_sitter import failed, please check whether the package is installed
correctly,Please refer to the
file: esp/esp-idf/components/bt/common/ble_log/log_compression/README
for installation instructions.
```
该警告表示依赖未正确安装,日志压缩构建失败,系统将自动回退至普通编译模式。请重新执行步骤一至步骤四。
若日志压缩成功,终端将显示如下信息:
```
[0/1285] Log compression is being performed, please wait...
Log compression underway, please wait...
Found module BLE_MESH for compression
Found 111 source files in module BLE_MESH requiring compression
3055 ble log(s) compressed
Header file for compressed logs generated
```
出现该信息表明压缩日志构建成功,其中输出的文件数目和日志数目可能会随版本变更而略有不同。
构建成功后,将在 `build/ble_log/` 目录下生成如下结构:
```
build/ble_log/
├── ble_log_database
│ └── BLE_MESH_logs.json
├── ble_script_log_{timestamp}.log
├── .compressed_srcs
│ └── esp_ble_mesh
├── include
│ └── mesh_log_index.h
└── module_info.yml
```
- `.compressed_srcs`: 压缩处理后的C代码文件。
- `mesh_log_index.h`: 生成的日志宏头文件。
- `BLE_MESH_logs.json`: 每条日志的详细信息。
- `ble_script_log_{timestamp}.log`: 压缩日志在运行过程中产生的日志。
- `module_info.yml`: 压缩日志对各个模块的配置文件。
**注意:这些为自动生成文件,请勿手动修改。**
### 步骤七、接收日志
开启日志压缩后, 被压缩组件在默认配置下, 除`ERR`、`WARN`级别的日志外其余级别的日志将会被重定向到压缩日志接口进行输出接收日志的方法请参考BLE Log模块的说明文档[BLE Log Module](../../README.md)
## 常见问题解决
1. 如果出现编码后的日志导致编译错误或者找不到宏定义,请删除`build`文件夹后重新构建,如果问题持续存在,请将问题反馈给 Espressif BLE 团队。

View File

@@ -0,0 +1,124 @@
## BLE Log Compression Scheme (Preview)
### 1. Overview
This scheme scans the code of BLE stack-related components during the compilation phase, converting formatted strings and parameters in log statements into pure binary data. This improves log output efficiency and reduces the Flash footprint of the protocol stack.
Currently, the scheme supports log compression for both `BLE-MESH` and `BLE-HOST-BLUEDROID` components.
---
### 2. How to Use
This feature requires additional Python libraries. Please follow the steps below to set up the environment.
### Step 1: Verify ESP-IDF Virtual Environment
Ensure all subsequent steps are performed within the ESP-IDF Python virtual environment.
Verify activation by running:
```bash
idf.py --version
```
If the output shows `idf.py: command not found`, the virtual environment is not active.
Refer to the official documentation to configure and activate the environment:
[ESP-IDF Setup Guide](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/get-started/linux-macos-setup.html)
After activation, run `idf.py --version` again. A version number confirms successful setup.
### Step 2: Check Python Version
Within the virtual environment, check the Python version:
```bash
python --version
```
If the version is lower than 3.8, upgrade Python by following the same official guide:
[Python Upgrade Guide](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/get-started/linux-macos-setup.html)
### Step 3: Install Log Compression Dependencies
Choose the appropriate command below based on your operating system and terminal environment to install the required dependencies:
If you are using Linux or macOS, please use the following command:
- **Linux or macOS**
```bash
pip install -r $IDF_PATH/components/bt/common/ble_log/log_compression/scripts/requirements.txt
```
- **Windows (PowerShell)**:
```ps
pip install -r $Env:IDF_PATH\components\bt\common\ble_log\log_compression\scripts\requirements.txt
```
- **Windows (Command Prompt - Cmd)**:
```bat
pip install -r %IDF_PATH%\components\bt\common\ble_log\log_compression\scripts\requirements.txt
```
### Step 4: Clean Build Cache
After installing the dependencies, it is recommended to delete the existing build folder (if any) and rebuild the application to ensure a clean environment.
### Step 5: Configure via Menuconfig
>
Run `idf.py menuconfig` and navigate to the following path to enable `BLE-MESH` log compression:
```(Top) -> Component config -> Bluetooth -> Common Options -> BLE Log -> Enable BLE log compression(Preview, Please read help information)->Enable BLE Mesh log compression(Preview)```.
There are two configuration items under this submenu:
- Mesh log buffer length: Sets the maximum length of a single log entry.
- Maximum number of tasks that can generate BLE mesh logs: Sets the maximum number of tasks that can output BLE-MESH logs (the default is recommended; avoid modifying this).
### Step 6: Build the Application
After configuration, build the application with:
```bash
idf.py build
```
Watch for any warnings during the build process. For example:
```txt
CMake Warning at esp/esp-idf/components/bt/common/ble_log/log_compression/CMakeLists.txt:46 (message):
tree_sitter import failed, please check whether the package is installed
correctly,Please refer to the
file: esp/esp-idf/components/bt/common/ble_log/log_compression/README
for installation instructions.
```
This indicates that the dependencies were not installed correctly, and log compression has failed—falling back to a normal build. Please repeat Steps 1–4.
If log compression is successful, you will see output similar to:
```
[0/1285] Log compression is being performed, please wait...
Log compression underway, please wait...
Found module BLE_MESH for compression
Found 111 source files in module BLE_MESH requiring compression
3055 ble log(s) compressed
Header file for compressed logs generated
```
After a successful build, the following structure will be generated under `build/ble_log/`:
```
build/ble_log/
├── ble_log_database
│ └── BLE_MESH_logs.json
├── ble_script_log_{timestamp}.log
├── .compressed_srcs
│ └── esp_ble_mesh
├── include
│ └── mesh_log_index.h
└── module_info.yml
```
- `.compressed_srcs`: Compressed C source files.
- `mesh_log_index.h`: Generated header file containing log macros.
- `BLE_MESH_logs.json`: Detailed information for each log entry.
- `ble_script_log_{timestamp}.log`: Log generated by the compression script.
- `module_info.yml`: Configuration file for compressed logging across modules.
**Do not modify these auto-generated files.**
### Step 7: Receive Logs
With log compression enabled and under the default configuration, all log levels except ERR and WARN generated by the compressed component will be redirected to the compression-log interface for output. Please refer to the BLE Log modules documentation for how to receive these logs: [BLE Log module](../../README.md).
## Frequently Asked Questions
1. If encoded logs cause compilation errors or missing macro definitions, delete the build folder and rebuild. If the issue persists, please report it to the Espressif BLE team.

View File

@@ -0,0 +1,263 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stdarg.h>
#include <stdint.h>
#include <string.h>
// Private includes
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "sdkconfig.h"
#include "ble_log_util.h"
#include "log_compression/utils.h"
#if CONFIG_BLE_COMPRESSED_LOG_ENABLE

/* BUF_NAME(mesh, _0) -> mesh_buffer_0; BUF_MGMT_NAME(mesh) -> mesh_log_buffer_mgmt */
#define BUF_NAME(name, idx) name##_buffer##idx
#define BUF_MGMT_NAME(name) name##_log_buffer_mgmt

/* Per-index expansion ops used with FOR_EACH_IDX (see log_compression/utils.h):
 * DECL_BUF_OP declares one static backing buffer, INIT_MAP_OP emits one
 * ble_cp_log_buffer_mgmt_t initializer bound to that buffer. */
#define DECL_BUF_OP(name, len, idx) \
    static uint8_t BUF_NAME(name, idx)[len];
#define INIT_MAP_OP(name, _, buffer_idx) \
    {.busy = 0, \
     .idx = 0, \
     .buffer = BUF_NAME(name, buffer_idx), \
     .len = sizeof(BUF_NAME(name, buffer_idx))},

/* DECLARE_BUFFERS(mesh, LEN, 3) -> three static buffers of LEN bytes each. */
#define DECLARE_BUFFERS(NAME, BUF_LEN, BUF_CNT) \
    FOR_EACH_IDX(DECL_BUF_OP, NAME, BUF_LEN, GEN_INDEX(BUF_CNT));
/* INIT_BUFFER_MGMT(mesh, 3) -> mesh_log_buffer_mgmt[3] wired to those buffers. */
#define INIT_BUFFER_MGMT(NAME, BUF_CNT) \
    ble_cp_log_buffer_mgmt_t BUF_MGMT_NAME(NAME)[BUF_CNT] = { \
        FOR_EACH_IDX(INIT_MAP_OP, NAME, 0, GEN_INDEX(BUF_CNT)) \
    };

#if CONFIG_BLE_MESH_COMPRESSED_LOG_ENABLE
DECLARE_BUFFERS(mesh, CONFIG_BLE_MESH_COMPRESSED_LOG_BUFFER_LEN, LOG_CP_MAX_LOG_BUFFER_USED_SIMU);
INIT_BUFFER_MGMT(mesh, LOG_CP_MAX_LOG_BUFFER_USED_SIMU);
/* Name pointer of the task that last emitted a mesh log; compared in
 * ble_compressed_log_cb_get() to detect task switches. */
char * mesh_last_task_handle = NULL;
#endif
#if CONFIG_BLE_HOST_COMPRESSED_LOG_ENABLE
DECLARE_BUFFERS(host, CONFIG_BLE_HOST_COMPRESSED_LOG_BUFFER_LEN, LOG_CP_MAX_LOG_BUFFER_USED_SIMU);
INIT_BUFFER_MGMT(host, LOG_CP_MAX_LOG_BUFFER_USED_SIMU);
char * host_last_task_handle = NULL;
#endif

/* Record header: 2-bit LOG_TYPE_* in the top bits, 6-bit info field
 * (argument count, buffer index or LOG_TYPE_INFO_* code) in the low bits. */
/* The maximum number of supported parameters is 64 */
#define LOG_HEADER(log_type, info) ((log_type << 6) | (info & 0x3f))
/**
 * @brief Acquire a free compressed-log buffer for the given source.
 *
 * Scans the source's small static buffer pool and claims the first free
 * entry with a CAS on its busy flag, so concurrent tasks never share a
 * buffer. On success the record is started: the source id is pushed as the
 * first byte and, when the calling task differs from the task that produced
 * the previous record for this source, a task-switch marker is appended.
 *
 * @param source BLE_COMPRESSED_LOG_OUT_SOURCE_* pool to allocate from
 * @param mgmt   out: the claimed buffer management block
 * @return 0 on success, -1 when every buffer of the pool is busy
 */
int ble_compressed_log_cb_get(uint8_t source, ble_cp_log_buffer_mgmt_t **mgmt)
{
    ble_cp_log_buffer_mgmt_t *buffer_mgmt = NULL;
    char ** last_handle = NULL;
    /* pcTaskGetName(NULL) yields the current task's name storage; the
     * pointer value itself identifies the task, so pointer comparison below
     * is sufficient to detect a task switch. */
    char * cur_handle = pcTaskGetName(NULL);
    switch (source)
    {
#if CONFIG_BLE_MESH_COMPRESSED_LOG_ENABLE
    case BLE_COMPRESSED_LOG_OUT_SOURCE_MESH:
        buffer_mgmt = BUF_MGMT_NAME(mesh);
        last_handle = &mesh_last_task_handle;
        break;
#endif
#if CONFIG_BLE_HOST_COMPRESSED_LOG_ENABLE
    case BLE_COMPRESSED_LOG_OUT_SOURCE_HOST:
        buffer_mgmt = BUF_MGMT_NAME(host);
        last_handle = &host_last_task_handle;
        break;
#endif
    default:
        assert(0 && "Unsupported log source");
        break;
    }
    for (int i = 0; i < LOG_CP_MAX_LOG_BUFFER_USED_SIMU; i++) {
        /* Atomically claim the buffer; skip it if another task holds it. */
        if (ble_log_cas_acquire(&(buffer_mgmt[i].busy))) {
            *mgmt = &buffer_mgmt[i];
            ble_log_cp_push_u8(*mgmt, source);
            if (*last_handle == NULL ||
                *last_handle != cur_handle) {
                ble_log_cp_push_u8(*mgmt, LOG_HEADER(LOG_TYPE_INFO, LOG_TYPE_INFO_TASK_SWITCH));
                *last_handle = cur_handle;
            }
            return 0;
        }
    }
    /* All buffers of this source are currently in use; caller must not
     * dereference *mgmt in this case. */
    return -1;
}
/* Reset a log buffer and return it to the free pool.
 * The write index must be cleared BEFORE the CAS release: once busy is
 * released another task may immediately re-acquire the buffer. */
static inline int ble_compressed_log_buffer_free(ble_cp_log_buffer_mgmt_t *mgmt)
{
#if BLE_LOG_CP_CONTENT_CHECK_ENBALE
    /* Refill the used region with the check pattern for the next writer. */
    memset(mgmt->buffer, BLE_LOG_CP_CONTENT_CHECK_VAL, mgmt->idx);
#endif
    mgmt->idx = 0;
    ble_log_cas_release(&mgmt->busy);
    return 0;
}
/**
 * @brief Encode one compressed log record with variadic arguments.
 *
 * The variadic list carries first the args_cnt ARG_SIZE_TYPE_* type codes
 * and then the argument values in the same order. Type codes are packed two
 * per byte (even argument index -> high nibble, odd -> low nibble) and
 * values are written with leading-zero suppression; the packed type nibble
 * is patched afterwards (LZ/AZ variants) to tell the decoder how many bytes
 * were actually emitted.
 *
 * @param source    BLE_COMPRESSED_LOG_OUT_SOURCE_* producer of the record
 * @param log_index index of the original format string in the log database
 * @param args_cnt  number of arguments (6-bit header field)
 * @return 0 on success, -1 when no log buffer is currently free (record dropped)
 */
int ble_log_compressed_hex_print(uint8_t source, uint32_t log_index, size_t args_cnt, ...)
{
    ble_cp_log_buffer_mgmt_t *mgmt = NULL;
    uint8_t arg_type = 0;
    va_list args;

    /* Fix: the return value used to be ignored; when every buffer was busy
     * mgmt stayed NULL and the pushes below dereferenced it. */
    if (ble_compressed_log_cb_get(source, &mgmt) != 0) {
        return -1;
    }
    if (args_cnt == 0) {
        ble_log_cp_push_u8(mgmt, LOG_HEADER(LOG_TYPE_HEX_ARGS, 0));
        ble_log_cp_push_u16(mgmt, log_index);
        ble_compressed_log_output(source, mgmt->buffer, mgmt->idx);
        ble_compressed_log_buffer_free(mgmt);
        return 0;
    }
    va_start(args, args_cnt);
    ble_log_cp_push_u8(mgmt, LOG_HEADER(LOG_TYPE_HEX_ARGS, args_cnt));
    ble_log_cp_push_u16(mgmt, log_index);
    /* Fix: was uint8_t, which truncates the offset for buffers > 255 bytes. */
    uint16_t size_info_idx = mgmt->idx;
    uint8_t size_info = 0;
    /* Pass 1: consume the type codes and pack them two per byte. */
    for (size_t i = 0; i < args_cnt; i++) {
        arg_type = va_arg(args, size_t);
        if (i % 2) {
            ble_log_cp_push_u8(mgmt, size_info | arg_type);
            size_info = 0;
        } else if (i == args_cnt - 1) {
            /* Fix: a trailing even-index type was pushed into the LOW
             * nibble, but pass 2 (and the decoder) read the HIGH nibble. */
            ble_log_cp_push_u8(mgmt, arg_type << 4);
        } else {
            size_info = arg_type << 4;
        }
        if (arg_type >= ARG_SIZE_TYPE_MAX) {
            printf("Found invalid arg type %08lx type %d", log_index, arg_type);
            assert(0);
        }
    }
    /* Pass 2: read the packed types back and encode each value argument. */
    uint8_t *cur = &(mgmt->buffer)[size_info_idx];
    for (size_t i = 0; i < args_cnt; i++) {
        if (i % 2) {
            arg_type = (*cur) & 0x0f;
            cur++;
        } else {
            arg_type = (*cur) >> 4;
        }
        switch (arg_type) {
        case ARG_SIZE_TYPE_U32: {
            uint32_t u32v = va_arg(args, size_t);
            if (likely(u32v)) {
                if (u32v <= 0xff) {
                    /* 3 leading zero bytes + 1 payload byte */
                    ble_log_cp_push_u8(mgmt, 3);
                    ble_log_cp_push_u8(mgmt, u32v);
                    ble_log_cp_update_half_byte(mgmt, size_info_idx + i/2, ARG_SIZE_TYPE_LZU32, !(i%2));
                    break;
                } else if (u32v <= 0xffff) {
                    /* 2 leading zero bytes + 2 payload bytes */
                    ble_log_cp_push_u8(mgmt, 2);
                    ble_log_cp_push_u16(mgmt, u32v);
                    ble_log_cp_update_half_byte(mgmt, size_info_idx + i/2, ARG_SIZE_TYPE_LZU32, !(i%2));
                    break;
                } else {
                    ble_log_cp_push_u32(mgmt, u32v);
                }
            } else {
                /* all-zero value: the type nibble alone encodes it */
                ble_log_cp_update_half_byte(mgmt, size_info_idx + i/2, ARG_SIZE_TYPE_AZU32, !(i%2));
            }
            break;
        }
        case ARG_SIZE_TYPE_U64: {
            uint64_t u64v = va_arg(args, uint64_t);
            if (likely(u64v)) {
                if (unlikely(u64v >> 48)) {
                    /* fewer than 2 leading zero bytes: store the raw value */
                    ble_log_cp_push_u64(mgmt, u64v);
                } else {
                    uint32_t tmpv = 0;
                    uint8_t lz = 0;
                    if (likely(u64v <= UINT32_MAX)) {
                        tmpv = (uint32_t)u64v;
                        lz = 4;
                    } else {
                        tmpv = u64v >> 32;
                    }
                    /* lz = number of leading zero bytes in the 64-bit value */
                    lz += __builtin_clz(tmpv) / 8;
                    ble_log_cp_push_u8(mgmt, lz);
                    /* Emit the 8 - lz significant bytes, low word first. */
                    switch (8 - lz) {
                    case 5:
                        ble_log_cp_push_u32(mgmt, (uint32_t)u64v);
                        [[fallthrough]];
                    case 1:
                        ble_log_cp_push_u8(mgmt, (uint8_t)tmpv);
                        break;
                    case 6:
                        ble_log_cp_push_u32(mgmt, (uint32_t)u64v);
                        [[fallthrough]];
                    case 2:
                        ble_log_cp_push_u16(mgmt, (uint16_t)tmpv);
                        break;
                    case 7:
                        ble_log_cp_push_u32(mgmt, (uint32_t)u64v);
                        [[fallthrough]];
                    case 3:
                        ble_log_cp_push_u8(mgmt, (uint8_t)tmpv);
                        ble_log_cp_push_u16(mgmt, (uint16_t)(tmpv >> 8));
                        break;
                    default:
                        assert(0);
                        break;
                    }
                    ble_log_cp_update_half_byte(mgmt, size_info_idx + i/2, ARG_SIZE_TYPE_LZU64, !(i%2));
                }
            } else {
                ble_log_cp_update_half_byte(mgmt, size_info_idx + i/2, ARG_SIZE_TYPE_AZU64, !(i%2));
            }
            break;
        }
        case ARG_SIZE_TYPE_STR: {
            /* Strings are copied inline, including the NUL terminator. */
            char *str_p = (char *)va_arg(args, char *);
            ble_log_cp_push_buf(mgmt, (const uint8_t *)str_p, strlen(str_p) + 1);
            break;
        }
        default:
            printf("Invalid size %d\n", arg_type);
            assert(0);
            break;
        }
    }
    ble_compressed_log_output(source, mgmt->buffer, mgmt->idx);
    ble_compressed_log_buffer_free(mgmt);
    va_end(args);
    return 0;
}
/**
 * @brief Encode a compressed log record that carries a raw hex buffer.
 *
 * A NULL buffer with a non-zero length is reported as a
 * LOG_TYPE_INFO_NULL_BUF record instead of crashing inside the copy.
 *
 * @param source    BLE_COMPRESSED_LOG_OUT_SOURCE_* producer of the record
 * @param log_index index of the original format string in the log database
 * @param buf_idx   6-bit index identifying which buffer argument this is
 * @param buf       data to attach (may be NULL)
 * @param len       number of bytes in @p buf
 * @return 0 on success, -1 when no log buffer is currently free (record dropped)
 */
int ble_log_compressed_hex_print_buf(uint8_t source, uint32_t log_index, uint8_t buf_idx, const uint8_t *buf, size_t len)
{
    ble_cp_log_buffer_mgmt_t *mgmt = NULL;

    /* Fix: the return value used to be ignored; when every buffer was busy
     * mgmt stayed NULL and the pushes below dereferenced it. */
    if (ble_compressed_log_cb_get(source, &mgmt) != 0) {
        return -1;
    }
    if (buf == NULL && len != 0) {
        ble_log_cp_push_u8(mgmt, LOG_HEADER(LOG_TYPE_INFO, LOG_TYPE_INFO_NULL_BUF));
        ble_log_cp_push_u16(mgmt, log_index);
        ble_compressed_log_output(source, mgmt->buffer, mgmt->idx);
        ble_compressed_log_buffer_free(mgmt);
        return 0;
    }
    ble_log_cp_push_u8(mgmt, LOG_HEADER(LOG_TYPE_HEX_BUF, buf_idx));
    ble_log_cp_push_u16(mgmt, log_index);
    ble_log_cp_push_buf(mgmt, buf, len);
    ble_compressed_log_output(source, mgmt->buffer, mgmt->idx);
    ble_compressed_log_buffer_free(mgmt);
    return 0;
}
#endif /* CONFIG_BLE_COMPRESSED_LOG_ENABLE */

View File

@@ -0,0 +1,160 @@
# Triplet table describing, per Bluedroid trace macro, whether its logs are
# compressed and whether the original log statement is preserved next to the
# compressed one. Entries repeat as:
#   <CONFIG_..._LOG_COMPRESSION> <CONFIG_..._LOG_PRESERVE> <trace macro tag>
# Consumed by tag_table_to_lists() (tag_table_function.cmake).
set(_BLE_HOST_TAG_MAP
    # BTM Layer
    CONFIG_BLE_BLUEDROID_BTM_ERROR_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_BTM_ERROR_LOG_PRESERVE
    BTM_TRACE_ERROR
    CONFIG_BLE_BLUEDROID_BTM_WARNING_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_BTM_WARNING_LOG_PRESERVE
    BTM_TRACE_WARNING
    CONFIG_BLE_BLUEDROID_BTM_API_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_BTM_API_LOG_PRESERVE
    BTM_TRACE_API
    CONFIG_BLE_BLUEDROID_BTM_EVENT_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_BTM_EVENT_LOG_PRESERVE
    BTM_TRACE_EVENT
    CONFIG_BLE_BLUEDROID_BTM_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_BTM_DEBUG_LOG_PRESERVE
    BTM_TRACE_DEBUG
    CONFIG_BLE_BLUEDROID_BTM_VERBOSE_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_BTM_VERBOSE_LOG_PRESERVE
    BTM_TRACE_VERBOSE
    # L2CAP Layer
    CONFIG_BLE_BLUEDROID_L2CAP_ERROR_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_L2CAP_ERROR_LOG_PRESERVE
    L2CAP_TRACE_ERROR
    CONFIG_BLE_BLUEDROID_L2CAP_WARNING_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_L2CAP_WARNING_LOG_PRESERVE
    L2CAP_TRACE_WARNING
    CONFIG_BLE_BLUEDROID_L2CAP_API_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_L2CAP_API_LOG_PRESERVE
    L2CAP_TRACE_API
    CONFIG_BLE_BLUEDROID_L2CAP_EVENT_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_L2CAP_EVENT_LOG_PRESERVE
    L2CAP_TRACE_EVENT
    CONFIG_BLE_BLUEDROID_L2CAP_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_L2CAP_DEBUG_LOG_PRESERVE
    L2CAP_TRACE_DEBUG
    CONFIG_BLE_BLUEDROID_L2CAP_VERBOSE_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_L2CAP_VERBOSE_LOG_PRESERVE
    L2CAP_TRACE_VERBOSE
    # GAP Layer
    CONFIG_BLE_BLUEDROID_GAP_ERROR_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GAP_ERROR_LOG_PRESERVE
    GAP_TRACE_ERROR
    CONFIG_BLE_BLUEDROID_GAP_WARNING_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GAP_WARNING_LOG_PRESERVE
    GAP_TRACE_WARNING
    CONFIG_BLE_BLUEDROID_GAP_API_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GAP_API_LOG_PRESERVE
    GAP_TRACE_API
    CONFIG_BLE_BLUEDROID_GAP_EVENT_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GAP_EVENT_LOG_PRESERVE
    GAP_TRACE_EVENT
    CONFIG_BLE_BLUEDROID_GAP_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GAP_DEBUG_LOG_PRESERVE
    GAP_TRACE_DEBUG
    CONFIG_BLE_BLUEDROID_GAP_VERBOSE_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GAP_VERBOSE_LOG_PRESERVE
    GAP_TRACE_VERBOSE
    # GATT Layer
    CONFIG_BLE_BLUEDROID_GATT_ERROR_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GATT_ERROR_LOG_PRESERVE
    GATT_TRACE_ERROR
    CONFIG_BLE_BLUEDROID_GATT_WARNING_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GATT_WARNING_LOG_PRESERVE
    GATT_TRACE_WARNING
    CONFIG_BLE_BLUEDROID_GATT_API_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GATT_API_LOG_PRESERVE
    GATT_TRACE_API
    CONFIG_BLE_BLUEDROID_GATT_EVENT_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GATT_EVENT_LOG_PRESERVE
    GATT_TRACE_EVENT
    CONFIG_BLE_BLUEDROID_GATT_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GATT_DEBUG_LOG_PRESERVE
    GATT_TRACE_DEBUG
    CONFIG_BLE_BLUEDROID_GATT_VERBOSE_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_GATT_VERBOSE_LOG_PRESERVE
    GATT_TRACE_VERBOSE
    # SMP Layer
    CONFIG_BLE_BLUEDROID_SMP_ERROR_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_SMP_ERROR_LOG_PRESERVE
    SMP_TRACE_ERROR
    CONFIG_BLE_BLUEDROID_SMP_WARNING_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_SMP_WARNING_LOG_PRESERVE
    SMP_TRACE_WARNING
    CONFIG_BLE_BLUEDROID_SMP_API_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_SMP_API_LOG_PRESERVE
    SMP_TRACE_API
    CONFIG_BLE_BLUEDROID_SMP_EVENT_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_SMP_EVENT_LOG_PRESERVE
    SMP_TRACE_EVENT
    CONFIG_BLE_BLUEDROID_SMP_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_SMP_DEBUG_LOG_PRESERVE
    SMP_TRACE_DEBUG
    CONFIG_BLE_BLUEDROID_SMP_VERBOSE_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_SMP_VERBOSE_LOG_PRESERVE
    SMP_TRACE_VERBOSE
    # APPL Layer
    CONFIG_BLE_BLUEDROID_APPL_ERROR_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_APPL_ERROR_LOG_PRESERVE
    APPL_TRACE_ERROR
    CONFIG_BLE_BLUEDROID_APPL_WARNING_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_APPL_WARNING_LOG_PRESERVE
    APPL_TRACE_WARNING
    CONFIG_BLE_BLUEDROID_APPL_API_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_APPL_API_LOG_PRESERVE
    APPL_TRACE_API
    CONFIG_BLE_BLUEDROID_APPL_EVENT_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_APPL_EVENT_LOG_PRESERVE
    APPL_TRACE_EVENT
    CONFIG_BLE_BLUEDROID_APPL_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_APPL_DEBUG_LOG_PRESERVE
    APPL_TRACE_DEBUG
    CONFIG_BLE_BLUEDROID_APPL_VERBOSE_LOG_COMPRESSION
    CONFIG_BLE_BLUEDROID_APPL_VERBOSE_LOG_PRESERVE
    APPL_TRACE_VERBOSE
)

include(${CMAKE_CURRENT_LIST_DIR}/tag_table_function.cmake)

set(BLE_HOST_TAGS "")
set(BLE_HOST_TAGS_PRESERVE "")
tag_table_to_lists(_BLE_HOST_TAG_MAP BLE_HOST_TAGS BLE_HOST_TAGS_PRESERVE)
# Export the joined tag strings to the including scope.
# NOTE(review): PARENT_SCOPE assumes this file is included from a function
# or subdirectory scope — confirm against the caller.
set(BLE_HOST_TAGS "${BLE_HOST_TAGS}" PARENT_SCOPE)
set(BLE_HOST_TAGS_PRESERVE "${BLE_HOST_TAGS_PRESERVE}" PARENT_SCOPE)

View File

@@ -0,0 +1,41 @@
# Triplet table for BLE-Mesh log macros. Entries repeat as:
#   <CONFIG_..._LOG_COMPRESSION> <CONFIG_..._LOG_PRESERVE> <log macro tag>
# Consumed by tag_table_to_lists() (tag_table_function.cmake).
set(_BLE_MESH_TAG_MAP
    CONFIG_BLE_MESH_STACK_ERR_LOG_COMPRESSION
    CONFIG_BLE_MESH_STACK_ERR_LOG_PRESERVE
    BT_ERR
    CONFIG_BLE_MESH_STACK_WARN_LOG_COMPRESSION
    CONFIG_BLE_MESH_STACK_WARN_LOG_PRESERVE
    BT_WARN
    CONFIG_BLE_MESH_STACK_INFO_LOG_COMPRESSION
    CONFIG_BLE_MESH_STACK_INFO_LOG_PRESERVE
    BT_INFO
    CONFIG_BLE_MESH_STACK_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_MESH_STACK_DEBUG_LOG_PRESERVE
    BT_DBG
    CONFIG_BLE_MESH_NET_BUF_ERR_LOG_COMPRESSION
    CONFIG_BLE_MESH_NET_BUF_ERR_LOG_PRESERVE
    NET_BUF_ERR
    CONFIG_BLE_MESH_NET_BUF_WARN_LOG_COMPRESSION
    CONFIG_BLE_MESH_NET_BUF_WARN_LOG_PRESERVE
    NET_BUF_WARN
    CONFIG_BLE_MESH_NET_BUF_INFO_LOG_COMPRESSION
    CONFIG_BLE_MESH_NET_BUF_INFO_LOG_PRESERVE
    NET_BUF_INFO
    CONFIG_BLE_MESH_NET_BUF_DEBUG_LOG_COMPRESSION
    CONFIG_BLE_MESH_NET_BUF_DEBUG_LOG_PRESERVE
    NET_BUF_DBG
)

include(${CMAKE_CURRENT_LIST_DIR}/tag_table_function.cmake)

set(BLE_MESH_TAGS "")
set(BLE_MESH_TAGS_PRESERVE "")
tag_table_to_lists(_BLE_MESH_TAG_MAP BLE_MESH_TAGS BLE_MESH_TAGS_PRESERVE)
# Export the joined tag strings to the including scope.
set(BLE_MESH_TAGS "${BLE_MESH_TAGS}" PARENT_SCOPE)
set(BLE_MESH_TAGS_PRESERVE "${BLE_MESH_TAGS_PRESERVE}" PARENT_SCOPE)

View File

@@ -0,0 +1,33 @@
# Usage
# set(MY_TABLE CONFIG_FOO_ON CONFIG_FOO_KEEP TAG_FOO ...)
# tag_table_to_lists(MY_TABLE OUT_TAGS OUT_PRESERVE)
# Convert a flat triplet table <compression_cfg> <preserve_cfg> <tag> into
# two ", "-joined strings set in the caller's scope: OUT_TAGS receives every
# tag whose compression config is enabled, OUT_PRESERVE additionally requires
# the preserve config.
function(tag_table_to_lists TABLE_NAME OUT_TAGS OUT_PRESERVE)
    set(_tags "")
    set(_preserve "")
    set(_map ${${TABLE_NAME}})
    list(LENGTH _map _len)
    # Guard the empty table: foreach(RANGE 0 -1 ...) is not a valid range.
    if(_len EQUAL 0)
        set(${OUT_TAGS} "" PARENT_SCOPE)
        set(${OUT_PRESERVE} "" PARENT_SCOPE)
        return()
    endif()
    # A malformed table would otherwise fail inside list(GET) with a far
    # less helpful message.
    math(EXPR _rem "${_len} % 3")
    if(NOT _rem EQUAL 0)
        message(FATAL_ERROR
            "tag_table_to_lists: length of ${TABLE_NAME} (${_len}) is not a multiple of 3")
    endif()
    math(EXPR _stop "${_len} - 1")
    foreach(i RANGE 0 ${_stop} 3)
        math(EXPR _i1 "${i} + 1")
        math(EXPR _i2 "${i} + 2")
        list(GET _map ${i} _comp)
        list(GET _map ${_i1} _pres)
        list(GET _map ${_i2} _tag)
        # ${_comp} holds a CONFIG_* variable NAME; if() dereferences it again.
        if(${_comp})
            list(APPEND _tags "${_tag}")
            if(${_pres})
                list(APPEND _preserve "${_tag}")
            endif()
        endif()
    endforeach()
    list(JOIN _tags ", " _tags_str)
    list(JOIN _preserve ", " _preserve_str)
    set(${OUT_TAGS} "${_tags_str}" PARENT_SCOPE)
    set(${OUT_PRESERVE} "${_preserve_str}" PARENT_SCOPE)
endfunction()

View File

@@ -0,0 +1,189 @@
/*
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#ifndef _BLE_LOG_COMPRESSION_UTILS_H
#define _BLE_LOG_COMPRESSION_UTILS_H
#include "ble_log.h"
/* Token-pasting helpers; _CONCAT forces one extra expansion pass so macro
 * arguments are expanded before pasting. */
#define CONCAT(a, b) a##b
#define _CONCAT(a, b) CONCAT(a, b)
/* _0.._9 expand to plain integers, so GEN_INDEX() results can be pasted
 * into identifiers (e.g. mesh_buffer_0) while still counting as numbers. */
#define _0 0
#define _1 1
#define _2 2
#define _3 3
#define _4 4
#define _5 5
#define _6 6
#define _7 7
#define _8 8
#define _9 9
/* COUNT_ARGS(X...) -> number of variadic arguments, 0..12. */
#define __COUNT_ARGS(_0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _n, X...) _n
#define COUNT_ARGS(X...) __COUNT_ARGS(, ##X, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
/* FOR_EACH_IDX(m, name, len, i0, i1, ...) expands m(name, len, iN) once per
 * trailing index argument (at most 5 are supported). */
#define FOR_EACH_IDX(macro, name, len, ...) \
    _CONCAT(_FOR_EACH_, COUNT_ARGS(__VA_ARGS__))(macro, name, len, __VA_ARGS__)
#define _FOR_EACH_0(m, n, l, ...)
#define _FOR_EACH_1(m, n, l, i1, ...) m(n, l, i1)
#define _FOR_EACH_2(m, n, l, i1, ...) m(n, l, i1) _FOR_EACH_1(m, n, l, __VA_ARGS__)
#define _FOR_EACH_3(m, n, l, i1, ...) m(n, l, i1) _FOR_EACH_2(m, n, l, __VA_ARGS__)
#define _FOR_EACH_4(m, n, l, i1, ...) m(n, l, i1) _FOR_EACH_3(m, n, l, __VA_ARGS__)
#define _FOR_EACH_5(m, n, l, i1, ...) m(n, l, i1) _FOR_EACH_4(m, n, l, __VA_ARGS__)
/* GEN_INDEX(n) -> the index list _0, _1, ..., _(n-1), for n in [0, 5]. */
#define _GEN_INDEX_0()
#define _GEN_INDEX_1() _0
#define _GEN_INDEX_2() _0, _1
#define _GEN_INDEX_3() _0, _1, _2
#define _GEN_INDEX_4() _0, _1, _2, _3
#define _GEN_INDEX_5() _0, _1, _2, _3, _4
#define GEN_INDEX(n) _CONCAT(_GEN_INDEX_, n)()
/* Identifies which stack produced a compressed-log record. */
enum {
    BLE_COMPRESSED_LOG_OUT_SOURCE_HOST,
    BLE_COMPRESSED_LOG_OUT_SOURCE_MESH,
};
/* Per-argument encoding codes packed two per byte into each record. */
enum {
    ARG_SIZE_TYPE_U32,   /* argument type with 4 bytes */
    ARG_SIZE_TYPE_STR,   /* argument type with strings */
    ARG_SIZE_TYPE_U64,   /* argument type with 8 bytes */
    ARG_SIZE_TYPE_LZU32, /* argument type with 4 bytes but with leading zeros */
    ARG_SIZE_TYPE_LZU64, /* argument type with 8 bytes but with leading zeros */
    ARG_SIZE_TYPE_AZU32, /* argument type with 4 bytes but all zeros */
    ARG_SIZE_TYPE_AZU64, /* argument type with 8 bytes but all zeros */
    ARG_SIZE_TYPE_MAX,
};
/* The maximum number of buffers used simultaneously */
#define LOG_CP_MAX_LOG_BUFFER_USED_SIMU 3
/* 2-bit record types carried in the top bits of the record header. */
#define LOG_TYPE_ZERO_ARGS 0
#define LOG_TYPE_HEX_ARGS 1
#define LOG_TYPE_HEX_BUF 2
/* This type of message is used to update log information,
 * such as there is currently a new task log */
#define LOG_TYPE_INFO 3
/* 6-bit sub-codes for LOG_TYPE_INFO records. */
#define LOG_TYPE_INFO_TASK_ID_UPDATE 0
#define LOG_TYPE_INFO_NULL_BUF 1
#define LOG_TYPE_INFO_TASK_SWITCH 2
/* Management block for one statically allocated log buffer. */
typedef struct {
    volatile bool busy; /* claimed/released via CAS (see ble_log_cas_*) */
    uint8_t *buffer;    /* backing storage */
    uint16_t idx;       /* current write offset */
    uint16_t len;       /* capacity of buffer in bytes */
} ble_cp_log_buffer_mgmt_t;
#define CONTENT_CHECK(idx, buf, except_val, len)
/* Fix: the body used `do ( ... )` instead of `do { ... }`, which would not
 * compile at any use site of this macro. */
#define LENGTH_CHECK(idx, pbuffer_mgmt) do { if (unlikely((idx) > ((pbuffer_mgmt)->len))) assert(0 && "Maximum log buffer length exceeded"); } while(0)
/* NOTE: "ENBALE" is a historical typo; the name is kept because it is
 * referenced by other translation units. */
#define BLE_LOG_CP_CONTENT_CHECK_ENBALE 0
#define BLE_LOG_CP_CONTENT_CHECK_VAL 0x00
/* Verify that write_len more bytes fit into the buffer (and, when content
 * checking is compiled in, that the target region still holds the expected
 * fill pattern). Returns 0 when the write may proceed, -1 otherwise. */
static inline int ble_log_cp_buffer_safe_check(ble_cp_log_buffer_mgmt_t *pbuf_mgmt, uint16_t write_len)
{
    uint32_t needed = (uint32_t)pbuf_mgmt->idx + write_len;

    if (needed > pbuf_mgmt->len) {
        printf("Maximum length of buffer(%p) idx %d write_len %d exceed\n", pbuf_mgmt, pbuf_mgmt->idx, write_len);
        return -1;
    }
#if BLE_LOG_CP_CONTENT_CHECK_ENBALE
    for (int pos = pbuf_mgmt->idx; pos < pbuf_mgmt->idx + write_len; pos++) {
        if (pbuf_mgmt->buffer[pos] != BLE_LOG_CP_CONTENT_CHECK_VAL) {
            printf("The value(%02x) in the buffer does not match the expected(%02x)\n", pbuf_mgmt->buffer[pos], BLE_LOG_CP_CONTENT_CHECK_VAL);
            return -1;
        }
    }
#endif
    return 0;
}
/* Append a single byte to the log buffer. Returns 0 on success, -1 when
 * the buffer is full. */
static inline int ble_log_cp_push_u8(ble_cp_log_buffer_mgmt_t *pbuf_mgmt, uint8_t val)
{
    if (ble_log_cp_buffer_safe_check(pbuf_mgmt, 1) != 0) {
        return -1;
    }
    pbuf_mgmt->buffer[pbuf_mgmt->idx++] = val;
    return 0;
}
/* Append a 16-bit value in host byte order.
 * Returns 0 on success, -1 when the buffer is full. */
static inline int ble_log_cp_push_u16(ble_cp_log_buffer_mgmt_t *pbuf_mgmt, uint16_t val)
{
    if (ble_log_cp_buffer_safe_check(pbuf_mgmt, 2)) {
        return -1;
    }
    /* Fix: was a direct store through a cast uint16_t*, which is an
     * unaligned access for odd idx values; memcpy is alignment-safe and
     * compiles to the same store where unaligned access is legal. */
    memcpy(&pbuf_mgmt->buffer[pbuf_mgmt->idx], &val, sizeof(val));
    pbuf_mgmt->idx += 2;
    return 0;
}
/* Append a 32-bit value in host byte order.
 * Returns 0 on success, -1 when the buffer is full. */
static inline int ble_log_cp_push_u32(ble_cp_log_buffer_mgmt_t *pbuf_mgmt, uint32_t val)
{
    if (ble_log_cp_buffer_safe_check(pbuf_mgmt, 4)) {
        return -1;
    }
    /* Fix: was a direct store through a cast uint32_t*, which is an
     * unaligned access for misaligned idx values; memcpy is alignment-safe. */
    memcpy(&pbuf_mgmt->buffer[pbuf_mgmt->idx], &val, sizeof(val));
    pbuf_mgmt->idx += 4;
    return 0;
}
/* Append a 64-bit value in host byte order.
 * Returns 0 on success, -1 when the buffer is full. */
static inline int ble_log_cp_push_u64(ble_cp_log_buffer_mgmt_t *pbuf_mgmt, uint64_t val)
{
    if (ble_log_cp_buffer_safe_check(pbuf_mgmt, 8)) {
        return -1;
    }
    /* Fix: was a direct store through a cast uint64_t*, which is an
     * unaligned access for misaligned idx values; memcpy is alignment-safe. */
    memcpy(&pbuf_mgmt->buffer[pbuf_mgmt->idx], &val, sizeof(val));
    pbuf_mgmt->idx += 8;
    return 0;
}
/* Append len raw bytes from buf to the log buffer. Returns 0 on success,
 * -1 when the data does not fit. */
static inline int ble_log_cp_push_buf(ble_cp_log_buffer_mgmt_t *pbuf_mgmt, const uint8_t *buf, uint16_t len)
{
    if (ble_log_cp_buffer_safe_check(pbuf_mgmt, len)) {
        return -1;
    }
    memcpy(&pbuf_mgmt->buffer[pbuf_mgmt->idx], buf, len);
    pbuf_mgmt->idx += len;
    return 0;
}
/* Patch one nibble of an already-written byte at offset idx: the high
 * nibble when high is non-zero, the low nibble otherwise. Returns -1 when
 * idx has not been written yet. */
static inline int ble_log_cp_update_half_byte(ble_cp_log_buffer_mgmt_t *pbuf_mgmt,
                                              uint16_t idx, uint8_t new_data, uint8_t high)
{
    /* Only bytes inside the already-written region may be patched. */
    if (pbuf_mgmt->idx <= idx) {
        return -1;
    }
    uint8_t cur = pbuf_mgmt->buffer[idx];
    if (high) {
        cur = (cur & 0x0f) | (uint8_t)(new_data << 4);
    } else {
        cur = (cur & 0xf0) | (new_data & 0x0f);
    }
    pbuf_mgmt->buffer[idx] = cur;
    return 0;
}
/* Debug helper: dump the currently buffered bytes as space-separated hex. */
static inline int ble_log_cp_buffer_print(ble_cp_log_buffer_mgmt_t *pbuf_mgmt)
{
    const uint8_t *bytes = pbuf_mgmt->buffer;
    const size_t count = pbuf_mgmt->idx;

    for (size_t pos = 0; pos < count; pos++) {
        printf("%02x ", bytes[pos]);
    }
    printf("\n");
    return 0;
}
/* Forward a finished compressed record to the BLE log transport.
 * All records go out tagged BLE_LOG_SRC_ENCODE (see ble_log.h); the source
 * id is already the first byte of data, so the parameter is currently
 * unused here. */
static inline int ble_compressed_log_output(uint8_t source, uint8_t *data, uint16_t len)
{
    return ble_log_write_hex(BLE_LOG_SRC_ENCODE, data, len);
}
#endif /* _BLE_LOG_COMPRESSION_UTILS_H */

View File

@@ -0,0 +1,440 @@
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# ruff: noqa: UP007
"""
Log Database Manager
====================
Manages the storage and retrieval of compressed log metadata using a multiprocessing-safe database.
"""
import atexit
import hashlib
import json
import logging
import os
import tempfile
import threading
from collections.abc import Mapping
from pathlib import Path
from types import TracebackType
from typing import Any
from typing import Union
LOGGER = logging.getLogger('log_db_manager')
class LogDBManager:
"""Manages log compression metadata in a multiprocessing-safe manner."""
# Operation result codes
SUCCESS = 0
LOG_EXISTS = 1
OPERATION_FAILED = 2
SOURCE_LOG_UPDATE_FULL = 0
SOURCE_LOG_UPDATE_PARTIAL = 1
SOURCE_LOG_UPDATE_NONE = 2
def __init__(self, data_dir: str, sources: Mapping[str, str], logger: Union[logging.Logger, None] = None):
"""
Initialize the log database manager.
Args:
data_dir: Directory for database files
sources: List of log sources/modules
logger: Optional logger instance
"""
self.logger = logger or LOGGER
self.data_dir = Path(data_dir)
self.sources = [s.upper() for s in sources.keys()]
self.source_cfg = sources
self.sources_exist: dict[str, bool] = {s.upper(): False for s in sources.keys()}
self.sources_updated: dict[str, int] = {s.upper(): 0 for s in sources.keys()}
# Create database directory
self.data_dir.mkdir(parents=True, exist_ok=True)
# Setup multiprocessing infrastructure
self.shared_data: dict[str, Any] = dict()
self.locks: dict[str, Any] = dict()
self.global_lock = threading.Lock()
# Initialize sources
self._initialize_sources()
self._closed = False
self.stop_event = threading.Event()
atexit.register(self.safe_close)
self.logger.info(f'LogDB initialized for {len(self.sources)} sources in {self.data_dir}')
def _initialize_sources(self) -> None:
"""Initialize database structures for all sources."""
with self.global_lock:
for source in self.sources:
# Create source-specific structures
self.shared_data[source] = dict(
{
'config': '',
'files': dict(), # file_path -> {src_hash, compressed_hash}
'logs': dict(), # log_id -> log_data
'index': dict(), # unique_key -> log_id
'max_id': 0,
}
)
# Create source-specific lock
self.locks[source] = threading.Lock()
# Load existing data
self._load_source(source)
def _source_file_path(self, source: str) -> Path:
"""Get file path for a source's database file."""
return self.data_dir / f'{source}_logs.json'
def _file_hash(self, file_path: Union[str, Path]) -> str:
"""Compute SHA256 hash of a file's contents."""
hasher = hashlib.sha256()
file_path = Path(file_path)
try:
with file_path.open('rb') as f:
while chunk := f.read(8192):
hasher.update(chunk)
return hasher.hexdigest()
except OSError as e:
self.logger.error(f'Failed to compute hash for {file_path}: {e}')
return ''
def is_config_updated(self, source: str) -> bool:
return bool(self.source_cfg[source] != self.shared_data[source]['config'])
def is_file_processed(self, source: str, src_path: Union[str, Path], compressed_path: Union[str, Path]) -> bool:
"""
Check if a file has already been processed.
Args:
source: Log source/module
src_path: Original source file path
compressed_path: Compressed version path
Returns:
True if file has been processed, False otherwise
"""
source = source.upper()
if source not in self.shared_data:
self.logger.error(f'Unknown source: {source}')
return False
src_hash = self._file_hash(src_path)
compressed_hash = self._file_hash(compressed_path) if Path(compressed_path).exists() else ''
with self.locks[source]:
files = self.shared_data[source]['files']
file_info = files.get(str(src_path))
# Check if file is registered and hashes match
if file_info:
if file_info.get('src_hash') == src_hash and file_info.get('compressed_hash') == compressed_hash:
return True
# Update hashes if changed
file_info['src_hash'] = src_hash
if compressed_hash:
file_info['compressed_hash'] = compressed_hash
return False
# New file
files[str(src_path)] = dict({'src_hash': src_hash, 'compressed_hash': compressed_hash})
return False
def mark_file_processed(self, source: str, src_path: Union[str, Path], compressed_path: Union[str, Path]) -> None:
"""
Mark a file as successfully processed.
Args:
source: Log source/module
src_path: Original source file path
compressed_path: Compressed version path
"""
source = source.upper()
if source not in self.shared_data:
return
src_hash = self._file_hash(src_path)
compressed_hash = self._file_hash(compressed_path)
with self.locks[source]:
files = self.shared_data[source]['files']
file_info = files.get(str(src_path), dict())
file_info.update({'src_hash': src_hash, 'compressed_hash': compressed_hash})
files[str(src_path)] = file_info
    def _load_source(self, source: str) -> None:
        """
        Load one source's database from its JSON file into shared_data.

        Creates an empty database file when none exists yet.  JSON object keys
        are always strings, so log ids are converted back to int, and both the
        dedup index and max_id are rebuilt from the loaded entries.
        """
        db_file = self._source_file_path(source)
        # Create empty database if not exists
        if not db_file.exists():
            with db_file.open('w') as f:
                json.dump({'config': '', 'files': {}, 'logs': {}}, f)
            return
        try:
            with db_file.open('r') as f:
                data = json.load(f)
        except (OSError, json.JSONDecodeError) as e:
            # A corrupt/unreadable database is treated as empty; processing
            # then regenerates everything from the sources.
            self.logger.error(f'Error loading {source} database: {e}')
            return
        files = {}
        logs = {}
        indexes = {}
        with self.locks[source]:
            source_db = self.shared_data[source]
            # Load the config string that was active when the DB was saved.
            source_db['config'] = data.get('config', '')
            # Load per-file hash records.
            source_db['files'].clear()
            for path, info in data.get('files', {}).items():
                files[path] = dict(info)
            source_db['files'].update(files)
            # Load logs and rebuild the dedup index + max_id.
            source_db['logs'].clear()
            source_db['index'].clear()
            max_id = 0
            for log_id, log_data in data.get('logs', {}).items():
                # JSON keys are strings; restore the integer id.
                log_id = int(log_id)
                logs[log_id] = dict(log_data)
                unique_key = self._log_unique_key(log_data)
                indexes[unique_key] = log_id
                if log_id > max_id:
                    max_id = log_id
            source_db['logs'].update(logs)
            source_db['index'].update(indexes)
            source_db['max_id'] = max_id
        # Remember how many entries were already persisted so later saves can
        # tell whether anything new was added (see source_update_state).
        self.sources_updated[source] = len(logs)
        self.sources_exist[source] = bool(data.get('logs'))
        self.logger.info(f'Loaded {len(data.get("logs", {}))} logs for {source}')
def _save_source(self, source: str) -> bool:
"""Save source data to JSON file using atomic write."""
db_file = self._source_file_path(source)
# Prepare data
with self.locks[source]:
source_db = self.shared_data[source]
config = self.source_cfg[source]
files_data = {path: dict(info) for path, info in source_db['files'].items()}
logs_data = {log_id: dict(data) for log_id, data in source_db['logs'].items()}
data = {'config': config, 'files': files_data, 'logs': logs_data}
# Atomic write
tmp_path = None
try:
with tempfile.NamedTemporaryFile(mode='w', dir=self.data_dir, delete=False) as tmp_file:
json.dump(data, tmp_file, indent=2)
tmp_path = tmp_file.name
# Replace original file
os.replace(tmp_path, db_file)
return True
except (OSError, TypeError) as e:
self.logger.error(f'Error saving {source} database: {e}')
if tmp_path and Path(tmp_path).exists():
Path(tmp_path).unlink()
return False
def _log_unique_key(self, log_data: dict) -> tuple:
"""
Create a unique key for a log entry.
Args:
log_data: Log entry dictionary
Returns:
Unique key tuple
"""
return (log_data['tag'], log_data['format'], log_data['caller'], log_data['file'], log_data['line_number'])
def add_log(
self,
source: str,
log_tag: str,
log_format: str,
log_line_number: int,
hexify: bool,
caller_func: str,
caller_line: int,
file_name: str,
) -> tuple[int, int]:
"""
Add a new log entry to the database if it doesn't exist.
Args:
source: Log source/module
log_tag: Log tag
log_format: Log format string
log_line_number: Log Line number
hexify: Whether the log can be hexified
caller_func: Calling function name
caller_line: Calling function line number
file_name: Source file name
Returns:
Tuple (result_code, log_id)
"""
if self._closed:
return self.OPERATION_FAILED, 0
source = source.upper()
if source not in self.shared_data:
self.logger.error(f'Unknown source: {source}')
return self.OPERATION_FAILED, 0
log_data = {
'tag': log_tag,
'format': log_format,
'line_number': log_line_number,
'hexify': hexify,
'caller': caller_func,
'caller_line': caller_line,
'file': file_name,
}
unique_key = self._log_unique_key(log_data)
with self.locks[source]:
source_db = self.shared_data[source]
# Check if log exists
if unique_key in source_db['index']:
existing_id = source_db['index'][unique_key]
return self.LOG_EXISTS, existing_id
# Create new log entry
new_id = source_db['max_id'] + 1
log_data['id'] = new_id
source_db['logs'][new_id] = dict(log_data)
source_db['index'][unique_key] = new_id
source_db['max_id'] = new_id
self.logger.info(f'Added new log [{source}]: ID={new_id}, Tag={log_tag}')
return self.SUCCESS, new_id
def remove_log(self, source: str, log_id: int) -> bool:
"""
Remove a log entry from the database.
Args:
source: Log source/module
log_id: ID of log to remove
Returns:
True if successful, False otherwise
"""
source = source.upper()
if source not in self.shared_data:
return False
with self.locks[source]:
source_db = self.shared_data[source]
if log_id not in source_db['logs']:
return False
# Remove from indexes
log_data = dict(source_db['logs'][log_id])
unique_key = self._log_unique_key(log_data)
if unique_key in source_db['index']:
del source_db['index'][unique_key]
del source_db['logs'][log_id]
if log_id == source_db['max_id']:
source_db['max_id'] = max(source_db['logs'].keys()) if source_db['logs'] else 0
return True
def source_update_state(self, source: str) -> int:
if self.is_config_updated(source):
return self.SOURCE_LOG_UPDATE_FULL
if self.sources_updated[source] == len(self.shared_data[source]['logs']):
return self.SOURCE_LOG_UPDATE_NONE
elif self.sources_updated[source] == 0 and len(self.shared_data[source]['logs']) != 0:
return self.SOURCE_LOG_UPDATE_FULL
else:
return self.SOURCE_LOG_UPDATE_PARTIAL
def save_source(self, source: str) -> bool:
"""Save a single source's data to file."""
return self._save_source(source.upper())
def save_all(self) -> int:
"""Save all sources' data to files."""
self.logger.debug('Saving all sources...')
success_count = 0
for source in self.sources:
if (not self.is_config_updated(source)) and self.sources_updated[source] == len(
self.shared_data[source]['logs']
):
self.logger.info(f'{source} has not updated data')
continue
if self._save_source(source):
success_count += 1
self.logger.info(f'Saved {success_count}/{len(self.sources)} sources')
return success_count
def safe_close(self) -> None:
"""Safe close method that handles atexit callbacks."""
if not self._closed:
try:
self.close()
except Exception as e:
# Log to stderr as logging may be shutdown
print(f'Error during safe_close: {e}', flush=True)
def close(self) -> None:
"""Cleanup resources."""
if self._closed:
return
self._closed = True
try:
if not self.stop_event.is_set():
self.stop_event.set()
except Exception:
pass
self.logger.info('Closing database manager...')
self.stop_event.set()
# Final save
self.logger.info('Performing final save...')
self.save_all()
# Shutdown manager
self.logger.info('Database manager closed')
    def __enter__(self) -> 'LogDBManager':
        """Context-manager entry: returns the manager itself."""
        return self
    def __exit__(
        self,
        exc_type: Union[type[BaseException], None],
        exc_val: Union[BaseException, None],
        exc_tb: Union[TracebackType, None],
    ) -> None:
        """Context-manager exit: always closes; exceptions are not suppressed."""
        self.close()

View File

@@ -0,0 +1,967 @@
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# The current project needs to support environments before Python 3.9,
# and UP007 will prohibit the use of Tuple, Union, etc.
# ruff: noqa: UP007
# The current project needs to support environments before Python 3.9,
# Therefore, it is necessary to prohibit UP006 from automatically
# changing the annotation type
# ruff: noqa: UP006
# ruff: noqa: UP035
"""
BLE Log Compression Utility
===========================
This script processes Bluetooth source files to compress logging statements.
"""
import argparse
import enum
import logging
import os
import re
import shutil
import sys
import textwrap
import traceback
from datetime import datetime
from pathlib import Path
from typing import Any
from typing import Dict
from typing import List
from typing import Tuple
from typing import Union
import tree_sitter_c as tsc
import yaml
from c_format_parse import parse_format_string
from inttypes_map import TYPES_MACRO_MAP
from LogDBManager import LogDBManager
try:
import importlib.metadata as meta
_TS_VER = tuple(map(int, meta.version('tree-sitter').split('.')[:2]))
except Exception:
_TS_VER = (0, 20)
if _TS_VER >= (0, 21):
from tree_sitter import Language
from tree_sitter import Node
from tree_sitter import Parser
from tree_sitter import Query
from tree_sitter import Tree
if _TS_VER >= (0, 25):
from tree_sitter import QueryCursor
else:
from tree_sitter import Language
from tree_sitter import Parser
from tree_sitter import Tree
# Initialize logger
LOGGER = logging.getLogger('ble_log_compression')
# Global parser instances
C_LANGUAGE: Union[Language, None] = None
CLANG_PARSER: Union[Parser, None] = None
# Log source enumeration
SOURCE_ENUM_MAP = {
'BLE_HOST': 0,
'BLE_MESH': 1,
}
# Functions that require hex formatting
HEX_FUNCTIONS = ['bt_hex'] # Used in Mesh and Audio modules
# C keywords to exclude from function names
C_KEYWORDS = {
'auto',
'break',
'case',
'char',
'const',
'continue',
'default',
'do',
'double',
'else',
'enum',
'extern',
'float',
'for',
'goto',
'if',
'int',
'long',
'register',
'return',
'short',
'signed',
'sizeof',
'static',
'struct',
'switch',
'typedef',
'union',
'unsigned',
'void',
'volatile',
'while',
}
FUNC_MACROS = {'__func__', '__FUNCTION__'}
LINE_MACROS = {
'__LINE__',
}
BLUEDROID_LOG_MODE_LEVEL_GET = {
'BTM': 'btm_cb.trace_level',
'L2CAP': 'l2cb.l2cap_trace_level',
'GAP': 'gap_cb.trace_level',
'GATT': 'gatt_cb.trace_level',
'SMP': 'smp_cb.trace_level',
'APPL': 'appl_trace_level',
}
class ARG_SIZE_TYPE(enum.IntEnum):
    """Argument size/encoding codes embedded in the generated compression macros."""

    U32 = 0    # 32-bit integer argument
    STR = 1    # string argument
    U64 = 2    # 64-bit slot (used for float and 'll'-sized integers)
    LZU32 = 3  # NOTE(review): meaning not evident from this file -- confirm against the C decoder
    LZU64 = 4  # NOTE(review): meaning not evident from this file -- confirm against the C decoder
    AZU32 = 5  # NOTE(review): meaning not evident from this file -- confirm against the C decoder
    AZU64 = 6  # NOTE(review): meaning not evident from this file -- confirm against the C decoder
def TsInit() -> Tuple[Language, Parser]:
    """Create a C Language/Parser pair across tree-sitter binding generations."""
    if _TS_VER > (0, 21):
        # Modern API: Parser accepts the Language in its constructor.
        lang = Language(tsc.language())
        return lang, Parser(lang)
    # Legacy API (<= 0.21): two-argument Language ctor plus set_language().
    lang = Language(tsc.language(), 'c')
    parser = Parser()
    parser.set_language(lang)
    return lang, parser
def TsQueryByTree(language: Language, tree: Tree, query_str: str) -> Dict[str, List[Node]]:
    """Run *query_str* against *tree* and normalize captures to {name: [nodes]}."""
    # The captures() return type changed across tree-sitter releases:
    # a (node, name) list in older versions, a dict in newer ones.
    if _TS_VER == (0, 21):
        raw = language.query(query_str).captures(tree.root_node)
    elif (0, 21) < _TS_VER < (0, 25):
        raw = Query(language, (query_str)).captures(tree.root_node)
    else:
        raw = QueryCursor(Query(language, (query_str))).captures(tree.root_node)
    if not isinstance(raw, list):
        # Already in the normalized dict shape.
        return raw
    grouped: Dict[str, List[Node]] = {}
    for node, capture_name in raw:
        grouped.setdefault(capture_name, []).append(node)
    return grouped
class LogCompressor:
"""Main class for BLE log compression."""
def __init__(self) -> None:
self.bt_component_path = Path()
self.build_dir = Path()
self.bt_compressed_srcs_path = Path()
self.config: dict[str, Any] = {}
self.module_info: dict[str, Any] = {}
    def init_parser(self) -> Parser:
        """(Re)create the module-global C Language/Parser pair and return the parser."""
        global C_LANGUAGE, CLANG_PARSER
        C_LANGUAGE, CLANG_PARSER = TsInit()
        return CLANG_PARSER
    def extract_log_calls(self, code_content: bytes, log_tags: list[str]) -> list[dict]:
        """
        Extract log statements from C source code.

        Args:
            code_content: Source code as bytes
            log_tags: List of log macro names to search for

        Returns:
            List of dictionaries containing log information
        """
        parser = self.init_parser()
        tree = parser.parse(code_content)
        # Function boundaries let each log be attributed to its enclosing caller.
        function_map = self._get_function_boundaries(tree)
        return self._find_log_statements(tree, log_tags, function_map)
    def _get_function_boundaries(self, tree: Tree) -> list[tuple[str, int, int, int]]:
        """
        Identify function boundaries in the AST.

        Returns:
            List of tuples (function_name, start_byte, end_byte, line_number)
        """
        # Capture all function declarators; declarations (prototypes) are
        # captured separately as @func_decls so they can be excluded below,
        # leaving only definitions.
        function_query = """
        [
            (function_declarator
                (identifier) @func_name
            )
            (declaration
                (function_declarator
                    (identifier) @func_decls
                )
            )
        ]
        """
        captures: dict[str, list[Node]] = TsQueryByTree(C_LANGUAGE, tree, function_query)
        # Filter valid function names (drop prototypes and C keywords).
        func_names = [
            node
            for node in captures.get('func_name', [])
            if node not in captures.get('func_decls', []) and node.text.decode('utf-8') not in C_KEYWORDS
        ]
        # Sort by start byte so each function's span ends where the next begins.
        func_names.sort(key=lambda node: node.start_byte)
        boundaries = []
        for i, node in enumerate(func_names):
            func_name = node.text.decode('utf-8')
            start_byte = node.start_byte
            # Approximation: a function spans from its name up to the next
            # function name (or end of file for the last one).
            end_byte = func_names[i + 1].start_byte if i < len(func_names) - 1 else len(tree.root_node.text)
            line_number = node.start_point[0] + 1
            boundaries.append((func_name, start_byte, end_byte, line_number))
        return boundaries
    def _find_log_statements(
        self, tree: Tree, log_tags: list[str], function_boundaries: list[tuple[str, int, int, int]]
    ) -> list[dict]:
        """
        Find log statements in the AST.

        Args:
            tree: Parsed AST tree
            log_tags: List of log tags to search for
            function_boundaries: Function boundaries list

        Returns:
            List of log information dictionaries
        """
        # Build an alternation pattern matching any of the log macro names.
        tag_pattern = '|'.join(log_tags)
        log_query = f"""
        (expression_statement
            (call_expression
                function: (identifier) @fname
                arguments: (argument_list) @args
                (#match? @fname "^({tag_pattern})$")
            ) @log_stmt
        )
        """
        captures: dict[str, list[Node]] = TsQueryByTree(C_LANGUAGE, tree, log_query)
        log_nodes = captures.get('log_stmt', [])
        # The nodes must be processed in file order: compress_file() keeps a
        # running byte offset while rewriting tags, which is only correct when
        # logs are visited by ascending start byte.
        log_nodes.sort(key=lambda node: node.start_byte)
        logs = []
        for node in log_nodes:
            try:
                log_info = self._process_log_node(node, function_boundaries)
                if log_info:
                    logs.append(log_info)
            except Exception as e:
                # Re-raise after logging: a malformed log statement should
                # abort the whole compression run, not be silently skipped.
                LOGGER.error(f'Error processing log node: {e}\n{traceback.format_exc()}')
                raise
        return logs
    def _process_log_node(self, node: Node, function_boundaries: list[tuple[str, int, int, int]]) -> Union[dict, None]:
        """
        Process a log AST node and extract information.

        Builds a dict with the tag (plus its byte span, for in-place
        rewriting), the line number, all argument texts/spans, the parsed
        format tokens, and the enclosing caller.  Special arguments
        (__func__/__LINE__/bt_hex-style calls) are marked by rewriting the
        token's conversion field to '@func' / '@line' / '@hex_func@buf@len'.

        Args:
            node: Log statement AST node
            function_boundaries: Function boundaries list

        Returns:
            Log information dictionary or None if invalid
        """
        # Extract basic information
        tag_node = node.child_by_field_name('function')
        if not tag_node:
            return None
        tag = tag_node.text.decode('utf-8')
        args_node = node.child_by_field_name('arguments')
        if not args_node or args_node.type != 'argument_list':
            return None
        # Initialize log info
        log_info = {
            'tag': (tag, tag_node.start_byte, tag_node.end_byte),
            'line_number': node.start_point[0] + 1,
            'arguments': [],
            'hexify': True,
        }
        # Process format string (first argument); comments between arguments
        # are ignored.
        valid_arg_childrn: list[Node] = [n for n in args_node.named_children if n.type != 'comment']
        fmt_node = valid_arg_childrn[0] if len(valid_arg_childrn) > 0 else None
        if not fmt_node:
            return None
        if fmt_node.type == 'concatenated_string':
            # "abc" PRIu32 "def" style format: flatten macros + literals.
            log_fmt = self._process_concatenated_string(fmt_node)
        elif fmt_node.type == 'string_literal':
            log_fmt = fmt_node.text.decode('utf-8')[1:-1]  # Remove quotes
        else:
            # Non-literal format strings cannot be compressed.
            return None
        log_info['arguments'].append((f'"{log_fmt}"', fmt_node.start_byte, fmt_node.end_byte))
        # Parse format tokens; tuples are conversion specs, plain strings are
        # literal text between them.
        tokens = parse_format_string(f'"{log_fmt}"')
        tokens_tuple_map: list[int] = []
        for idx, tk in enumerate(tokens):
            if isinstance(tk, tuple):
                tokens_tuple_map.append(idx)
        arguments: list[Node] = valid_arg_childrn[1:]
        # Argument count must match the number of conversion specs.
        if len(arguments) != len(tokens_tuple_map):
            raise SyntaxError(f'LogSyntaxError:{node.text.decode("utf-8")}')
        # Process each argument
        for i, (token, arg_node) in enumerate(zip([t for t in tokens if isinstance(t, tuple)], arguments)):
            arg_text = arg_node.text.decode('utf-8')
            log_info['arguments'].append((arg_text, arg_node.start_byte, arg_node.end_byte))
            # Check if argument can be hexified
            # if not self._can_be_hexified(token, arg_node):
            #     log_info['hexify'] = False
            # Handle special identifiers: token[6] appears to be the
            # conversion-character slot of the parsed spec.
            if arg_text in FUNC_MACROS:
                token_list = list(token)
                token_list[6] = '@func'  # Modify conversion char to special marker
                tokens[tokens_tuple_map[i]] = tuple(token_list)
            elif arg_text in LINE_MACROS:
                token_list = list(token)
                token_list[6] = '@line'
                tokens[tokens_tuple_map[i]] = tuple(token_list)
            # Handle hex functions (e.g. bt_hex(buf, len)): record the buffer
            # and length expressions in the marker for later macro generation.
            if (
                arg_node.type == 'call_expression'
                and arg_node.child_by_field_name('function')
                and arg_node.child_by_field_name('function').text.decode('utf-8') in HEX_FUNCTIONS
            ):
                # Extract arguments of the hex function
                hex_args = arg_node.child_by_field_name('arguments')
                if hex_args and hex_args.named_child_count >= 2:
                    buf_node = hex_args.named_children[0]
                    len_node = hex_args.named_children[1]
                    token_list = list(token)
                    token_list[6] = f'@hex_func@{buf_node.text.decode("utf-8")}@{len_node.text.decode("utf-8")}'
                    tokens[tokens_tuple_map[i]] = tuple(token_list)
        log_info['argu_tokens'] = tokens
        # Attribute the log to its enclosing function.
        caller_info = self._find_calling_function(node.start_byte, function_boundaries)
        if not caller_info:
            return None
        log_info.update(caller_info)
        return log_info
def _process_concatenated_string(self, node: Node) -> str:
"""Process a concatenated string node into a single string."""
parts = []
for child in node.named_children:
if child.type == 'identifier':
identifier = child.text.decode('utf-8')
if identifier in TYPES_MACRO_MAP:
parts.append(TYPES_MACRO_MAP[identifier])
else:
raise ValueError(f'Unknown format macro: {identifier}')
elif child.type == 'string_literal':
parts.append(child.text.decode('utf-8')[1:-1]) # Remove quotes
else:
raise ValueError(f'Unsupported node in concatenated string: {child.type}')
return ''.join(parts)
def _can_be_hexified(self, token: tuple[int, int, str, str, str, str, str], node: Node) -> bool:
"""Determine if a node can be represented in hex format."""
if token[-1] != 's':
return True
if node.type == 'identifier' and node.text.decode('utf-8') in FUNC_MACROS:
return True
if (
node.type == 'call_expression'
and node.child_by_field_name('function')
and node.child_by_field_name('function').text.decode('utf-8') in HEX_FUNCTIONS
):
return True
return False
def _find_calling_function(
self, log_start: int, function_boundaries: list[tuple[str, int, int, int]]
) -> Union[dict, None]:
"""
Find the function containing the log statement.
Args:
log_start: Start byte of the log statement
function_boundaries: List of function boundaries
Returns:
Dictionary with caller information or None if not found
"""
for name, start, end, line in function_boundaries:
if start <= log_start < end:
return {'caller_name': name, 'caller_line_number': line}
return None
    def generate_compressed_macro(
        self, source: str, log_idx: int, tag: str, print_fmt: Union[str, None], log_info: dict
    ) -> str:
        """
        Generate a compressed log macro definition.

        The emitted C macro replaces the original `TAG(fmt, ...)` call with a
        `TAG_<idx>(fmt, ...)` definition that forwards pre-encoded data to the
        ble_log_compressed_* runtime, wrapped in the module's own log-level
        guard so runtime filtering still applies.

        Args:
            source: Log source module ('BLE_MESH' or 'BLE_HOST')
            log_idx: Unique log index (0 disables generation)
            tag: Original log tag
            print_fmt: Simplified format string (None -> "NULL")
            log_info: Log information dictionary

        Returns:
            Macro definition string ('' for an invalid index/level/source)
        """
        if not log_idx:
            return ''

        def generate_mesh_log_prefix(source: str, tag: str, print_statm: str) -> str:
            # Mesh tags look like MOD_LEVEL (e.g. NET_DBG); map the suffix to
            # the mesh log-level constants and wrap in the mesh level check.
            level = tag.split('_')[-1]
            mod = tag.split('_')[0]
            if level == 'ERR':
                level = 'ERROR'
                log_level = 'BLE_MESH_LOG_LEVEL_ERROR'
            elif level == 'WARN':
                level = 'WARN'
                log_level = 'BLE_MESH_LOG_LEVEL_WARN'
            elif level == 'INFO':
                level = 'INFO'
                log_level = 'BLE_MESH_LOG_LEVEL_INFO'
            elif level == 'DBG':
                level = 'DEBUG'
                log_level = 'BLE_MESH_LOG_LEVEL_DEBUG'
            else:
                LOGGER.error(f'Invalid log level {level}')
                return ''
            # NET_* tags use the net_buf-specific level switches.
            if mod == 'NET':
                used_log_levl = 'BLE_MESH_NET_BUF_LOG_LEVEL'
                used_log_mod = 'BLE_MESH_NET_BUF'
            else:
                used_log_levl = 'BLE_MESH_LOG_LEVEL'
                used_log_mod = 'BLE_MESH'
            return (
                f'{{do {{ if (({used_log_levl} >= {log_level}) &&'
                f' BLE_MESH_LOG_LEVEL_CHECK({used_log_mod}, {level})) {print_statm};}} while (0);}}\\\n'
            )

        def generate_bluedroid_log_prefix(source: str, tag: str, print_statm: str) -> str:
            # Bluedroid tags look like MOD_..._LEVEL; the per-module trace
            # level variable comes from BLUEDROID_LOG_MODE_LEVEL_GET.
            tag_info = tag.split('_')
            mod = tag_info[0]
            return (
                f'{{if ({BLUEDROID_LOG_MODE_LEVEL_GET[mod]} >= BT_TRACE_LEVEL_{tag_info[-1]} &&'
                f' BT_LOG_LEVEL_CHECK({mod}, {tag_info[-1]})) {print_statm};}}\\\n'
            )

        def generate_log_lvl_prefix(source: str, tag: str, print_statm: str) -> str:
            # Dispatch the level-guard wrapper by source module.
            if source == 'BLE_MESH':
                return ' ' + generate_mesh_log_prefix(source, tag, print_statm)
            elif source == 'BLE_HOST':  # only bluedroid host supported for now
                return ' ' + generate_bluedroid_log_prefix(source, tag, print_statm)
            else:
                LOGGER.error(f'Unknown source {source}')
                return ''

        source_value = SOURCE_ENUM_MAP.get(source.upper())
        if source_value is None:
            raise ValueError(f'Invalid source: {source}')
        macro = f'#define {tag}_{log_idx}(fmt, ...) {{\\\n'
        if log_info['hexify']:
            # Count of arguments that are not special (__func__, __LINE__, etc.)
            arg_tokens = [t for t in log_info['argu_tokens'] if isinstance(t, tuple)]
            arg_count = len(arg_tokens)
            arguments = []
            sizes = []
            hex_func: list[str] = []
            LOGGER.info(f'{arg_tokens}:{[a[0] for a in log_info["arguments"]]}')
            # log_info['arguments'][0] is the format string; skip it.
            for token, argument in zip(
                arg_tokens,
                [a[0] for a in log_info['arguments'][1:]],
            ):
                # __func__/__LINE__ are reconstructed by the decoder from the
                # database, so they are not transmitted.
                if token[6] in ('@func', '@line'):
                    arg_count -= 1
                    continue
                # Hex-dump helper calls are emitted as separate BUF prints.
                if token[6].startswith('@hex_func'):
                    if not hex_func:
                        hex_func = []
                    hex_func.append(token[6])
                    arg_count -= 1
                    continue
                arguments.append(argument)
                if token[6] == 'f' or token[5] == 'll':  # float or long long -> 64-bit slot
                    sizes.append(f'{int(ARG_SIZE_TYPE.U64)}')
                elif token[6] == 's':
                    sizes.append(f'{int(ARG_SIZE_TYPE.STR)}')
                else:
                    sizes.append(f'{int(ARG_SIZE_TYPE.U32)}')
            if arg_count > 0:
                size_str = ', '.join(sizes)
                # Argument expressions may span lines in the C source; flatten.
                arg_str = ', '.join(arguments).replace('\n', '')
                macro += generate_log_lvl_prefix(
                    source,
                    tag,
                    (f'BLE_LOG_COMPRESSED_HEX_PRINT({source_value}, {log_idx}, {arg_count}, {size_str}, {arg_str})'),
                )
                for idx, item in enumerate(hex_func):
                    # hex_func format: @hex_func@buf@len
                    parts = item.split('@')
                    if len(parts) >= 4:
                        buf = parts[2]
                        buf_len = parts[3]
                        macro += generate_log_lvl_prefix(
                            source,
                            tag,
                            (f'BLE_LOG_COMPRESSED_HEX_PRINT_BUF({source_value}, {log_idx}, {idx}, {buf}, {buf_len})'),
                        )
            else:
                # No scalar arguments left: emit the zero-argument form.
                macro += generate_log_lvl_prefix(
                    source, tag, f'BLE_LOG_COMPRESSED_HEX_PRINT_WITH_ZERO_ARGUMENTS({source_value}, {log_idx})'
                )
                for idx, item in enumerate(hex_func):
                    # hex_func format: @hex_func@buf@len
                    parts = item.split('@')
                    if len(parts) >= 4:
                        buf = parts[2]
                        buf_len = parts[3]
                        macro += generate_log_lvl_prefix(
                            source,
                            tag,
                            (f'BLE_LOG_COMPRESSED_HEX_PRINT_BUF({source_value}, {log_idx}, {idx}, {buf}, {buf_len})'),
                        )
            # Tags listed in 'tags_with_preserve' additionally keep the
            # original plain-text log call.
            if (
                'tags_with_preserve' in self.module_info[source]
                and tag in self.module_info[source]['tags_with_preserve']
            ):
                macro += f'    {tag}(fmt, ##__VA_ARGS__);\\\n'
        else:
            # Non-hexified log: index + simplified format, formatted at runtime.
            print_fmt = print_fmt or 'NULL'
            macro += f'    BLE_LOG_COMPRESSED_PRINT({source_value}, {log_idx}, "{print_fmt}", ##__VA_ARGS__); \\\n'
        macro += '}\n'
        return macro
    def compress_file(self, file_info: tuple[str, str]) -> list[tuple[str, int, str]]:
        """
        Process a single file for log compression.

        Each recognized log call `TAG(...)` is registered in the database and
        rewritten in-place to `TAG_<db_index>(...)`; a matching macro
        definition is generated for every new/changed index.

        Args:
            file_info: Tuple of (module_name, file_path)

        Returns:
            List of generated macros (module, log_id, macro)
        """
        module, file_path = file_info
        generated_macros = []
        try:
            with open(file_path, 'rb') as f:
                content = f.read()
            new_content = bytearray(content)
            logs = self.extract_log_calls(content, self.module_info[module]['tags'])
            LOGGER.info(f'Processing {file_path} - found {len(logs)} logs')
            offset = 0  # Track cumulative changes due to tag replacements
            for log in logs:
                tag = log['tag'][0]
                # Extract existing index if present.
                # NOTE(review): the suffix pattern here is 8 uppercase hex
                # digits, while the replacement below appends the decimal
                # db_index -- confirm the two formats are meant to coexist.
                if match := re.fullmatch(r'(.+)_([0-9A-F]{8})', tag):
                    tag_base = match.group(1)
                    file_index = match.group(2)
                else:
                    tag_base = tag
                    file_index = None
                # Generate simplified format string: conversion specs joined
                # by spaces, literal text dropped, hex buffers as
                # '@hex_bufferN' placeholders.
                no_buf_fmt: str = ''
                simple_fmt_list: list[str] = []
                hex_buffer_cnt = 0
                for token in log['argu_tokens']:
                    if isinstance(token, tuple):
                        if '@func' in token[6] or '@line' in token[6]:
                            continue
                        if '@hex_func' in token[6]:
                            simple_fmt_list.append(f'@hex_buffer{hex_buffer_cnt}')
                            no_buf_fmt += f'@hex_buffer{hex_buffer_cnt}'
                            hex_buffer_cnt += 1
                            continue
                        simple_fmt_list.append(token[2])
                        no_buf_fmt += token[2]
                    else:
                        no_buf_fmt += token
                simple_fmt_str = ' '.join(simple_fmt_list) if simple_fmt_list else None
                # Register in the database (dedup happens there).
                result, db_index = self.db_manager.add_log(
                    source=module,
                    log_tag=tag_base,
                    log_format=no_buf_fmt if log['hexify'] else log['arguments'][0][0],
                    log_line_number=log['line_number'],
                    hexify=log['hexify'],
                    caller_func=log['caller_name'],
                    caller_line=log['caller_line_number'],
                    file_name=os.path.basename(file_path),
                )
                LOGGER.info(f'Got log tag {tag}, generated or quired idx {db_index}')
                if result == LogDBManager.SUCCESS:
                    LOGGER.info(f'Added new log: {db_index} - {tag_base}')
                elif result == LogDBManager.LOG_EXISTS:
                    if file_index:
                        if int(file_index, 16) != db_index:
                            LOGGER.info(f'Updating index: {file_index} -> {db_index}')
                        else:
                            # Tag already carries the correct index: nothing to rewrite.
                            LOGGER.info(f'duplicate index: {file_index} == {db_index}')
                            continue
                    else:
                        LOGGER.info(f'Recovery log index {db_index}')
                else:
                    LOGGER.error(f'Database error for log: {tag_base}')
                    continue
                # Rewrite the tag in the buffer; 'offset' compensates for the
                # length difference of all earlier replacements.
                new_tag_bytes = f'{tag_base}_{db_index}'.encode()
                tag_start, tag_end = log['tag'][1], log['tag'][2]
                new_content[offset + tag_start : offset + tag_end] = new_tag_bytes
                offset += len(new_tag_bytes) - (tag_end - tag_start)
                # Generate macro if this is a new log, the tag carried an old
                # index, or the module config changed.
                if result == LogDBManager.SUCCESS or file_index or self.db_manager.is_config_updated(module):
                    macro = self.generate_compressed_macro(module, db_index, tag_base, simple_fmt_str, log)
                    generated_macros.append((module, db_index, macro))
            # Write updated content
            with open(file_path, 'wb') as f_out:
                f_out.write(new_content)
        except Exception as e:
            LOGGER.error(f'Error processing {file_path}: {e}\n{traceback.format_exc()}')
            raise
        return generated_macros
    def prepare_source_files(self, srcs: list[str]) -> None:
        """
        Prepare source files for processing.

        For each module, matches .c sources under the module's code paths and
        copies the ones that still need compression into the compressed-srcs
        tree as '<dest>.tmp', recording them in info['files_to_process'].

        Args:
            srcs: List of source file paths (relative to the BT component)
        """
        for module, info in self.module_info.items():
            code_dirs = '|'.join(info['code_path'])
            # Match any configured code directory prefix ending in '.c'.
            pattern = re.compile(f'({code_dirs}).*\\.c$')
            info['files_to_process'] = []
            compressed_file_cnt = 0
            total_cnt = 0
            for src in srcs:
                if pattern.match(src):
                    src_path = self.bt_component_path / src
                    dest_path = self.bt_compressed_srcs_path / src
                    temp_path = f'{dest_path}.tmp'
                    total_cnt += 1
                    # Skip files whose hashes are unchanged, unless the module
                    # config changed (which forces reprocessing everything).
                    if self.db_manager.is_file_processed(
                        module, src_path, temp_path
                    ) and not self.db_manager.is_config_updated(module):
                        compressed_file_cnt += 1
                        continue
                    # Ensure directory exists
                    os.makedirs(os.path.dirname(temp_path), exist_ok=True)
                    shutil.copy2(src_path, temp_path)
                    info['files_to_process'].append(temp_path)
                    LOGGER.info(f'Prepared: {src}')
            LOGGER.info(f'Compressed cnt {compressed_file_cnt} {total_cnt}')
            # These prints go to stdout for the build system / user.
            if compressed_file_cnt == total_cnt:
                print(
                    f'All source files in module {module} have been compressed\n', flush=True, end='', file=sys.stdout
                )
            else:
                print(
                    f'Found {len(info["files_to_process"])} source files in module {module} requiring compression\n',
                    flush=True,
                    end='',
                    file=sys.stdout,
                )
        LOGGER.info('Source file preparation complete')
def generate_log_index_header(self, module: str, macros: list[tuple[int, str]]) -> None:
"""
Generate or update the log index header file.
Args:
module: Module name
macros: List of (log_id, macro_definition)
"""
# header_path = self.bt_component_path / self.module_info[module]['log_index_path']
header_path = self.build_dir / Path('ble_log') / Path('include') / self.module_info[module]['log_index_file']
# Create directory if needed
header_path.parent.mkdir(parents=True, exist_ok=True)
update_state = self.db_manager.source_update_state(source=module)
if update_state == self.db_manager.SOURCE_LOG_UPDATE_NONE:
return
elif update_state == self.db_manager.SOURCE_LOG_UPDATE_FULL:
# Header template
header_content = (
textwrap.dedent(f"""
/*
* SPDX-FileCopyrightText: {datetime.now().year} Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#ifndef __{module.upper()}_INTERNAL_LOG_INDEX_H
#define __{module.upper()}_INTERNAL_LOG_INDEX_H
#include <stddef.h>
#include <stdlib.h>
// Compression function declarations
extern int ble_log_compressed_hex_print
(uint32_t source, uint32_t log_index, size_t args_size_cnt, ...);
extern int ble_log_compressed_hex_print_buf
(uint8_t source, uint32_t log_index, uint8_t buf_idx, const uint8_t *buf, size_t len);
// Compression macros
#define BLE_LOG_COMPRESSED_HEX_PRINT(source, log_index, args_cnt, ...) \\
ble_log_compressed_hex_print(source, log_index, args_cnt, ##__VA_ARGS__)
#define BLE_LOG_COMPRESSED_HEX_PRINT_BUF(source, log_index, buf_idx, buf, len) \\
ble_log_compressed_hex_print_buf(source, log_index, buf_idx, (const uint8_t *)buf, len)
#define BLE_LOG_COMPRESSED_HEX_PRINT_WITH_ZERO_ARGUMENTS(source, log_index) \\
ble_log_compressed_hex_print(source, log_index, 0)
""").strip()
+ '\n\n'
)
# Add sorted macros
for log_id, macro_def in sorted(macros, key=lambda x: x[0]):
header_content += macro_def + '\n'
header_content += f'#endif // __{module.upper()}_INTERNAL_LOG_INDEX_H\n'
with open(header_path, 'w') as f:
f.write(header_content)
else:
append_content = ''
log_idx_set: dict[int, int] = dict()
for log_id, macro_def in sorted(macros, key=lambda x: x[0]):
append_content += macro_def + '\n'
log_idx_set[log_id] = 1
with open(header_path, encoding='utf-8') as f:
lines = f.readlines()
log_idx_pattern = re.compile(r'#define .+(\d+)\(fmt,')
for idx, line in enumerate(lines):
if line.strip().startswith('#define'):
res = log_idx_pattern.match(line)
if res:
li = int(res.group(1))
if li in log_idx_set:
raise ValueError(
f'The generated log index{li} andlog_index in the header file have duplicates'
)
if line.strip().startswith('#endif'):
break
else:
raise RuntimeError('#endif not found')
lines.insert(idx, append_content)
with open(header_path, 'w', encoding='utf-8') as f:
f.writelines(lines)
LOGGER.info(f'Generated log index header: {header_path}')
def load_config(self, config_path: str, module_names: list[str]) -> None:
"""
Load and validate log configuration.
Args:
config_path: Path to configuration file
module_names: List of module names to load
"""
with open(config_path, encoding='utf-8') as f:
config = yaml.safe_load(f)
# Extract global config
log_config = config.get('log_config', {})
for key, value in log_config.items():
if key != 'modules':
self.config[key] = value
# Extract module configs
modules = log_config.get('modules', {})
for module in module_names:
if module in modules:
self.module_info[module] = modules[module]
print(f'Found module {module} for compression\n', flush=True, end='', file=sys.stdout)
else:
LOGGER.warning(f"Skipping module '{module}' - config not found")
def main(self) -> int:
    """
    Main entry point for the compression utility.

    Parses the 'compress' CLI sub-command, loads per-module configuration,
    compresses the logs found in the prepared source files, generates the
    per-module log-index headers, and finally restores the '.tmp' backups
    over the processed sources.

    Returns:
        0 on success (including the nothing-to-do case), 1 on processing failure.
    """
    parser = argparse.ArgumentParser(description='BLE Log Compression Utility')
    subparsers = parser.add_subparsers(dest='command', required=True)
    compress_parser = subparsers.add_parser('compress')
    compress_parser.add_argument('--srcs', required=True, help='Semicolon-separated source file paths')
    compress_parser.add_argument('--bt_path', required=True, help='Bluetooth component root path')
    compress_parser.add_argument('--module', required=True, help='Semicolon-separated module names')
    compress_parser.add_argument('--build_path', required=True, help='Build output directory')
    compress_parser.add_argument('--compressed_srcs_path', required=True, help='Directory for processed sources')
    args = parser.parse_args()
    # Setup paths
    self.bt_component_path = Path(args.bt_path)
    self.build_dir = Path(args.build_path)
    self.bt_compressed_srcs_path = Path(args.compressed_srcs_path)
    # Create directories
    (self.build_dir / 'ble_log').mkdir(parents=True, exist_ok=True)
    self.bt_compressed_srcs_path.mkdir(parents=True, exist_ok=True)
    # Configure logging: one timestamped log file per run under <build>/ble_log/
    log_file = self.build_dir / 'ble_log' / f'ble_script_log_{datetime.now().strftime("%y%m%d_%H%M%S")}.log'
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[logging.FileHandler(log_file, mode='w')],
    )
    # Load configuration (module_info.yml is placed in the build dir beforehand)
    modules = args.module.split(';')
    config_path = self.build_dir / 'ble_log/module_info.yml'
    self.load_config(str(config_path), modules)
    # Initialize database
    db_path = self.build_dir / self.config.get('db_path', 'log_db')
    db_manager = LogDBManager(
        data_dir=str(db_path),
        sources={source: str(config) for source, config in self.module_info.items()},
        logger=LOGGER,
    )
    self.db_manager = db_manager
    # Prepare source files
    src_list = args.srcs.split(';')
    self.prepare_source_files(src_list)
    # Collect (module, path) pairs for every file flagged for processing
    files_to_process = []
    for module, info in self.module_info.items():
        files_to_process.extend([(module, path) for path in info['files_to_process']])
    if not files_to_process:
        LOGGER.info('No files to process')
        print('No source files require compression; exiting log compression\n', flush=True, end='', file=sys.stdout)
        # Restore the '.tmp' backups over the processed sources before exiting
        for root, _, files in os.walk(self.bt_compressed_srcs_path):
            for name in files:
                if name.endswith('.tmp'):
                    file_src = os.path.join(root, name)
                    dst_path = os.path.join(root, name[: -len('.tmp')])
                    shutil.copy2(file_src, dst_path)
                    LOGGER.info(f'Recovery src {file_src} dst {dst_path}')
        db_manager.close()
        return 0
    all_macros: dict[str, list[tuple[int, str]]] = {}
    # Deterministic order: sort by file path so generated output is stable
    files_to_process.sort(key=lambda x: x[1])
    try:
        compressed_log_count = 0
        for needs_compressed_file in files_to_process:
            file_macros = self.compress_file(needs_compressed_file)
            compressed_log_count += len(file_macros)
            for module, log_id, macro in file_macros:
                all_macros.setdefault(module, []).append((log_id, macro))
        print(f'{compressed_log_count} ble log(s) compressed\n', flush=True, end='', file=sys.stdout)
    except Exception as e:
        LOGGER.error(f'Processing failed: {e}')
        db_manager.close()
        return 1
    # Generate one log-index header per module
    for module, macros in all_macros.items():
        self.generate_log_index_header(module, macros)
    print('Header file for compressed logs generated\n', flush=True, end='', file=sys.stdout)
    # Mark files as processed; temp_path[:-4] strips the '.tmp' suffix to
    # recover the original relative source path.
    for module, info in self.module_info.items():
        for temp_path in info['files_to_process']:
            src_path = self.bt_component_path / os.path.relpath(temp_path[:-4], self.bt_compressed_srcs_path)
            db_manager.mark_file_processed(module, src_path, temp_path)
    # Restore the '.tmp' backups (same recovery loop as the no-work path above)
    for root, _, files in os.walk(self.bt_compressed_srcs_path):
        for name in files:
            if name.endswith('.tmp'):
                file_src = os.path.join(root, name)
                dst_path = os.path.join(root, name[: -len('.tmp')])
                shutil.copy2(file_src, dst_path)
                LOGGER.info(f'Recovery src {file_src} dst {dst_path}')
    db_manager.close()
    LOGGER.info('Compression completed successfully')
    return 0
if __name__ == '__main__':
    # Announce start on stdout before the potentially long compression run.
    sys.stdout.write('Log compression underway, please wait...\n')
    sys.stdout.flush()
    exit(LogCompressor().main())

View File

@@ -0,0 +1,268 @@
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# ruff: noqa: UP007
"""
Format String Parser
====================
Parses C-style format strings and handles argument formatting for log compression.
"""
import struct
from typing import Union
def parse_format_string(format_str: str) -> list[Union[str, tuple[int, int, str, str, str, str, str]]]:
    """
    Parse a C-style format string into literal-text and conversion-spec tokens.

    Args:
        format_str: C-style format string (printf-like)

    Returns:
        List of tokens. Literal text (and a lone '%' for an escaped '%%')
        is emitted as a plain string. Each conversion spec is emitted as a
        tuple ``(start, end, flags, width, precision, length, conv_char)``
        — this field order matches the unpack performed by
        parse_compressed_arguments(). An unterminated/unknown spec is
        emitted verbatim as a string.
    """
    tokens: list[Union[str, tuple[int, int, str, str, str, str, str]]] = []
    i = 0
    n = len(format_str)
    while i < n:
        if format_str[i] == '%':
            start = i
            i += 1
            # '%%' is an escaped percent sign, not a conversion
            if i < n and format_str[i] == '%':
                tokens.append('%')
                i += 1
                continue
            # Parse flags
            flags = ''
            while i < n and format_str[i] in '-+ #0':
                flags += format_str[i]
                i += 1
            # Parse width
            width = ''
            while i < n and format_str[i].isdigit():
                width += format_str[i]
                i += 1
            # Parse precision
            precision = ''
            if i < n and format_str[i] == '.':
                i += 1
                while i < n and format_str[i].isdigit():
                    precision += format_str[i]
                    i += 1
            # Parse length modifier; doubled 'l' (ll) and 'h' (hh) are accepted
            length = ''
            if i < n and format_str[i] in 'zhl':
                length += format_str[i]
                i += 1
            if i < n and format_str[i] == 'l' and length == 'l':
                length += 'l'
                i += 1
            if i < n and format_str[i] == 'h' and length == 'h':
                length += 'h'
                i += 1
            if i < n and format_str[i] in 'diuoxXfcsplL':
                conv_char = format_str[i]
                i += 1
                # BUGFIX: the tuple previously carried the full spec text in
                # place of flags (and dropped precision entirely), so the
                # downstream unpack in parse_compressed_arguments() assigned
                # every field wrongly. Emit the individual fields in the
                # consumer's unpack order instead.
                tokens.append((start, i, flags, width, precision, length, conv_char))
            else:
                # Unknown conversion — keep the raw text so nothing is lost
                tokens.append(format_str[start:i])
        else:
            # Regular text: consume up to the next '%'
            start = i
            while i < n and format_str[i] != '%':
                i += 1
            tokens.append(format_str[start:i])
    return tokens
def format_integer(value: int, conv_char: str, flags: str, width: str, length_mod: str) -> str:
    """
    Render an integer the way printf would for the given conversion spec.

    Args:
        value: Integer value (already sign-converted by the caller)
        conv_char: Conversion character (d, i, u, o, x, X)
        flags: Format flags ('#' alternate form, '0' zero pad, '-' left justify)
        width: Minimum field width as a decimal string ('' for none)
        length_mod: Length modifier (accepted for signature parity; not used here)

    Returns:
        Formatted string
    """
    uppercase = conv_char == 'X'
    # Render the digits in the base selected by the conversion character.
    if conv_char in 'xX':
        digits = format(value, 'X' if uppercase else 'x')
    elif conv_char == 'o':
        digits = format(value, 'o')
    else:
        digits = str(value)

    # '#' requests the alternate form (0x/0X or a leading 0) for nonzero values.
    prefix = ''
    if '#' in flags and value != 0:
        if conv_char in 'xX':
            prefix = '0X' if uppercase else '0x'
        elif conv_char == 'o':
            prefix = '0'

    # Apply minimum width: zero padding sits between prefix and digits,
    # space padding goes outside the whole rendered number.
    min_width = int(width) if width else 0
    pad_count = min_width - len(prefix) - len(digits)
    if pad_count > 0:
        if '0' in flags and '-' not in flags:
            digits = digits.zfill(pad_count + len(digits))
        elif '-' in flags:
            return prefix + digits + ' ' * pad_count
        else:
            return ' ' * pad_count + prefix + digits
    return prefix + digits
def parse_compressed_arguments(byte_sequence: bytes, format_str: str) -> str:
    """
    Parse compressed log arguments into formatted string.

    Wire layout of byte_sequence (big-endian), as decoded below:
      * 2-byte header: bit 15 is a type flag (must be 1), bits 14..0 the
        argument count.
      * one 4-bit size per argument (high nibble first, padded to whole bytes).
      * the argument payloads, concatenated in order.

    Args:
        byte_sequence: Compressed argument bytes
        format_str: Original format string

    Returns:
        Formatted log string

    Raises:
        ValueError: If the input is invalid
    """
    if len(byte_sequence) < 2:
        raise ValueError('Insufficient bytes for header')
    # Parse header: bit 15 = type flag, bits 14..0 = argument count
    header = (byte_sequence[0] << 8) | byte_sequence[1]
    type_flag = (header >> 15) & 0x01
    arg_count = header & 0x7FFF
    if type_flag != 1:
        raise ValueError(f'Unsupported type flag: {type_flag}')
    # Parse size list: two 4-bit sizes per byte, high nibble first
    size_bytes_needed = (arg_count + 1) // 2
    if len(byte_sequence) < 2 + size_bytes_needed:
        raise ValueError('Insufficient bytes for size list')
    size_bytes = byte_sequence[2 : 2 + size_bytes_needed]
    arg_sizes = []
    for i in range(arg_count):
        byte_index = i // 2
        if i % 2 == 0:
            size = (size_bytes[byte_index] >> 4) & 0x0F
        else:
            size = size_bytes[byte_index] & 0x0F
        arg_sizes.append(size)
    # Parse arguments: slice each payload out of the remaining bytes
    args = []
    pos = 2 + size_bytes_needed
    for size in arg_sizes:
        if pos + size > len(byte_sequence):
            raise ValueError('Insufficient bytes for arguments')
        args.append(byte_sequence[pos : pos + size])
        pos += size
    # Parse format string and substitute decoded arguments for each spec
    tokens = parse_format_string(format_str)
    output = []
    arg_index = 0
    for token in tokens:
        if isinstance(token, tuple):
            # NOTE(review): this unpack order must match the tuple layout
            # emitted by parse_format_string() — verify the two stay in sync.
            start, end, flags, width, precision, length_mod, conv_char = token
            if conv_char == '%':
                # Defensive: '%%' is normally emitted as a plain '%' string
                # token, so this branch is not expected to be reached.
                output.append('%')
            else:
                if arg_index >= len(args):
                    raise ValueError('Not enough arguments for format string')
                arg_bytes = args[arg_index]
                arg_index += 1
                # Character type
                if conv_char == 'c':
                    # Pad to 4 bytes for unpacking
                    padded = arg_bytes.ljust(4, b'\x00')
                    char_code = struct.unpack('>I', padded)[0]
                    output.append(chr(char_code))
                # Pointer type
                elif conv_char == 'p':
                    ptr_value = int.from_bytes(arg_bytes, 'big', signed=False)
                    output.append(hex(ptr_value))
                # Floating point types
                elif conv_char in 'fFeEgGaA':
                    if len(arg_bytes) == 4:
                        float_value = struct.unpack('>f', arg_bytes)[0]
                    elif len(arg_bytes) == 8:
                        float_value = struct.unpack('>d', arg_bytes)[0]
                    else:
                        raise ValueError(f'Unsupported float size: {len(arg_bytes)} bytes')
                    output.append(str(float_value))
                # Integer types
                elif conv_char in 'diuoxX':
                    signed = conv_char in 'di'
                    # Determine expected size from the length modifier
                    if length_mod == 'll':
                        expected_size = 8
                    # 'p' is unreachable here (handled above); kept defensively
                    elif length_mod in ('l', 'z', 'j', 't') or conv_char == 'p':
                        expected_size = 4
                    else:
                        expected_size = len(arg_bytes)
                    # Pad (or truncate) the payload to the expected size
                    if len(arg_bytes) < expected_size:
                        if signed and arg_bytes and (arg_bytes[0] & 0x80):
                            # Sign extension for negative numbers
                            pad = b'\xff' * (expected_size - len(arg_bytes))
                        else:
                            pad = b'\x00' * (expected_size - len(arg_bytes))
                        arg_bytes = pad + arg_bytes
                    elif len(arg_bytes) > expected_size:
                        arg_bytes = arg_bytes[:expected_size]
                    # Convert to integer and format like printf
                    int_value = int.from_bytes(arg_bytes, 'big', signed=signed)
                    output.append(format_integer(int_value, conv_char, flags, width, length_mod))
                else:
                    raise ValueError(f'Unsupported conversion: {conv_char}')
        else:
            # Literal text token (including '%' from an escaped '%%')
            output.append(token)
    return ''.join(output)

View File

@@ -0,0 +1,17 @@
log_config:
db_path: "ble_log/ble_log_database"
modules:
BLE_MESH:
description: "BLE Mesh"
code_path: [@BLE_MESH_CODE_PATH@]
log_index_file: @BLE_MESH_LOG_INDEX_HEADER@
tags: [@BLE_MESH_TAGS@]
tags_with_preserve: [@BLE_MESH_TAGS_PRESERVE@]
BLE_HOST:
description: "BLE Host"
code_path: [@HOST_CODE_PATH@]
log_index_file: @HOST_LOG_INDEX_HEADER@
tags: [@BLE_HOST_TAGS@]
tags_with_preserve: [@BLE_HOST_TAGS_PRESERVE@]

View File

@@ -0,0 +1,139 @@
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
try:
import tree_sitter_c
except ModuleNotFoundError:
tree_sitter_c = None
try:
import tree_sitter
except ModuleNotFoundError:
tree_sitter = None
import platform
import sys
from importlib.metadata import version
from pathlib import Path
from ble_log_compress import TsInit
from ble_log_compress import TsQueryByTree
IDF_PATH = Path(Path(__file__).resolve().parent / Path('../../../../../../')).resolve()
TEST_C_STR = b"""
void test_func(void);
int main(void) {
printf("Hello world\n");
return 0;
}
"""
TEST_FUNCTION_QUERY = """
[
(function_declarator
(identifier) @func_name
)
(declaration
(function_declarator
(identifier) @func_decls
)
)
]
"""
TEST_LOG_QUERY = """
(expression_statement
(call_expression
function: (identifier) @fname
arguments: (argument_list) @args
(#match? @fname "^(printf)$")
) @log_stmt
)
"""
def check_py_version(min_version: str = '3.8.0') -> None:
    """Exit with an error message if the running Python is older than min_version."""
    current = tuple(int(part) for part in platform.python_version().split('.'))
    required = tuple(int(part) for part in min_version.split('.'))
    if current < required:
        print('Please use Python 3.8 or above', file=sys.stderr)
        exit(1)
def validate() -> None:
    """
    Verify that the tree-sitter runtime and its C grammar were imported
    successfully; print install instructions to stderr and exit(1) otherwise.
    """
    # tree_sitter / tree_sitter_c are set to None at import time when the
    # corresponding module is missing (see the try/except imports above).
    if tree_sitter is None:
        print(
            (
                'tree_sitter import failed, please check whether the package is installed correctly,'
                'Please refer to the file:'
                f'{IDF_PATH}/components/bt/common/ble_log/log_compression/scripts/install.en.md'
                ' for installation instructions.'
            ),
            file=sys.stderr,
        )
        exit(1)
    if tree_sitter_c is None:
        print(
            (
                'tree_sitter_c import failed, '
                'please check whether the package is installed correctly,'
                'Please refer to the file:'
                f'{IDF_PATH}/components/bt/common/ble_log/log_compression/scripts/install.en.md'
                ' for installation instructions.'
            ),
            file=sys.stderr,
        )
        exit(1)
def test_parse() -> None:
    """
    Parse TEST_C_STR with tree-sitter and verify the query captures; on any
    failure, print a diagnostic report to stderr and exit(1).
    """
    try:
        lang, parser = TsInit()
        tree = parser.parse(TEST_C_STR)
        captures = TsQueryByTree(lang, tree, TEST_LOG_QUERY)
        assert len(captures.keys()) == 3
        assert len(captures.values()) == 3
        assert 'log_stmt' in captures.keys()
        assert 'fname' in captures.keys()
        assert 'args' in captures.keys()
        assert captures['log_stmt'][0].type == 'call_expression'
        assert captures['log_stmt'][0].start_point == (3, 4)
        assert captures['log_stmt'][0].end_point == (4, 2)
        captures = TsQueryByTree(lang, tree, TEST_FUNCTION_QUERY)
        print(captures, file=sys.stderr)
        assert len(captures.keys()) == 2
        assert len(captures.values()) == 2
        assert 'func_decls' in captures.keys()
        assert 'func_name' in captures.keys()
        assert len(captures['func_name']) == 2
        assert len(captures['func_decls']) == 1
        assert captures['func_name'][0].type == 'identifier'
        assert captures['func_name'][0].start_point == (1, 5)
        assert captures['func_name'][0].end_point == (1, 14)
    except Exception:
        # BUGFIX: the previous report (a) passed module objects to
        # importlib.metadata.version(), which requires distribution names and
        # would raise inside this handler, and (b) had stray trailing commas
        # that turned the message into a tuple, printing its repr instead of
        # a readable string.
        print(
            (
                'Code parsing error, '
                'please provide the following information to Espressif to help you solve the problem: '
                f'tree-sitter version: {version("tree-sitter")} '
                f'tree-sitter-c version: {version("tree-sitter-c")} '
                f'python version: {platform.python_version()} '
                f'os_system: {platform.system()} '
                f'os_release: {platform.release()} '
                f'os_version: {platform.version()} '
                f'machine: {platform.machine()}'
            ),
            file=sys.stderr,
        )
        exit(1)
# Script entry: verify the Python version and the tree-sitter environment,
# then run the parsing smoke test. Reaching exit(0) means all checks passed.
check_py_version()
validate()
test_parse()
exit(0)

View File

@@ -0,0 +1,166 @@
# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
用于适配标准化打印
file_path: /usr/include/inttypes.h
"""
__PRI64_PREFIX = 'll'
__PRIPTR_PREFIX = ''
TYPES_MACRO_MAP = {
'PRId8': 'd',
'PRId16': 'd',
'PRId32': 'd',
'PRId64': __PRI64_PREFIX + 'd',
'PRIdLEAST8': 'd',
'PRIdLEAST16': 'd',
'PRIdLEAST32': 'd',
'PRIdLEAST64': __PRI64_PREFIX + 'd',
'PRIdFAST8': 'd',
'PRIdFAST16': __PRIPTR_PREFIX + 'd',
'PRIdFAST32': __PRIPTR_PREFIX + 'd',
'PRIdFAST64': __PRI64_PREFIX + 'd',
'PRIi8': 'i',
'PRIi16': 'i',
'PRIi32': 'i',
'PRIi64': __PRI64_PREFIX + 'i',
'PRIiLEAST8': 'i',
'PRIiLEAST16': 'i',
'PRIiLEAST32': 'i',
'PRIiLEAST64': __PRI64_PREFIX + 'i',
'PRIiFAST8': 'i',
'PRIiFAST16': __PRIPTR_PREFIX + 'i',
'PRIiFAST32': __PRIPTR_PREFIX + 'i',
'PRIiFAST64': __PRI64_PREFIX + 'i',
'PRIo8': 'o',
'PRIo16': 'o',
'PRIo32': 'o',
'PRIo64': __PRI64_PREFIX + 'o',
'PRIoLEAST8': 'o',
'PRIoLEAST16': 'o',
'PRIoLEAST32': 'o',
'PRIoLEAST64': __PRI64_PREFIX + 'o',
'PRIoFAST8': 'o',
'PRIoFAST16': __PRIPTR_PREFIX + 'o',
'PRIoFAST32': __PRIPTR_PREFIX + 'o',
'PRIoFAST64': __PRI64_PREFIX + 'o',
'PRIu8': 'u',
'PRIu16': 'u',
'PRIu32': 'u',
'PRIu64': __PRI64_PREFIX + 'u',
'PRIuLEAST8': 'u',
'PRIuLEAST16': 'u',
'PRIuLEAST32': 'u',
'PRIuLEAST64': __PRI64_PREFIX + 'u',
'PRIuFAST8': 'u',
'PRIuFAST16': __PRIPTR_PREFIX + 'u',
'PRIuFAST32': __PRIPTR_PREFIX + 'u',
'PRIuFAST64': __PRI64_PREFIX + 'u',
'PRIx8': 'x',
'PRIx16': 'x',
'PRIx32': 'x',
'PRIx64': __PRI64_PREFIX + 'x',
'PRIxLEAST8': 'x',
'PRIxLEAST16': 'x',
'PRIxLEAST32': 'x',
'PRIxLEAST64': __PRI64_PREFIX + 'x',
'PRIxFAST8': 'x',
'PRIxFAST16': __PRIPTR_PREFIX + 'x',
'PRIxFAST32': __PRIPTR_PREFIX + 'x',
'PRIxFAST64': __PRI64_PREFIX + 'x',
'PRIX8': 'X',
'PRIX16': 'X',
'PRIX32': 'X',
'PRIX64': __PRI64_PREFIX + 'X',
'PRIXLEAST8': 'X',
'PRIXLEAST16': 'X',
'PRIXLEAST32': 'X',
'PRIXLEAST64': __PRI64_PREFIX + 'X',
'PRIXFAST8': 'X',
'PRIXFAST16': __PRIPTR_PREFIX + 'X',
'PRIXFAST32': __PRIPTR_PREFIX + 'X',
'PRIXFAST64': __PRI64_PREFIX + 'X',
'PRIdMAX': __PRI64_PREFIX + 'd',
'PRIiMAX': __PRI64_PREFIX + 'i',
'PRIoMAX': __PRI64_PREFIX + 'o',
'PRIuMAX': __PRI64_PREFIX + 'u',
'PRIxMAX': __PRI64_PREFIX + 'x',
'PRIXMAX': __PRI64_PREFIX + 'X',
'PRIdPTR': __PRIPTR_PREFIX + 'd',
'PRIiPTR': __PRIPTR_PREFIX + 'i',
'PRIoPTR': __PRIPTR_PREFIX + 'o',
'PRIuPTR': __PRIPTR_PREFIX + 'u',
'PRIxPTR': __PRIPTR_PREFIX + 'x',
'PRIXPTR': __PRIPTR_PREFIX + 'X',
'SCNd8': 'hhd',
'SCNd16': 'hd',
'SCNd32': 'd',
'SCNd64': __PRI64_PREFIX + 'd',
'SCNdLEAST8': 'hhd',
'SCNdLEAST16': 'hd',
'SCNdLEAST32': 'd',
'SCNdLEAST64': __PRI64_PREFIX + 'd',
'SCNdFAST8': 'hhd',
'SCNdFAST16': __PRIPTR_PREFIX + 'd',
'SCNdFAST32': __PRIPTR_PREFIX + 'd',
'SCNdFAST64': __PRI64_PREFIX + 'd',
'SCNi8': 'hhi',
'SCNi16': 'hi',
'SCNi32': 'i',
'SCNi64': __PRI64_PREFIX + 'i',
'SCNiLEAST8': 'hhi',
'SCNiLEAST16': 'hi',
'SCNiLEAST32': 'i',
'SCNiLEAST64': __PRI64_PREFIX + 'i',
'SCNiFAST8': 'hhi',
'SCNiFAST16': __PRIPTR_PREFIX + 'i',
'SCNiFAST32': __PRIPTR_PREFIX + 'i',
'SCNiFAST64': __PRI64_PREFIX + 'i',
'SCNu8': 'hhu',
'SCNu16': 'hu',
'SCNu32': 'u',
'SCNu64': __PRI64_PREFIX + 'u',
'SCNuLEAST8': 'hhu',
'SCNuLEAST16': 'hu',
'SCNuLEAST32': 'u',
'SCNuLEAST64': __PRI64_PREFIX + 'u',
'SCNuFAST8': 'hhu',
'SCNuFAST16': __PRIPTR_PREFIX + 'u',
'SCNuFAST32': __PRIPTR_PREFIX + 'u',
'SCNuFAST64': __PRI64_PREFIX + 'u',
'SCNo8': 'hho',
'SCNo16': 'ho',
'SCNo32': 'o',
'SCNo64': __PRI64_PREFIX + 'o',
'SCNoLEAST8': 'hho',
'SCNoLEAST16': 'ho',
'SCNoLEAST32': 'o',
'SCNoLEAST64': __PRI64_PREFIX + 'o',
'SCNoFAST8': 'hho',
'SCNoFAST16': __PRIPTR_PREFIX + 'o',
'SCNoFAST32': __PRIPTR_PREFIX + 'o',
'SCNoFAST64': __PRI64_PREFIX + 'o',
'SCNx8': 'hhx',
'SCNx16': 'hx',
'SCNx32': 'x',
'SCNx64': __PRI64_PREFIX + 'x',
'SCNxLEAST8': 'hhx',
'SCNxLEAST16': 'hx',
'SCNxLEAST32': 'x',
'SCNxLEAST64': __PRI64_PREFIX + 'x',
'SCNxFAST8': 'hhx',
'SCNxFAST16': __PRIPTR_PREFIX + 'x',
'SCNxFAST32': __PRIPTR_PREFIX + 'x',
'SCNxFAST64': __PRI64_PREFIX + 'x',
'SCNdMAX': __PRI64_PREFIX + 'd',
'SCNiMAX': __PRI64_PREFIX + 'i',
'SCNoMAX': __PRI64_PREFIX + 'o',
'SCNuMAX': __PRI64_PREFIX + 'u',
'SCNxMAX': __PRI64_PREFIX + 'x',
'SCNdPTR': __PRIPTR_PREFIX + 'd',
'SCNiPTR': __PRIPTR_PREFIX + 'i',
'SCNoPTR': __PRIPTR_PREFIX + 'o',
'SCNuPTR': __PRIPTR_PREFIX + 'u',
'SCNxPTR': __PRIPTR_PREFIX + 'x',
}

View File

@@ -0,0 +1,6 @@
tree_sitter~=0.21; python_version == "3.8"
tree_sitter_c~=0.21; python_version == "3.8"
tree_sitter>=0.23,<=0.23.2; python_version == "3.9"
tree_sitter_c>=0.23,<0.23.5; python_version == "3.9"
tree-sitter~=0.25; python_version >= "3.10"
tree-sitter-c~=0.24; python_version >= "3.10"

View File

@@ -61,5 +61,4 @@ void ble_log_spi_out_le_audio_write(const uint8_t *addr, uint16_t len);
int ble_log_spi_out_host_write(uint8_t source, const char *prefix, const char *format, ...);
int ble_log_spi_out_hci_write(uint8_t source, const uint8_t *addr, uint16_t len);
int ble_log_spi_out_mesh_write(const char *prefix, const char *format, ...);
#endif // __BT_SPI_OUT_H__

View File

@@ -13,6 +13,9 @@
#include "esp_log.h"
#include "mesh/utils.h"
#include "esp_rom_sys.h"
#if CONFIG_BLE_MESH_COMPRESSED_LOG_ENABLE
#include "mesh_log_index.h"
#endif
#ifdef __cplusplus
extern "C" {

View File

@@ -24,6 +24,9 @@
#include "bluedroid_user_config.h"
#include "stack/bt_types.h"
#include "bt_common.h"
#if CONFIG_BLE_HOST_COMPRESSED_LOG_ENABLE
#include "host_log_index.h"
#endif
#if (BT_BLE_LOG_SPI_OUT_HOST_ENABLED && !CLASSIC_BT_INCLUDED)
#include "ble_log/ble_log_spi_out.h"

View File

@@ -27,5 +27,9 @@ construct
rich
psutil
# dependencies of the BLE compressed-log testing tool
tree_sitter
tree_sitter_c
# gdb extensions dependencies
freertos_gdb