change: fix issues reported by ruff

Peter Dragun
2025-03-26 11:12:38 +01:00
parent de6206ed3a
commit 806c93e08e
5 changed files with 951 additions and 728 deletions

View File

@@ -16,15 +16,18 @@ from esp_docs.conf_docs import * # noqa: F403,F401
if os.environ.get('IDF_PATH') is None:
raise RuntimeError('IDF_PATH should be set, run export.sh before building docs')
BT_DOCS = ['api-reference/bluetooth/esp_bt_defs.rst',
BT_DOCS = [
'api-reference/bluetooth/esp_bt_defs.rst',
'api-reference/bluetooth/esp_bt_device.rst',
'api-reference/bluetooth/esp_bt_main.rst',
'api-reference/bluetooth/bt_common.rst',
'api-reference/bluetooth/bt_vhci.rst',
'api-reference/bluetooth/controller_vhci.rst',
'api-reference/bluetooth/index.rst']
'api-reference/bluetooth/index.rst',
]
BLE_DOCS = ['api-guides/ble/index.rst',
BLE_DOCS = [
'api-guides/ble/index.rst',
'api-guides/ble/overview.rst',
'api-guides/ble/ble-feature-support-status.rst',
'api-guides/ble/host-feature-support-status.rst',
@@ -39,17 +42,21 @@ BLE_DOCS = ['api-guides/ble/index.rst',
'api-reference/bluetooth/esp_gatts.rst',
'api-reference/bluetooth/esp_gattc.rst',
'api-reference/bluetooth/nimble/index.rst',
'migration-guides/release-5.x/5.0/bluetooth-low-energy.rst']
'migration-guides/release-5.x/5.0/bluetooth-low-energy.rst',
]
BLE_MESH_DOCS = ['api-guides/esp-ble-mesh/ble-mesh-index.rst',
BLE_MESH_DOCS = [
'api-guides/esp-ble-mesh/ble-mesh-index.rst',
'api-guides/esp-ble-mesh/ble-mesh-feature-list.rst',
'api-guides/esp-ble-mesh/ble-mesh-terminology.rst',
'api-guides/esp-ble-mesh/ble-mesh-architecture.rst',
'api-guides/esp-ble-mesh/ble-mesh-faq.rst',
'api-reference/bluetooth/esp-ble-mesh.rst']
'api-reference/bluetooth/esp-ble-mesh.rst',
]
CLASSIC_BT_DOCS = ['api-guides/classic-bt/index.rst',
CLASSIC_BT_DOCS = [
'api-guides/classic-bt/index.rst',
'api-guides/classic-bt/overview.rst',
'api-reference/bluetooth/classic_bt.rst',
'api-reference/bluetooth/esp_a2dp.rst',
@@ -63,12 +70,13 @@ CLASSIC_BT_DOCS = ['api-guides/classic-bt/index.rst',
'api-reference/bluetooth/esp_hf_ag.rst',
'api-reference/bluetooth/esp_spp.rst',
'api-reference/bluetooth/esp_gap_bt.rst',
'migration-guides/release-5.x/5.0/bluetooth-classic.rst']
'migration-guides/release-5.x/5.0/bluetooth-classic.rst',
]
BLUFI_DOCS = ['api-guides/ble/blufi.rst',
'api-reference/bluetooth/esp_blufi.rst']
BLUFI_DOCS = ['api-guides/ble/blufi.rst', 'api-reference/bluetooth/esp_blufi.rst']
WIFI_DOCS = ['api-guides/low-power-mode/low-power-mode-wifi.rst',
WIFI_DOCS = [
'api-guides/low-power-mode/low-power-mode-wifi.rst',
'api-guides/wifi.rst',
'api-guides/wifi-security.rst',
'api-guides/wireshark-user-guide.rst',
@@ -78,15 +86,14 @@ WIFI_DOCS = ['api-guides/low-power-mode/low-power-mode-wifi.rst',
'api-reference/network/esp_dpp.rst',
'api-reference/provisioning/provisioning.rst',
'api-reference/provisioning/wifi_provisioning.rst',
'migration-guides/release-5.x/5.2/wifi.rst']
'migration-guides/release-5.x/5.2/wifi.rst',
]
IEEE802154_DOCS = ['migration-guides/release-5.x/5.1/ieee802154.rst',
'migration-guides/release-5.x/5.2/ieee802154.rst']
IEEE802154_DOCS = ['migration-guides/release-5.x/5.1/ieee802154.rst', 'migration-guides/release-5.x/5.2/ieee802154.rst']
NAN_DOCS = ['api-reference/network/esp_nan.rst']
WIFI_MESH_DOCS = ['api-guides/esp-wifi-mesh.rst',
'api-reference/network/esp-wifi-mesh.rst']
WIFI_MESH_DOCS = ['api-guides/esp-wifi-mesh.rst', 'api-reference/network/esp-wifi-mesh.rst']
COEXISTENCE_DOCS = ['api-guides/coexist.rst']
@@ -102,8 +109,7 @@ UART_DOCS = ['api-reference/peripherals/uart.rst']
SDMMC_DOCS = ['api-reference/peripherals/sdmmc_host.rst']
SDIO_SLAVE_DOCS = ['api-reference/peripherals/sdio_slave.rst',
'api-reference/protocols/esp_sdio_slave_protocol.rst']
SDIO_SLAVE_DOCS = ['api-reference/peripherals/sdio_slave.rst', 'api-reference/protocols/esp_sdio_slave_protocol.rst']
MCPWM_DOCS = ['api-reference/peripherals/mcpwm.rst']
@@ -127,7 +133,8 @@ TOUCH_SENSOR_DOCS = ['api-reference/peripherals/cap_touch_sens.rst']
SPIRAM_DOCS = ['api-guides/external-ram.rst']
USB_DOCS = ['api-reference/peripherals/usb_device.rst',
USB_DOCS = [
'api-reference/peripherals/usb_device.rst',
'api-reference/peripherals/usb_host.rst',
'api-reference/peripherals/usb_host/usb_host_notes_arch.rst',
'api-reference/peripherals/usb_host/usb_host_notes_design.rst',
@@ -136,7 +143,8 @@ USB_DOCS = ['api-reference/peripherals/usb_device.rst',
'api-reference/peripherals/usb_host/usb_host_notes_usbh.rst',
'api-reference/peripherals/usb_host/usb_host_notes_enum.rst',
'api-reference/peripherals/usb_host/usb_host_notes_ext_hub.rst',
'api-reference/peripherals/usb_host/usb_host_notes_ext_port.rst']
'api-reference/peripherals/usb_host/usb_host_notes_ext_port.rst',
]
I80_LCD_DOCS = ['api-reference/peripherals/lcd/i80_lcd.rst']
RGB_LCD_DOCS = ['api-reference/peripherals/lcd/rgb_lcd.rst']
@@ -149,19 +157,24 @@ USB_OTG_CONSOLE_DOCS = ['api-guides/usb-otg-console.rst']
FTDI_JTAG_DOCS = ['api-guides/jtag-debugging/configure-ft2232h-jtag.rst']
USB_SERIAL_JTAG_DOCS = ['api-guides/jtag-debugging/configure-builtin-jtag.rst',
'api-guides/usb-serial-jtag-console.rst']
USB_SERIAL_JTAG_DOCS = [
'api-guides/jtag-debugging/configure-builtin-jtag.rst',
'api-guides/usb-serial-jtag-console.rst',
]
ULP_FSM_DOCS = ['api-reference/system/ulp.rst',
ULP_FSM_DOCS = [
'api-reference/system/ulp.rst',
'api-reference/system/ulp_macros.rst',
'api-reference/system/ulp_instruction_set.rst']
'api-reference/system/ulp_instruction_set.rst',
]
RISCV_COPROC_DOCS = ['api-reference/system/ulp-risc-v.rst',]
RISCV_COPROC_DOCS = [
'api-reference/system/ulp-risc-v.rst',
]
LP_CORE_DOCS = ['api-reference/system/ulp-lp-core.rst']
XTENSA_DOCS = ['api-guides/hlinterrupts.rst',
'api-reference/system/perfmon.rst']
XTENSA_DOCS = ['api-guides/hlinterrupts.rst', 'api-reference/system/perfmon.rst']
RISCV_DOCS = [] # type: list[str]
@@ -169,15 +182,19 @@ TWAI_DOCS = ['api-reference/peripherals/twai.rst']
SDM_DOCS = ['api-reference/peripherals/sdm.rst']
I2C_DOCS = ['api-reference/peripherals/i2c.rst',
I2C_DOCS = [
'api-reference/peripherals/i2c.rst',
'api-reference/peripherals/lcd/i2c_lcd.rst',
'api-reference/peripherals/i2c_slave_v1.rst']
'api-reference/peripherals/i2c_slave_v1.rst',
]
SPI_DOCS = ['api-reference/peripherals/spi_master.rst',
SPI_DOCS = [
'api-reference/peripherals/spi_master.rst',
'api-reference/peripherals/spi_slave.rst',
'api-reference/peripherals/sdspi_host.rst',
'api-reference/peripherals/sdspi_share.rst',
'api-reference/peripherals/lcd/spi_lcd.rst']
'api-reference/peripherals/lcd/spi_lcd.rst',
]
I2S_DOCS = ['api-reference/peripherals/i2s.rst']
@@ -189,8 +206,7 @@ ISP_DOCS = ['api-reference/peripherals/isp.rst']
DSLP_STUB_DOCS = ['api-guides/deep-sleep-stub.rst']
ADC_DOCS = ['api-reference/peripherals/adc_oneshot.rst',
'api-reference/peripherals/adc_calibration.rst']
ADC_DOCS = ['api-reference/peripherals/adc_oneshot.rst', 'api-reference/peripherals/adc_calibration.rst']
ADC_DMA_DOCS = ['api-reference/peripherals/adc_continuous.rst']
ANA_CMPR_DOCS = ['api-reference/peripherals/ana_cmpr.rst']
@@ -203,14 +219,18 @@ PPA_DOCS = ['api-reference/peripherals/ppa.rst']
QEMU_DOCS = ['api-guides/tools/qemu.rst']
ESP_TEE_DOCS = ['security/tee/index.rst',
ESP_TEE_DOCS = [
'security/tee/index.rst',
'security/tee/tee.rst',
'security/tee/tee-advanced.rst',
'security/tee/tee-attestation.rst',
'security/tee/tee-ota.rst',
'security/tee/tee-sec-storage.rst']
'security/tee/tee-sec-storage.rst',
]
ESP32_DOCS = ['api-reference/system/himem.rst',
ESP32_DOCS = (
[
'api-reference/system/himem.rst',
'api-guides/romconsole.rst',
'api-reference/system/ipc.rst',
'security/secure-boot-v1.rst',
@@ -218,49 +238,68 @@ ESP32_DOCS = ['api-reference/system/himem.rst',
'api-reference/peripherals/sd_pullup_requirements.rst',
'hw-reference/esp32/**',
'api-guides/RF_calibration.rst',
'api-guides/phy.rst'] + FTDI_JTAG_DOCS + QEMU_DOCS
'api-guides/phy.rst',
]
+ FTDI_JTAG_DOCS
+ QEMU_DOCS
)
ESP32S2_DOCS = ['hw-reference/esp32s2/**',
ESP32S2_DOCS = (
[
'hw-reference/esp32s2/**',
'api-guides/usb-console.rst',
'api-reference/peripherals/ds.rst',
'api-reference/peripherals/temp_sensor.rst',
'api-reference/system/async_memcpy.rst',
'api-reference/peripherals/touch_element.rst',
'api-guides/RF_calibration.rst',
'api-guides/phy.rst'] + FTDI_JTAG_DOCS + USB_OTG_DFU_DOCS + USB_OTG_CONSOLE_DOCS
'api-guides/phy.rst',
]
+ FTDI_JTAG_DOCS
+ USB_OTG_DFU_DOCS
+ USB_OTG_CONSOLE_DOCS
)
ESP32S3_DOCS = ['hw-reference/esp32s3/**',
ESP32S3_DOCS = (
[
'hw-reference/esp32s3/**',
'api-reference/system/ipc.rst',
'api-guides/flash_psram_config.rst',
'api-reference/peripherals/sd_pullup_requirements.rst',
'api-guides/RF_calibration.rst',
'api-guides/phy.rst'] + USB_OTG_DFU_DOCS + USB_OTG_CONSOLE_DOCS + QEMU_DOCS
'api-guides/phy.rst',
]
+ USB_OTG_DFU_DOCS
+ USB_OTG_CONSOLE_DOCS
+ QEMU_DOCS
)
# No JTAG docs for this one as it gets gated on SOC_USB_SERIAL_JTAG_SUPPORTED down below.
ESP32C3_DOCS = ['hw-reference/esp32c3/**',
'api-guides/RF_calibration.rst',
'api-guides/phy.rst'] + QEMU_DOCS
ESP32C3_DOCS = ['hw-reference/esp32c3/**', 'api-guides/RF_calibration.rst', 'api-guides/phy.rst'] + QEMU_DOCS
ESP32C2_DOCS = ['api-guides/RF_calibration.rst',
'api-guides/phy.rst']
ESP32C2_DOCS = ['api-guides/RF_calibration.rst', 'api-guides/phy.rst']
ESP32C5_DOCS = ['api-guides/phy.rst']
ESP32C61_DOCS = ['api-guides/phy.rst']
ESP32C6_DOCS = ['api-guides/RF_calibration.rst',
ESP32C6_DOCS = [
'api-guides/RF_calibration.rst',
'api-reference/peripherals/sd_pullup_requirements.rst',
'api-guides/phy.rst'] + ESP_TEE_DOCS
'api-guides/phy.rst',
] + ESP_TEE_DOCS
ESP32H2_DOCS = ['api-guides/RF_calibration.rst',
'api-guides/phy.rst']
ESP32H2_DOCS = ['api-guides/RF_calibration.rst', 'api-guides/phy.rst']
ESP32P4_DOCS = ['api-reference/system/ipc.rst',
ESP32P4_DOCS = [
'api-reference/system/ipc.rst',
'api-reference/peripherals/cap_touch_sens.rst',
'api-reference/peripherals/sd_pullup_requirements.rst'] + USB_OTG_DFU_DOCS
'api-reference/peripherals/sd_pullup_requirements.rst',
] + USB_OTG_DFU_DOCS
# format: {tag needed to include: documents to included}, tags are parsed from sdkconfig and peripheral_caps.h headers
conditional_include_dict = {'SOC_BT_SUPPORTED':BT_DOCS,
conditional_include_dict = {
'SOC_BT_SUPPORTED': BT_DOCS,
'SOC_BLE_SUPPORTED': BLE_DOCS,
'SOC_BLE_MESH_SUPPORTED': BLE_MESH_DOCS,
'SOC_BLUFI_SUPPORTED': BLUFI_DOCS,
@@ -328,9 +367,11 @@ conditional_include_dict = {'SOC_BT_SUPPORTED':BT_DOCS,
'esp32c6': ESP32C6_DOCS,
'esp32c61': ESP32C61_DOCS,
'esp32h2': ESP32H2_DOCS,
'esp32p4':ESP32P4_DOCS}
'esp32p4': ESP32P4_DOCS,
}
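As an aside on the dictionary reformatted above: each key is a capability tag parsed from sdkconfig and the peripheral_caps.h headers, and its value is the list of documents that should only be built when that capability is present. A minimal sketch of the idea, with the names tags and exclude_docs being illustrative rather than part of conf_common.py:

    # Illustrative only: turning the tag -> documents mapping into an exclude list.
    # 'tags' stands in for whatever capability tags were detected for the current target.
    tags = {'SOC_BT_SUPPORTED', 'SOC_WIFI_SUPPORTED'}

    exclude_docs = []
    for tag, docs in conditional_include_dict.items():
        if tag not in tags:
            # documents guarded by an absent capability are left out of the build
            exclude_docs.extend(docs)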
extensions += ['sphinx_copybutton',
extensions += [ # noqa: F405
'sphinx_copybutton',
'sphinxcontrib.wavedrom',
# Note: order is important here, events must
# be registered by one extension before they can be
@@ -355,8 +396,8 @@ smartquotes = False
github_repo = 'espressif/esp-idf'
# context used by sphinx_idf_theme
html_context['github_user'] = 'espressif'
html_context['github_repo'] = 'esp-idf'
html_context['github_user'] = 'espressif' # noqa: F405
html_context['github_repo'] = 'esp-idf' # noqa: F405
# Extra options required by sphinx_idf_theme
@@ -372,26 +413,33 @@ project_homepage = 'https://github.com/espressif/esp-idf'
linkcheck_anchors = False
linkcheck_exclude_documents = ['index', # several false positives due to the way we link to different sections
linkcheck_exclude_documents = [
'index', # several false positives due to the way we link to different sections
'api-reference/protocols/esp_local_ctrl', # Fails due to `https://<mdns-hostname>.local`
'api-reference/provisioning/wifi_provisioning', # Fails due to `https://<mdns-hostname>.local`
]
linkcheck_ignore = ['https://webhome.phy.duke.edu/~rgb/General/dieharder.php', # Certificate error
'https://docs.espressif.com/projects/esptool/en/latest/esp32c2/espefuse/index.html', # Not published
'https://docs.espressif.com/projects/esptool/en/latest/esp32c2/espsecure/index.html#remote-signing-using-an-external-hs', # Not published
'https://docs.espressif.com/projects/esptool/en/latest/esp32c6/espefuse/index.html', # Not published
'https://docs.espressif.com/projects/esptool/en/latest/esp32c6/espsecure/index.html#remote-signing-using-an-external-hs', # Not published
'https://docs.espressif.com/projects/esptool/en/latest/esp32h2/espefuse/index.html', # Not published
'https://docs.espressif.com/projects/esptool/en/latest/esp32h2/espsecure/index.html#remote-signing-using-an-external-hs', # Not published
'https://www.cadence.com/content/dam/cadence-www/global/en_US/documents/tools/ip/tensilica-ip/isa-summary.pdf', # Rejects user-agent
# URLs to ignore during linkcheck
linkcheck_ignore = [
# Certificate error
'https://webhome.phy.duke.edu/~rgb/General/dieharder.php',
# Not published docs
'https://docs.espressif.com/projects/esptool/en/latest/esp32c2/espefuse/index.html',
'https://docs.espressif.com/projects/esptool/en/latest/esp32c2/espsecure/index.html#remote-signing-using-an-external-hs',
'https://docs.espressif.com/projects/esptool/en/latest/esp32c6/espefuse/index.html',
'https://docs.espressif.com/projects/esptool/en/latest/esp32c6/espsecure/index.html#remote-signing-using-an-external-hs',
'https://docs.espressif.com/projects/esptool/en/latest/esp32h2/espefuse/index.html',
'https://docs.espressif.com/projects/esptool/en/latest/esp32h2/espsecure/index.html#remote-signing-using-an-external-hs',
# Rejects user-agent
'https://www.cadence.com/content/dam/cadence-www/global/en_US/documents/tools/ip/tensilica-ip/isa-summary.pdf',
]
# Custom added feature to allow redirecting old URLs
with open('../page_redirects.txt') as f:
lines = [re.sub(' +', ' ', line.strip()) for line in f.readlines() if line.strip() != '' and not line.startswith('#')]
lines = [
re.sub(' +', ' ', line.strip()) for line in f.readlines() if line.strip() != '' and not line.startswith('#')
]
for line in lines: # check for well-formed entries
if len(line.split(' ')) != 2:
raise RuntimeError('Invalid line in page_redirects.txt: %s' % line)
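For readers unfamiliar with the file being validated here: page_redirects.txt is expected to hold one 'old-path new-path' pair per line, with blank lines and # comments skipped and repeated spaces collapsed by the re.sub above. A hedged illustration of what a single entry turns into:

    # Illustrative only: each surviving line yields one (old, new) redirect pair.
    line = 'api-guides/old-page.rst api-guides/new-page.rst'  # hypothetical entry
    old, new = line.split(' ')  # exactly two fields, as enforced by the check above
    # html_redirect_pages then receives the tuple ('api-guides/old-page.rst', 'api-guides/new-page.rst')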
@@ -399,7 +447,10 @@ html_redirect_pages = [tuple(line.split(' ')) for line in lines]
html_static_path = ['../_static']
idf_build_system = {'doxygen_component_info': True, 'component_info_ignore_file': Path(os.environ['IDF_PATH']) / 'docs' / 'component_info_ignore_file.txt'}
idf_build_system = {
'doxygen_component_info': True,
'component_info_ignore_file': Path(os.environ['IDF_PATH']) / 'docs' / 'component_info_ignore_file.txt',
}
# Please update following list to enable Qemu doc guide (and cross references) for a new target
QEMU_TARGETS = ['esp32', 'esp32c3', 'esp32s3']
@@ -408,7 +459,9 @@ QEMU_TARGETS = ['esp32', 'esp32c3', 'esp32s3']
# Callback function for user setup that needs be done after `config-init`-event
# config.idf_target is not available at the initial config stage
def conf_setup(app, config):
config.add_warnings_content = 'This document is not updated for {} yet, so some of the content may not be correct.'.format(config.idf_target.upper())
config.add_warnings_content = (
f'This document is not updated for {config.idf_target.upper()} yet, so some of the content may not be correct.'
)
add_warnings_file = '{}/../docs_not_updated/{}.txt'.format(app.confdir, config.idf_target)
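For context on the callback reformatted above: conf_setup has to run after Sphinx has initialised its configuration, because config.idf_target is not known while conf.py is first evaluated. esp_docs performs the actual registration, so the following is only a sketch of how such a hook is wired up in a plain Sphinx project, using the standard 'config-inited' event as an assumption:

    # Illustrative only: registering a post-config callback in a plain Sphinx conf.py.
    def setup(app):
        app.connect('config-inited', conf_setup)  # conf_setup(app, config) matches this event's signature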

View File

@@ -12,20 +12,23 @@ try:
from packaging.requirements import Requirement
from packaging.version import Version
except ImportError:
print('packaging cannot be imported. '
'If you\'ve installed a custom Python then this package is provided separately and have to be installed as well. '
'Please refer to the Get Started section of the ESP-IDF Programming Guide for setting up the required packages.')
print(
'packaging cannot be imported. '
"If you've installed a custom Python then this package is provided separately and have to be installed as "
'well. Please refer to the Get Started section of the ESP-IDF Programming Guide for setting up the required '
'packages.'
)
sys.exit(1)
try:
from importlib.metadata import PackageNotFoundError
from importlib.metadata import requires as _requires
from importlib.metadata import version as _version
from importlib.metadata import PackageNotFoundError
except ImportError:
# compatibility for python <=3.7
from importlib_metadata import PackageNotFoundError # type: ignore
from importlib_metadata import requires as _requires # type: ignore
from importlib_metadata import version as _version # type: ignore
from importlib_metadata import PackageNotFoundError # type: ignore
try:
from typing import Set
@@ -66,12 +69,20 @@ def get_requires(name: str) -> Optional[list]:
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='ESP-IDF Python package dependency checker')
parser.add_argument('--requirements', '-r',
parser.add_argument(
'--requirements',
'-r',
help='Path to a requirements file (can be used multiple times)',
action='append', default=[])
parser.add_argument('--constraints', '-c', default=[],
action='append',
default=[],
)
parser.add_argument(
'--constraints',
'-c',
default=[],
help='Path to a constraints file (can be used multiple times)',
action='append')
action='append',
)
args = parser.parse_args()
required_set = set()
@@ -87,7 +98,8 @@ if __name__ == '__main__':
con = os.path.basename(con)
elif con.startswith('--only-binary'):
continue
elif con.startswith('-e') and '#egg=' in con: # version control URLs, take the egg= part at the end only
# version control URLs, take the egg= part at the end only
elif con.startswith('-e') and '#egg=' in con:
con_m = re.search(r'#egg=([^\s]+)', con)
if not con_m:
print('Malformed input. Cannot find name in {}'.format(con))
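The branch rewrapped above deals with editable VCS requirements; a quick illustration of what the '#egg=' regex extracts (the URL is hypothetical):

    # Illustrative only: for '-e' VCS requirements the '#egg=' fragment names the package.
    import re
    con = '-e git+https://example.com/some/repo.git#egg=esp-idf-kconfig'  # hypothetical constraint line
    match = re.search(r'#egg=([^\s]+)', con)
    print(match.group(1))  # prints 'esp-idf-kconfig'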
@@ -103,7 +115,7 @@ if __name__ == '__main__':
not_satisfied = [] # in string form which will be printed
# already_checked set is used in order to avoid circular checks which would cause looping.
already_checked = set() # type: Set[Requirement]
already_checked: Set[Requirement] = set()
# required_set contains package names in string form without version constraints. If the package has a constraint
# specification (package name + version requirement) then use that instead. new_req_list is used to store
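The already_checked set annotated above exists to stop the dependency walk from looping when packages depend on each other, directly or indirectly. The underlying pattern is a plain visited-set traversal, sketched here with illustrative names rather than the script's actual variables:

    # Illustrative only: walking a dependency graph without revisiting nodes.
    def walk(req, get_deps, seen):
        if req in seen:
            return  # already processed; this is what prevents looping on cycles
        seen.add(req)
        for dep in get_deps(req):
            walk(dep, get_deps, seen)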
@@ -125,7 +137,10 @@ if __name__ == '__main__':
except Exception as e:
# Catch general exception, because get_version may return None (https://github.com/python/cpython/issues/91216)
# log package name alongside the error message for easier debugging
not_satisfied.append(f"Error while checking requirement '{req}'. Package was not found and is required by the application: {e}")
not_satisfied.append(
f"Error while checking requirement '{req}'. Package was not found and is required by the "
f'application: {e}'
)
new_req_list.remove(req)
else:
new_req_list.remove(req)
@@ -138,7 +153,7 @@ if __name__ == '__main__':
try:
dependency_requirements = set()
extras = list(requirement.extras) or ['']
# `requires` returns all sub-requirements including all extras - we need to filter out just required ones
# `requires` returns all sub-requirements including all extras; we need to filter out just required ones
for name in get_requires(requirement.name) or []:
sub_req = Requirement(name)
# check extras e.g. esptool[hsm]
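For context on the comment reworded above: importlib.metadata's requires() lists every declared dependency, including those guarded by an extra marker, so entries belonging to unrequested extras have to be filtered out. A hedged sketch of the distinction, using esptool only as an example package:

    # Illustrative only: requires() mixes plain dependencies with extra-gated ones.
    from importlib.metadata import requires
    from packaging.requirements import Requirement

    for entry in requires('esptool') or []:  # entries look like "python-pkcs11; extra == 'hsm'"
        sub_req = Requirement(entry)
        if sub_req.marker and not sub_req.marker.evaluate({'extra': ''}):
            continue  # dependency only applies when the named extra is requested
        print(sub_req.name)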
@@ -155,7 +170,10 @@ if __name__ == '__main__':
except Exception as e:
# Catch general exception, because get_version may return None (https://github.com/python/cpython/issues/91216)
# log package name alongside the error message for easier debugging
not_satisfied.append(f"Error while checking requirement '{req}'. Package was not found and is required by the application: {e}")
not_satisfied.append(
f"Error while checking requirement '{req}'. Package was not found and is required by the "
f'application: {e}'
)
if len(not_satisfied) > 0:
print('The following Python requirements are not satisfied:')
@@ -166,8 +184,10 @@ if __name__ == '__main__':
install_script = 'install.bat' if sys.platform == 'win32' else 'install.sh'
print('To install the missing packages, please run "{}"'.format(install_script))
else:
print('Please follow the instructions found in the "Set up the tools" section of '
'ESP-IDF Getting Started Guide.')
print(
'Please follow the instructions found in the "Set up the tools" section of '
'ESP-IDF Getting Started Guide.'
)
print('Diagnostic information:')
idf_python_env_path = os.environ.get('IDF_PYTHON_ENV_PATH')

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2020-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2020-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#
# This program creates archives compatible with ESP32-S* ROM DFU implementation.
@@ -19,13 +19,13 @@ from collections import namedtuple
from functools import partial
try:
import typing
import typing # noqa: F401
except ImportError:
# Only used for type annotations
pass
try:
from itertools import izip as zip # type: ignore
from itertools import izip as zip # type: ignore # noqa: A004
except ImportError:
# Python 3
pass
@@ -55,9 +55,7 @@ CPIOHeader = namedtuple(
CPIO_TRAILER = 'TRAILER!!!'
def make_cpio_header(
filename_len, file_len, is_trailer=False
): # type: (int, int, bool) -> CPIOHeader
def make_cpio_header(filename_len, file_len, is_trailer=False): # type: (int, int, bool) -> CPIOHeader
"""Returns CPIOHeader for the given file name and file size"""
def as_hex(val): # type: (int) -> bytes
@@ -91,9 +89,7 @@ DFUInfo = namedtuple('DFUInfo', ['address', 'flags', 'name', 'md5'])
DFUINFO_FILE = 'dfuinfo0.dat'
# Structure which gets added at the end of the entire DFU file
DFUSUFFIX_STRUCT = b'<H H H H 3s B'
DFUSuffix = namedtuple(
'DFUSuffix', ['bcd_device', 'pid', 'vid', 'bcd_dfu', 'sig', 'len']
)
DFUSuffix = namedtuple('DFUSuffix', ['bcd_device', 'pid', 'vid', 'bcd_dfu', 'sig', 'len'])
ESPRESSIF_VID = 12346
# This CRC32 gets added after DFUSUFFIX_STRUCT
DFUCRC_STRUCT = b'<I'
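For orientation, DFUSUFFIX_STRUCT above is the layout of the standard USB DFU file suffix, and DFUCRC_STRUCT is the CRC32 appended right after it. A rough sketch of how the two get written, with placeholder values (0xFFFF, 0x0100, b'UFD', pid) standing in for whatever mkdfu.py actually uses, since those details are outside this hunk:

    # Illustrative only: append a DFU-style suffix followed by its CRC32.
    import struct
    suffix = struct.pack(DFUSUFFIX_STRUCT, 0xFFFF, pid, ESPRESSIF_VID, 0x0100, b'UFD', 16)  # pid: placeholder product id
    out_data += suffix
    out_data += struct.pack(DFUCRC_STRUCT, dfu_crc(out_data))  # dfu_crc is the helper defined below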
@@ -114,9 +110,9 @@ FlashParamsData = namedtuple(
)
FLASH_PARAMS_STRUCT = b'<IIIIIIII'
FLASH_PARAMS_FILE = 'flash_params.dat'
DFU_INFO_FLAG_PARAM = (1 << 2)
DFU_INFO_FLAG_NOERASE = (1 << 3)
DFU_INFO_FLAG_IGNORE_MD5 = (1 << 4)
DFU_INFO_FLAG_PARAM = 1 << 2
DFU_INFO_FLAG_NOERASE = 1 << 3
DFU_INFO_FLAG_IGNORE_MD5 = 1 << 4
def dfu_crc(data, crc=0): # type: (bytes, int) -> int
@@ -167,7 +163,7 @@ class EspDfuWriter(object):
block_size=64 * 1024,
sector_size=4 * 1024,
page_size=256,
status_mask=0xffff,
status_mask=0xFFFF,
)
data = struct.pack(FLASH_PARAMS_STRUCT, *flash_params)
flags = DFU_INFO_FLAG_PARAM | DFU_INFO_FLAG_NOERASE | DFU_INFO_FLAG_IGNORE_MD5
@@ -214,9 +210,7 @@ class EspDfuWriter(object):
# Finally write the entire binary
self.dest.write(out_data)
def _add_cpio_flash_entry(
self, filename, flash_addr, data, flags=0
): # type: (str, int, bytes, int) -> None
def _add_cpio_flash_entry(self, filename, flash_addr, data, flags=0): # type: (str, int, bytes, int) -> None
md5 = hashlib.md5()
md5.update(data)
self.index.append(
@@ -229,14 +223,10 @@ class EspDfuWriter(object):
)
self._add_cpio_entry(filename, data)
def _add_cpio_entry(
self, filename, data, first=False, trailer=False
): # type: (str, bytes, bool, bool) -> None
def _add_cpio_entry(self, filename, data, first=False, trailer=False): # type: (str, bytes, bool, bool) -> None
filename_b = filename.encode('utf-8') + b'\x00'
cpio_header = make_cpio_header(len(filename_b), len(data), is_trailer=trailer)
entry = pad_bytes(
struct.pack(CPIO_STRUCT, *cpio_header) + filename_b, 4
) + pad_bytes(data, 4)
entry = pad_bytes(struct.pack(CPIO_STRUCT, *cpio_header) + filename_b, 4) + pad_bytes(data, 4)
if not first:
self.entries.append(entry)
else:
@@ -262,27 +252,33 @@ def main(): # type: () -> None
# Provision to add "info" command
subparsers = parser.add_subparsers(dest='command')
write_parser = subparsers.add_parser('write')
write_parser.add_argument('-o', '--output-file',
write_parser.add_argument(
'-o',
'--output-file',
help='Filename for storing the output DFU image',
required=True,
type=argparse.FileType('wb'))
write_parser.add_argument('--pid',
required=True,
type=lambda h: int(h, 16),
help='Hexa-decimal product indentificator')
write_parser.add_argument('--json',
help='Optional file for loading "flash_files" dictionary with <address> <file> items')
write_parser.add_argument('--part-size',
type=argparse.FileType('wb'),
)
write_parser.add_argument(
'--pid', required=True, type=lambda h: int(h, 16), help='Hexa-decimal product indentificator'
)
write_parser.add_argument(
'--json', help='Optional file for loading "flash_files" dictionary with <address> <file> items'
)
write_parser.add_argument(
'--part-size',
default=os.environ.get('ESP_DFU_PART_SIZE', 512 * 1024),
type=lambda x: int(x, 0),
help='Larger files are split-up into smaller partitions of this size')
write_parser.add_argument('files',
metavar='<address> <file>', help='Add <file> at <address>',
nargs='*')
write_parser.add_argument('-fs', '--flash-size',
help='Larger files are split-up into smaller partitions of this size',
)
write_parser.add_argument('files', metavar='<address> <file>', help='Add <file> at <address>', nargs='*')
write_parser.add_argument(
'-fs',
'--flash-size',
help='SPI Flash size in MegaBytes (1MB, 2MB, 4MB, 8MB, 16MB, 32MB, 64MB, 128MB)',
choices=['1MB', '2MB', '4MB', '8MB', '16MB', '32MB', '64MB', '128MB'],
default='2MB')
default='2MB',
)
args = parser.parse_args()
@@ -299,28 +295,28 @@ def main(): # type: () -> None
json_dir = os.path.dirname(os.path.abspath(args.json))
def process_json_file(path): # type: (str) -> str
'''
"""
The input path is relative to json_dir. This function makes it relative to the current working
directory.
'''
"""
return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir))
with open(args.json, encoding='utf-8') as f:
files += [(int(addr, 0),
process_json_file(f_name)) for addr, f_name in json.load(f)['flash_files'].items()]
files += [(int(addr, 0), process_json_file(f_name)) for addr, f_name in json.load(f)['flash_files'].items()]
files = sorted([(addr, f_name) for addr, f_name in dict(files).items()],
key=lambda x: x[0]) # remove possible duplicates and sort based on the address
files = sorted(
[(addr, f_name) for addr, f_name in dict(files).items()], key=lambda x: x[0]
) # remove possible duplicates and sort based on the address
cmd_args = {'output_file': args.output_file,
cmd_args = {
'output_file': args.output_file,
'files': files,
'pid': args.pid,
'part_size': args.part_size,
'flash_size': args.flash_size,
}
{'write': action_write
}[args.command](cmd_args)
{'write': action_write}[args.command](cmd_args)
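For reference, the --json input consumed in this hunk is the build system's flasher_args.json, whose flash_files mapping pairs flash offsets with binary paths relative to the JSON's own directory. A hypothetical excerpt of that mapping and how it feeds the sort above:

    # Illustrative only: shape of the 'flash_files' mapping (offsets and paths are hypothetical).
    flash_files = {
        '0x1000': 'bootloader/bootloader.bin',
        '0x8000': 'partition_table/partition-table.bin',
        '0x10000': 'app.bin',
    }
    files = sorted((int(addr, 0), path) for addr, path in flash_files.items())  # sorted by address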
if __name__ == '__main__':

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# NOTE: unittest is by default sorting tests based on their names,
# so the order if which the tests are started may be different from
@@ -15,7 +15,7 @@ import subprocess
import sys
import tempfile
import unittest
from typing import List
from typing import List # noqa: F401
try:
import idf_tools
@@ -85,7 +85,7 @@ class BasePythonInstall(unittest.TestCase):
# Wheel for foopackage-0.99-py3-none-any.whl
# This is dummy package for testing purposes created with
# python -m build --wheel for the following package
'''
"""
├── foopackage
│   └── __init__.py
└── setup.py
@@ -101,9 +101,10 @@ class BasePythonInstall(unittest.TestCase):
__init__.py
if __name__ == '__main__':
return
'''
"""
whl = (b'PK\x03\x04\x14\x00\x00\x00\x08\x00\x07fqVz|E\t&\x00\x00\x00&\x00\x00\x00\x16\x00\x00\x00'
whl = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00\x07fqVz|E\t&\x00\x00\x00&\x00\x00\x00\x16\x00\x00\x00'
b'foopackage/__init__.py\xcbLS\x88\x8f\xcfK\xccM\x8d\x8fW\xb0\xb5UP\x8f\x8f\xcfM\xcc\xcc\x8b\x8fW'
b'\xb7\xe2R\x00\x82\xa2\xd4\x92\xd2\xa2<.\x00PK\x03\x04\x14\x00\x00\x00\x08\x00%fqV\x8d\x90\x81\x05'
b'1\x00\x00\x006\x00\x00\x00"\x00\x00\x00foopackage-0.99.dist-info/METADATA\xf3M-ILI,I\xd4\rK-*\xce'
@@ -112,7 +113,7 @@ class BasePythonInstall(unittest.TestCase):
b'.dist-info/WHEEL\x0b\xcfHM\xcd\xd1\rK-*\xce\xcc\xcf\xb3R0\xd43\xe0rO\xcdK-J,\xc9/\xb2RHJ\xc9,.\x89/'
b'\x07\xa9Q\xd00\xd031\xd03\xd0\xe4\n\xca\xcf/\xd1\xf5,\xd6\r(-J\xcd\xc9L\xb2R()*M\xe5\nIL\xb7R(\xa84'
b'\xd6\xcd\xcb\xcfK\xd5M\xcc\xab\xe4\xe2\x02\x00PK\x03\x04\x14\x00\x00\x00\x08\x00%fqVI*\x9e\xa7\r\x00'
b'\x00\x00\x0b\x00\x00\x00\'\x00\x00\x00foopackage-0.99.dist-info/top_level.txtK\xcb\xcf/HL\xceNLO\xe5'
b"\x00\x00\x0b\x00\x00\x00'\x00\x00\x00foopackage-0.99.dist-info/top_level.txtK\xcb\xcf/HL\xceNLO\xe5"
b'\x02\x00PK\x03\x04\x14\x00\x00\x00\x08\x00%fqV&\xdc\x9b\x88\xfd\x00\x00\x00}\x01\x00\x00 \x00\x00\x00'
b'foopackage-0.99.dist-info/RECORD}\xcc;\x92\x820\x00\x00\xd0\xde\xb3\x04\xe4#\xbfb\x8b\xac\xb0\x0b,'
b'\xa8\x83\x02#M&\x08\x81\x80\x02c\x02\x82\xa7\xb7rK\xdf\x01\x1e\xe9\xfb\x01_Z\\\x95k\x84hG9B\xe2\xb0'
@@ -120,28 +121,30 @@ class BasePythonInstall(unittest.TestCase):
b':\xec\x00\xd5\\\x91\xffL\x90D\xcb\x12\x0b\xca\xb8@;\xd2\xafC\xe7\x04mx\x82\xef\xb8\xf2\xc6"\xd9\xdd'
b'\r\x18\xe4\xcd\xef=\xf7\n7\x9eg4?\xa7\x04V*gXI\xff\xcanD\xc1\xf1\xc0\x80\xb6\xf9\x10\xa7\xae\xe3\x04'
b'\xefuh/<;?\xe3\xe3\x06\x9e\x93N/|\xc1Puc\xefgt\xfaQJ3\x82V\x8e\xb2\xef\x86\x12\xd9\x04\x96\xf2a\xe5'
b'\xfd\x80\xae\xe5T^E>\xf3\xf7\x1eW\x122\xe4\x91\xfbi\x1f\xd6\xeem\x99\xd4\xec\x11Ju\x9d\'R\xc83R\x19>'
b"\xfd\x80\xae\xe5T^E>\xf3\xf7\x1eW\x122\xe4\x91\xfbi\x1f\xd6\xeem\x99\xd4\xec\x11Ju\x9d'R\xc83R\x19>"
b'jbO:\xb8\x8b\td\xf9\xc3\x1e9\xdb}d\x03\xb0z\x01PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00\x07fqVz|E\t'
b'&\x00\x00\x00&\x00\x00\x00\x16\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x00'
b'foopackage/__init__.pyPK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00%fqV\x8d\x90\x81\x051\x00\x00\x006\x00'
b'\x00\x00"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81Z\x00\x00\x00foopackage-0.99.dist-info'
b'/METADATAPK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00%fqVI\xa2!\xcb\\\x00\x00\x00\\\x00\x00\x00\x1f\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\xcb\x00\x00\x00foopackage-0.99.dist-info/WHEELPK\x01'
b'\x02\x14\x03\x14\x00\x00\x00\x08\x00%fqVI*\x9e\xa7\r\x00\x00\x00\x0b\x00\x00\x00\'\x00\x00\x00\x00\x00'
b"\x02\x14\x03\x14\x00\x00\x00\x08\x00%fqVI*\x9e\xa7\r\x00\x00\x00\x0b\x00\x00\x00'\x00\x00\x00\x00\x00"
b'\x00\x00\x00\x00\x00\x00\xa4\x81d\x01\x00\x00foopackage-0.99.dist-info/top_level.txtPK\x01\x02\x14\x03'
b'\x14\x00\x00\x00\x08\x00%fqV&\xdc\x9b\x88\xfd\x00\x00\x00}\x01\x00\x00 \x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\xb4\x81\xb6\x01\x00\x00foopackage-0.99.dist-info/RECORDPK\x05\x06\x00\x00\x00\x00\x05'
b'\x00\x05\x00\x84\x01\x00\x00\xf1\x02\x00\x00\x00\x00')
b'\x00\x05\x00\x84\x01\x00\x00\xf1\x02\x00\x00\x00\x00'
)
return self.dump_package(whl, 'foopackage-0.99-py3-none-any.whl')
def dump_foopackage_dev(self): # type: () -> str
# similar to dump_foopackage, but using dev release version
whl = (b'PK\x03\x04\x14\x00\x00\x00\x08\x00\nl\x03W !Z\xfc%\x00\x00\x00%\x00\x00\x00\x16\x00\x00\x00'
whl = (
b'PK\x03\x04\x14\x00\x00\x00\x08\x00\nl\x03W !Z\xfc%\x00\x00\x00%\x00\x00\x00\x16\x00\x00\x00'
b'foopackage/__init__.py\xcbLS\x88\x8f\xcfK\xccM\x8d\x8fW\xb0\xb5UP\x8f\x8f\xcfM\xcc\xcc\x8b\x8fW\xb7'
b'\xe2R\x00\x82\xa2\xd4\x92\xd2\xa2<\x00PK\x03\x04\x14\x00\x00\x00\x08\x00Jl\x03W\xb4wO\x876\x00\x00'
b'\x00;\x00\x00\x00\'\x00\x00\x00foopackage-0.99.dev0.dist-info/METADATA\xf3M-ILI,I\xd4\rK-*\xce\xcc'
b"\x00;\x00\x00\x00'\x00\x00\x00foopackage-0.99.dev0.dist-info/METADATA\xf3M-ILI,I\xd4\rK-*\xce\xcc"
b'\xcf\xb3R0\xd23\xe4\xf2K\xccM\xb5RH\xcb\xcf/HL\xceNLO\xe5\x82\xcb\x1a\xe8YZ\xea\xa5\xa4\x96\x19pq'
b'\x01\x00PK\x03\x04\x14\x00\x00\x00\x08\x00Jl\x03W\xda9\xe8\xb4[\x00\x00\x00\\\x00\x00\x00$\x00\x00'
b'\x00foopackage-0.99.dev0.dist-info/WHEEL\x0b\xcfHM\xcd\xd1\rK-*\xce\xcc\xcf\xb3R0\xd43\xe0rO\xcdK-J,'
@@ -159,7 +162,7 @@ class BasePythonInstall(unittest.TestCase):
b'\xca\x83\xbb\t\xf3\xa9\xf33\t5\x7f\xfa\x90\xd2\xe2\x04}\x9eW\xb5\xee\xe2\xefx\x07\x0f\xced\x00EyWD'
b'\xb6\x15Fk\x00f\x7fPK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00\nl\x03W !Z\xfc%\x00\x00\x00%\x00\x00'
b'\x00\x16\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x00foopackage/__init__.py'
b'PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00Jl\x03W\xb4wO\x876\x00\x00\x00;\x00\x00\x00\'\x00\x00\x00'
b"PK\x01\x02\x14\x03\x14\x00\x00\x00\x08\x00Jl\x03W\xb4wO\x876\x00\x00\x00;\x00\x00\x00'\x00\x00\x00"
b'\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81Y\x00\x00\x00foopackage-0.99.dev0.dist-info/METADATAPK\x01'
b'\x02\x14\x03\x14\x00\x00\x00\x08\x00Jl\x03W\xda9\xe8\xb4[\x00\x00\x00\\\x00\x00\x00$\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\xa4\x81\xd4\x00\x00\x00foopackage-0.99.dev0.dist-info/WHEELPK\x01\x02'
@@ -167,13 +170,13 @@ class BasePythonInstall(unittest.TestCase):
b'\x00\x00\x00\x00\x00\x00\x00\xa4\x81q\x01\x00\x00foopackage-0.99.dev0.dist-info/top_level.txtPK\x01'
b'\x02\x14\x03\x14\x00\x00\x00\x08\x00Jl\x03W\x1e\xbaW\xb5\x00\x01\x00\x00\x91\x01\x00\x00%\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x81\xc8\x01\x00\x00foopackage-0.99.dev0.dist-info/RECORDPK'
b'\x05\x06\x00\x00\x00\x00\x05\x00\x05\x00\x98\x01\x00\x00\x0b\x03\x00\x00\x00\x00')
b'\x05\x06\x00\x00\x00\x00\x05\x00\x05\x00\x98\x01\x00\x00\x0b\x03\x00\x00\x00\x00'
)
return self.dump_package(whl, 'foopackage-0.99.dev0-py3-none-any.whl')
class TestPythonInstall(BasePythonInstall):
def setUp(self): # type: () -> None
if os.path.isdir(PYTHON_DIR):
shutil.rmtree(PYTHON_DIR)
@@ -183,7 +186,7 @@ class TestPythonInstall(BasePythonInstall):
def test_default_arguments(self): # type: () -> None
output = self.run_idf_tools(['check-python-dependencies'])
self.assertNotIn(REQ_SATISFIED, output)
self.assertIn(f'{PYTHON_BINARY} doesn\'t exist', output)
self.assertIn(f"{PYTHON_BINARY} doesn't exist", output)
output = self.run_idf_tools(['install-python-env'])
self.assertIn(CONSTR, output)
@@ -219,7 +222,6 @@ class TestPythonInstall(BasePythonInstall):
class TestCustomPythonPathInstall(BasePythonInstall):
def setUp(self): # type: () -> None
self.CUSTOM_PYTHON_DIR = tempfile.mkdtemp()
self.environ_old = os.environ.copy()
@@ -244,7 +246,6 @@ class TestCustomPythonPathInstall(BasePythonInstall):
class TestCheckPythonDependencies(BasePythonInstall):
"""
The constraint file name is available as the constraint_file attribute. The content of the file is changed by these
tests. The backup_constraint_file is a temporary file with the content of the original constraint file. This is
@@ -252,6 +253,7 @@ class TestCheckPythonDependencies(BasePythonInstall):
important for consequent tests which should not download a new one especially when the test was run with a custom
constraint file different from the one on dl.espressif.com.
"""
constraint_file: str
backup_constraint_file: str