From 3cca3da1d52ad6cbf7190d7ef3e64dbe12853b66 Mon Sep 17 00:00:00 2001 From: Marek Fiala Date: Tue, 23 Jul 2024 15:59:09 +0200 Subject: [PATCH] feat(tools): Enforce utf-8 encoding with open() function --- components/efuse/efuse_table_gen.py | 10 ++++---- .../main/gen_digital_signature_tests.py | 2 +- .../check_system_init_priorities.py | 4 ++-- components/espcoredump/espcoredump.py | 3 +-- components/partition_table/gen_esp32part.py | 2 +- .../partition_table/gen_extra_subtypes_inc.py | 3 +-- components/partition_table/parttool.py | 2 +- components/ulp/esp32ulp_mapgen.py | 11 ++++----- conftest.py | 2 +- .../esp_local_ctrl/pytest_esp_local_ctrl.py | 3 +-- .../simple/pytest_https_server_simple.py | 2 +- .../protocols/mqtt/ssl/pytest_mqtt_ssl.py | 7 +++--- .../semihost_vfs/pytest_semihost_vfs.py | 4 ++-- .../spiffsgen/pytest_spiffsgen_example.py | 2 +- .../app_trace_basic/pytest_app_trace_basic.py | 4 ++-- .../system/app_trace_to_plot/read_trace.py | 14 +++++------ .../native_ota_example/pytest_native_ota.py | 2 +- .../simple_ota_example/pytest_simple_ota.py | 8 +++---- .../sysview_tracing/pytest_sysview_tracing.py | 2 +- .../pytest_sysview_tracing_heap_log.py | 2 +- tools/check_python_dependencies.py | 4 ++-- .../gen_soc_caps_kconfig.py | 6 ++--- tools/idf.py | 4 ++-- tools/idf_py_actions/create_ext.py | 2 +- tools/idf_py_actions/debug_ext.py | 12 +++++----- tools/idf_py_actions/serial_ext.py | 4 ++-- tools/idf_py_actions/tools.py | 12 +++++----- tools/idf_tools.py | 24 +++++++++---------- tools/install_util.py | 7 ++---- tools/ldgen/ldgen.py | 2 +- tools/mass_mfg/mfg_gen.py | 24 +++++++++---------- tools/mkdfu.py | 7 ++---- tools/mkuf2.py | 19 +++++++-------- 33 files changed, 102 insertions(+), 114 deletions(-) diff --git a/components/efuse/efuse_table_gen.py b/components/efuse/efuse_table_gen.py index 976b2d6a20..c1a0d6a1d8 100755 --- a/components/efuse/efuse_table_gen.py +++ b/components/efuse/efuse_table_gen.py @@ -128,13 +128,13 @@ class FuseTable(list): field_name = p.field_name + p.group if field_name != '' and len(duplicates.intersection([field_name])) != 0: fl_error = True - print('Field at %s, %s, %s, %s have dublicate field_name' % + print('Field at %s, %s, %s, %s have duplicate field_name' % (p.field_name, p.efuse_block, p.bit_start, p.bit_count)) if fl_error is True: raise InputError('Field names must be unique') def check_struct_field_name(self): - # check that stuctured fields have a root field + # check that structured fields have a root field for p in self: if '.' 
in p.field_name: name = '' @@ -456,7 +456,7 @@ def process_input_file(file, type_table): def ckeck_md5_in_file(md5, filename): if os.path.exists(filename): - with open(filename, 'r') as f: + with open(filename, 'r', encoding='utf-8') as f: for line in f: if md5 in line: return True @@ -480,12 +480,12 @@ def create_output_files(name, output_table, debug): if ckeck_md5_in_file(output_table.md5_digest_table, file_c_path) is False: status('Creating efuse *.h file ' + file_h_path + ' ...') output = output_table.to_header(file_name) - with open(file_h_path, 'w') as f: + with open(file_h_path, 'w', encoding='utf-8') as f: f.write(output) status('Creating efuse *.c file ' + file_c_path + ' ...') output = output_table.to_c_file(file_name, debug) - with open(file_c_path, 'w') as f: + with open(file_c_path, 'w', encoding='utf-8') as f: f.write(output) else: print('Source files do not require updating correspond to csv file.') diff --git a/components/esp_hw_support/test_apps/esp_hw_support_unity_tests/main/gen_digital_signature_tests.py b/components/esp_hw_support/test_apps/esp_hw_support_unity_tests/main/gen_digital_signature_tests.py index 2aa2b6f3ab..674475c91d 100644 --- a/components/esp_hw_support/test_apps/esp_hw_support_unity_tests/main/gen_digital_signature_tests.py +++ b/components/esp_hw_support/test_apps/esp_hw_support_unity_tests/main/gen_digital_signature_tests.py @@ -67,7 +67,7 @@ def generate_tests_cases(target): # type: (str) -> None messages = [random.randrange(0, 1 << max_key_size) for x in range(NUM_MESSAGES)] - with open('digital_signature_test_cases.h', 'w') as f: + with open('digital_signature_test_cases.h', 'w', encoding='utf-8') as f: f.write('/*\n') year = datetime.datetime.now().year f.write(' * SPDX-FileCopyrightText: {year} Espressif Systems (Shanghai) CO LTD\n'.format(year=year)) diff --git a/components/esp_system/check_system_init_priorities.py b/components/esp_system/check_system_init_priorities.py index 4bb1741c08..cd36e69a8a 100644 --- a/components/esp_system/check_system_init_priorities.py +++ b/components/esp_system/check_system_init_priorities.py @@ -50,7 +50,7 @@ def main() -> None: glob_iter = glob.glob(os.path.join(idf_path, 'components', '**', f'*.{extension}'), recursive=True) source_files_iters.append(glob_iter) for filename in itertools.chain(*source_files_iters): - with open(filename, 'r') as f_obj: + with open(filename, 'r', encoding='utf-8') as f_obj: file_contents = f_obj.read() if ESP_SYSTEM_INIT_FN_STR not in file_contents: continue @@ -80,7 +80,7 @@ def main() -> None: # 3. Load startup entries list from STARTUP_ENTRIES_FILE, removing comments and empty lines # startup_entries_expected_lines = [] - with open(os.path.join(idf_path, STARTUP_ENTRIES_FILE), 'r') as startup_entries_expected_file: + with open(os.path.join(idf_path, STARTUP_ENTRIES_FILE), 'r', encoding='utf-8') as startup_entries_expected_file: for line in startup_entries_expected_file: if line.startswith('#') or len(line.strip()) == 0: continue diff --git a/components/espcoredump/espcoredump.py b/components/espcoredump/espcoredump.py index b17f234b56..5bb35dd96e 100755 --- a/components/espcoredump/espcoredump.py +++ b/components/espcoredump/espcoredump.py @@ -4,7 +4,6 @@ # # SPDX-License-Identifier: Apache-2.0 # - import json import logging import os.path @@ -26,7 +25,7 @@ def get_prefix_map_gdbinit_path(prog_path): # type: (str) -> Any logging.warning('%s does not exist. 
Please build the app with "idf.py build"', desc_path) return '' - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc.get('debug_prefix_map_gdbinit') diff --git a/components/partition_table/gen_esp32part.py b/components/partition_table/gen_esp32part.py index 5e18a86ed0..068adac1d5 100755 --- a/components/partition_table/gen_esp32part.py +++ b/components/partition_table/gen_esp32part.py @@ -591,7 +591,7 @@ def main(): if input_is_binary: output = table.to_csv() - with sys.stdout if args.output == '-' else open(args.output, 'w') as f: + with sys.stdout if args.output == '-' else open(args.output, 'w', encoding='utf-8') as f: f.write(output) else: output = table.to_binary() diff --git a/components/partition_table/gen_extra_subtypes_inc.py b/components/partition_table/gen_extra_subtypes_inc.py index 45e4751a47..59814bfa2f 100755 --- a/components/partition_table/gen_extra_subtypes_inc.py +++ b/components/partition_table/gen_extra_subtypes_inc.py @@ -1,14 +1,13 @@ #!/usr/bin/env python # SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 - import argparse def gen_header_file(path: str, subtypes: str) -> None: HDR_MESSAGE = '/* Automatically generated file. DO NOT EDIT. */\n\n' PARTTOOL_USAGE = 'If you want to use parttool.py manually, please use the following as an extra argument:' - with open(path, 'w') as f: + with open(path, 'w', encoding='utf-8') as f: f.write(HDR_MESSAGE) if subtypes: f.write('/*\n\t' + PARTTOOL_USAGE + '\n\t') diff --git a/components/partition_table/parttool.py b/components/partition_table/parttool.py index 2b6920cb59..988c89473b 100755 --- a/components/partition_table/parttool.py +++ b/components/partition_table/parttool.py @@ -92,7 +92,7 @@ class ParttoolTarget(): partition_table = gen.PartitionTable.from_binary(f.read()) if partition_table is None: - with open(partition_table_file, 'r') as f: + with open(partition_table_file, 'r', encoding='utf-8') as f: f.seek(0) partition_table = gen.PartitionTable.from_csv(f.read()) else: diff --git a/components/ulp/esp32ulp_mapgen.py b/components/ulp/esp32ulp_mapgen.py index 382752269c..91ae2655ca 100755 --- a/components/ulp/esp32ulp_mapgen.py +++ b/components/ulp/esp32ulp_mapgen.py @@ -1,12 +1,9 @@ #!/usr/bin/env python +# SPDX-FileCopyrightText: 2016-2024 Espressif Systems (Shanghai) CO LTD +# SPDX-License-Identifier: Apache-2.0 +# # esp32ulp_mapgen utility converts a symbol list provided by nm into an export script # for the linker and a header file. 
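The hunks above, and most of the ones that follow, make the same mechanical change: pass encoding='utf-8' explicitly. In text mode, open() otherwise falls back to locale.getpreferredencoding(False), which is typically cp1252 on Windows and UTF-8 on Linux/macOS, so generated headers and parsed CSV tables could round-trip differently depending on the build host. A minimal sketch of how such call sites can be flushed out with PEP 597's EncodingWarning (Python 3.10+; the file name is illustrative):

    # Run as:  python -X warn_default_encoding demo.py
    # (or set PYTHONWARNDEFAULTENCODING=1 in the environment)
    import warnings

    # Escalate the warning to an error so offending call sites fail loudly
    warnings.simplefilter('error', EncodingWarning)

    with open('demo.txt', 'w') as f:  # EncodingWarning: 'encoding' argument not specified
        f.write('text')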
-# -# SPDX-FileCopyrightText: 2016-2021 Espressif Systems (Shanghai) CO LTD -# SPDX-License-Identifier: Apache-2.0 - -from __future__ import print_function - import argparse import os import textwrap @@ -65,7 +62,7 @@ def main() -> None: args = parser.parse_args() - with open(args.outputfile + '.h', 'w') as f_h, open(args.outputfile + '.ld', 'w') as f_ld: + with open(args.outputfile + '.h', 'w', encoding='utf-8') as f_h, open(args.outputfile + '.ld', 'w', encoding='utf-8') as f_ld: gen_ld_h_from_sym(args.symfile, f_ld, f_h, int(args.base_addr, 0)) diff --git a/conftest.py b/conftest.py index 68631c1a50..6167b31bf3 100644 --- a/conftest.py +++ b/conftest.py @@ -180,7 +180,7 @@ def check_performance(idf_path: str) -> Callable[[str, float, str], None]: """ def _find_perf_item(operator: str, path: str) -> float: - with open(path, 'r') as f: + with open(path, 'r', encoding='utf-8') as f: data = f.read() match = re.search(r'#define\s+IDF_PERFORMANCE_{}_{}\s+([\d.]+)'.format(operator, item.upper()), data) return float(match.group(1)) # type: ignore diff --git a/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py b/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py index aaa5ace52a..bfd59b66cb 100644 --- a/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py +++ b/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py @@ -1,6 +1,5 @@ # SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Unlicense OR CC0-1.0 - import logging import os import re @@ -22,7 +21,7 @@ def get_sdk_path() -> str: class CustomProcess(object): def __init__(self, cmd: str, logfile: str, verbose:bool =True) -> None: self.verbose = verbose - self.f = open(logfile, 'w') + self.f = open(logfile, 'w', encoding='utf-8') if self.verbose: logging.info('Starting {} > {}'.format(cmd, self.f.name)) self.pexpect_proc = pexpect.spawn(cmd, timeout=60, logfile=self.f, encoding='utf-8', codec_errors='ignore') diff --git a/examples/protocols/https_server/simple/pytest_https_server_simple.py b/examples/protocols/https_server/simple/pytest_https_server_simple.py index 86aa6a3f42..1cff1d21cc 100644 --- a/examples/protocols/https_server/simple/pytest_https_server_simple.py +++ b/examples/protocols/https_server/simple/pytest_https_server_simple.py @@ -133,7 +133,7 @@ def test_examples_protocol_https_server_simple(dut: Dut) -> None: ssl_context.check_hostname = False ssl_context.load_verify_locations(cadata=server_cert_pem) - with open(CLIENT_CERT_FILE, 'w') as cert, open(CLIENT_KEY_FILE, 'w') as key: + with open(CLIENT_CERT_FILE, 'w', encoding='utf-8') as cert, open(CLIENT_KEY_FILE, 'w', encoding='utf-8') as key: cert.write(client_cert_pem) key.write(client_key_pem) diff --git a/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py b/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py index c16b89cbb1..d27cc65b78 100644 --- a/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py +++ b/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py @@ -1,11 +1,12 @@ -# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Unlicense OR CC0-1.0 import logging import os import re import ssl import sys -from threading import Event, Thread +from threading import Event +from threading import Thread import paho.mqtt.client as mqtt import pexpect @@ -47,7 +48,7 @@ def on_message(client, userdata, msg): # type: (mqtt.Client, tuple, mqtt.client event_client_received_binary.set() return recv_binary = binary + 
'.received' - with open(recv_binary, 'w') as fw: + with open(recv_binary, 'w', encoding='utf-8') as fw: fw.write(msg.payload) raise ValueError('Received binary (saved as: {}) does not match the original file: {}'.format(recv_binary, binary)) diff --git a/examples/storage/semihost_vfs/pytest_semihost_vfs.py b/examples/storage/semihost_vfs/pytest_semihost_vfs.py index 03dd2acdb2..8269caa87e 100644 --- a/examples/storage/semihost_vfs/pytest_semihost_vfs.py +++ b/examples/storage/semihost_vfs/pytest_semihost_vfs.py @@ -43,7 +43,7 @@ def test_semihost_vfs(dut: IdfDut) -> None: dut.expect_exact('example: Wrote 2798 bytes') dut.expect_exact('====================== HOST DATA START =========================') - with open(HOST_FILE_PATH) as f: + with open(HOST_FILE_PATH, encoding='utf-8') as f: for line in f: if line.strip(): dut.expect_exact(line.strip()) @@ -51,7 +51,7 @@ def test_semihost_vfs(dut: IdfDut) -> None: dut.expect_exact('====================== HOST DATA END =========================') dut.expect_exact('example: Read 6121 bytes') - with open(os.path.join(TEMP_DIR, 'esp32_stdout.txt')) as f: + with open(os.path.join(TEMP_DIR, 'esp32_stdout.txt'), encoding='utf-8') as f: def expected_content() -> t.Iterator[str]: yield 'example: Switched to semihosted stdout' diff --git a/examples/storage/spiffsgen/pytest_spiffsgen_example.py b/examples/storage/spiffsgen/pytest_spiffsgen_example.py index 75f2a9ad31..a7eebe820d 100644 --- a/examples/storage/spiffsgen/pytest_spiffsgen_example.py +++ b/examples/storage/spiffsgen/pytest_spiffsgen_example.py @@ -14,7 +14,7 @@ def test_spiffsgen_example(dut: Dut) -> None: base_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'spiffs_image') # Expect hello.txt is read successfully - with open(os.path.join(base_dir, 'hello.txt'), 'r') as hello_txt: + with open(os.path.join(base_dir, 'hello.txt'), 'r', encoding='utf-8') as hello_txt: dut.expect('Read from hello.txt: ' + hello_txt.read().rstrip()) # Expect alice.txt MD5 hash is computed accurately diff --git a/examples/system/app_trace_basic/pytest_app_trace_basic.py b/examples/system/app_trace_basic/pytest_app_trace_basic.py index 9027eb4181..1747aedcd9 100644 --- a/examples/system/app_trace_basic/pytest_app_trace_basic.py +++ b/examples/system/app_trace_basic/pytest_app_trace_basic.py @@ -46,7 +46,7 @@ def test_examples_app_trace_basic(dut: IdfDut, openocd: OpenOcd) -> None: assert 'Targets connected.' 
in dut.openocd.write('esp apptrace start file://apptrace.log 0 2000 3 0 0') apptrace_wait_stop(dut.openocd) - with open(openocd._logfile) as oocd_log: # pylint: disable=protected-access + with open(openocd._logfile, encoding='utf-8') as oocd_log: # pylint: disable=protected-access cores = 1 if dut.app.sdkconfig.get('FREERTOS_UNICORE') is True else 2 params_str = 'App trace params: from {} cores,'.format(cores) found = False @@ -59,7 +59,7 @@ def test_examples_app_trace_basic(dut: IdfDut, openocd: OpenOcd) -> None: '"{}" could not be found in {}'.format(params_str, openocd._logfile) # pylint: disable=protected-access ) - with open('apptrace.log') as apptrace_log: + with open('apptrace.log', encoding='utf-8') as apptrace_log: for sample_num in range(1, 51): log_str = 'Apptrace test data[{}]:{}'.format(sample_num, sample_num * sample_num) found = False diff --git a/examples/system/app_trace_to_plot/read_trace.py b/examples/system/app_trace_to_plot/read_trace.py index 660348cea2..50c90d2232 100644 --- a/examples/system/app_trace_to_plot/read_trace.py +++ b/examples/system/app_trace_to_plot/read_trace.py @@ -1,6 +1,5 @@ -# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 - import argparse import datetime import json @@ -9,7 +8,8 @@ import signal import sys from enum import Enum from functools import partial -from typing import Any, List +from typing import Any +from typing import List try: import espytrace.apptrace @@ -47,7 +47,7 @@ app.layout = html.Div( html.Div([ html.H2('Telemetry Data'), html.Div(id='live-update-data'), - dcc.Graph(id='live-update-graph', style={'height': 800}), # Height of the plotting area setted to 800px + dcc.Graph(id='live-update-graph', style={'height': 800}), # Height of the plotting area set to 800px dcc.Interval( id='interval-component', interval=5 * 100, # Graph will be updated every 500 ms @@ -57,7 +57,7 @@ app.layout = html.Div( ) -# Multiple components can update everytime interval gets fired. +# Multiple components can update every time interval gets fired. 
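One caveat when sweeping encoding= across a codebase: data that is genuinely bytes belongs in binary mode, where open() accepts no encoding argument at all. The MQTT hunk above writes msg.payload, which paho-mqtt delivers as bytes and which a text-mode file object would reject; a sketch of the binary-mode alternative (the helper name is illustrative):

    def save_payload(path: str, payload: bytes) -> None:
        # 'wb' round-trips the payload byte-for-byte; adding encoding= here
        # would raise ValueError, since binary mode takes no encoding argument
        with open(path, 'wb') as f:
            f.write(payload)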
@app.callback(Output('live-update-graph', 'figure'), Input('interval-component', 'n_intervals')) def update_graph_live(_n: Any) -> Any: # pylint: disable=undefined-argument @@ -162,13 +162,13 @@ class CustomRequestHandler(espytrace.apptrace.TCPRequestHandler): def read_json(file_path: str) -> Any: - with open(file_path, 'r') as f: + with open(file_path, 'r', encoding='utf-8') as f: data = json.load(f) return data def save_data(file_path: str) -> None: - with open(file_path, 'w') as f: + with open(file_path, 'w', encoding='utf-8') as f: f.writelines(output_lines) diff --git a/examples/system/ota/native_ota_example/pytest_native_ota.py b/examples/system/ota/native_ota_example/pytest_native_ota.py index 5559d8d1fe..a0823167de 100644 --- a/examples/system/ota/native_ota_example/pytest_native_ota.py +++ b/examples/system/ota/native_ota_example/pytest_native_ota.py @@ -67,7 +67,7 @@ server_key = '-----BEGIN PRIVATE KEY-----\n'\ def create_file(server_file: str, file_data: str) -> None: - with open(server_file, 'w+') as file: + with open(server_file, 'w+', encoding='utf-8') as file: file.write(file_data) diff --git a/examples/system/ota/simple_ota_example/pytest_simple_ota.py b/examples/system/ota/simple_ota_example/pytest_simple_ota.py index d6efe5a25d..ca6227c910 100644 --- a/examples/system/ota/simple_ota_example/pytest_simple_ota.py +++ b/examples/system/ota/simple_ota_example/pytest_simple_ota.py @@ -69,13 +69,13 @@ def start_https_server(ota_image_dir: str, server_ip: str, server_port: int, ser if server_file is None: server_file = os.path.join(ota_image_dir, 'server_cert.pem') - cert_file_handle = open(server_file, 'w+') + cert_file_handle = open(server_file, 'w+', encoding='utf-8') cert_file_handle.write(server_cert) cert_file_handle.close() if key_file is None: key_file = os.path.join(ota_image_dir, 'server_key.pem') - key_file_handle = open('server_key.pem', 'w+') + key_file_handle = open('server_key.pem', 'w+', encoding='utf-8') key_file_handle.write(server_key) key_file_handle.close() @@ -90,12 +90,12 @@ def start_https_server(ota_image_dir: str, server_ip: str, server_port: int, ser def start_tls1_3_server(ota_image_dir: str, server_port: int) -> subprocess.Popen: os.chdir(ota_image_dir) server_file = os.path.join(ota_image_dir, 'server_cert.pem') - cert_file_handle = open(server_file, 'w+') + cert_file_handle = open(server_file, 'w+', encoding='utf-8') cert_file_handle.write(server_cert) cert_file_handle.close() key_file = os.path.join(ota_image_dir, 'server_key.pem') - key_file_handle = open('server_key.pem', 'w+') + key_file_handle = open('server_key.pem', 'w+', encoding='utf-8') key_file_handle.write(server_key) key_file_handle.close() diff --git a/examples/system/sysview_tracing/pytest_sysview_tracing.py b/examples/system/sysview_tracing/pytest_sysview_tracing.py index a970480d3f..6af3e02c71 100644 --- a/examples/system/sysview_tracing/pytest_sysview_tracing.py +++ b/examples/system/sysview_tracing/pytest_sysview_tracing.py @@ -34,7 +34,7 @@ def test_examples_sysview_tracing(dut: IdfDut) -> None: dut.gdb.write('c', non_blocking=True) time.sleep(1) # to avoid EOF file error - with open(dut.gdb._logfile) as fr: # pylint: disable=protected-access + with open(dut.gdb._logfile, encoding='utf-8') as fr: # pylint: disable=protected-access gdb_pexpect_proc = pexpect.fdpexpect.fdspawn(fr.fileno()) gdb_pexpect_proc.expect('Thread 2 "main" hit Breakpoint 1, app_main ()') diff --git a/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py 
b/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py index f96242dc22..c6147f52d5 100644 --- a/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py +++ b/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py @@ -49,7 +49,7 @@ def test_examples_sysview_tracing_heap_log(idf_path: str, dut: IdfDut) -> None: sysviewtrace.expect(r'Found \d+ leaked bytes in \d+ blocks.', timeout=120) # Validate GDB logs - with open(dut.gdb._logfile) as fr: # pylint: disable=protected-access + with open(dut.gdb._logfile, encoding='utf-8') as fr: # pylint: disable=protected-access gdb_pexpect_proc = pexpect.fdpexpect.fdspawn(fr.fileno()) gdb_pexpect_proc.expect_exact( 'Thread 2 "main" hit Temporary breakpoint 1, heap_trace_start (mode_param', timeout=10) # should be (mode_param=HEAP_TRACE_ALL) # TODO GCC-329 diff --git a/tools/check_python_dependencies.py b/tools/check_python_dependencies.py index 945d8e7a0d..6621b03314 100755 --- a/tools/check_python_dependencies.py +++ b/tools/check_python_dependencies.py @@ -45,12 +45,12 @@ if __name__ == '__main__': required_set = set() for req_path in args.requirements: - with open(req_path) as f: + with open(req_path, encoding='utf-8') as f: required_set |= set(i for i in map(str.strip, f.readlines()) if len(i) > 0 and not i.startswith('#')) constr_dict = {} # for example package_name -> package_name==1.0 for const_path in args.constraints: - with open(const_path) as f: + with open(const_path, encoding='utf-8') as f: for con in [i for i in map(str.strip, f.readlines()) if len(i) > 0 and not i.startswith('#')]: if con.startswith('file://'): con = os.path.basename(con) diff --git a/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py b/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py index 3ba6608652..a11bf84a15 100755 --- a/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py +++ b/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py @@ -80,7 +80,7 @@ class KconfigWriter(): def update_file(self, kconfig_path, always_write): # type: (Path, bool) -> bool try: - with open(kconfig_path, 'r') as f: + with open(kconfig_path, 'r', encoding='utf-8') as f: old_content = f.readlines() except FileNotFoundError: old_content = [''] @@ -99,7 +99,7 @@ class KconfigWriter(): if file_needs_update: print('\n' + 'Updating file: {}'.format(kconfig_path)) - with open(kconfig_path, 'w') as f: + with open(kconfig_path, 'w', encoding='utf-8') as f: f.writelines(new_content) return file_needs_update @@ -175,7 +175,7 @@ def generate_defines(soc_caps_dir, filename, always_write): # type: (Path, str, def get_defines(header_path): # type: (Path) -> list[str] defines = [] logging.info('Reading macros from {}...'.format(header_path)) - with open(header_path, 'r') as f: + with open(header_path, 'r', encoding='utf-8') as f: output = f.read() for line in output.split('\n'): diff --git a/tools/idf.py b/tools/idf.py index aa122adbbc..4f5652e1a0 100755 --- a/tools/idf.py +++ b/tools/idf.py @@ -465,7 +465,7 @@ def init_cli(verbose_output: Optional[List]=None) -> Any: # Otherwise, if we built any binaries print a message about # how to flash them def print_flashing_message(title: str, key: str) -> None: - with open(os.path.join(args.build_dir, 'flasher_args.json')) as file: + with open(os.path.join(args.build_dir, 'flasher_args.json'), encoding='utf-8') as file: flasher_args: Dict[str, Any] = json.load(file) def flasher_path(f: Union[str, 'os.PathLike[str]']) -> str: @@ -773,7 +773,7 @@ def expand_file_arguments(argv: List[Any]) -> List[Any]: 
visited.add(rel_path) try: - with open(rel_path, 'r') as f: + with open(rel_path, 'r', encoding='utf-8') as f: for line in f: expanded_args.extend(expand_args(shlex.split(line), os.path.dirname(rel_path), file_stack + [file_name])) except IOError: diff --git a/tools/idf_py_actions/create_ext.py b/tools/idf_py_actions/create_ext.py index f84b3dbb02..231ee1aa92 100644 --- a/tools/idf_py_actions/create_ext.py +++ b/tools/idf_py_actions/create_ext.py @@ -17,7 +17,7 @@ def get_type(action: str) -> str: def replace_in_file(filename: str, pattern: str, replacement: str) -> None: - with open(filename, 'r+') as f: + with open(filename, 'r+', encoding='utf-8') as f: content = f.read() overwritten_content = re.sub(pattern, replacement, content, flags=re.M) f.seek(0) diff --git a/tools/idf_py_actions/debug_ext.py b/tools/idf_py_actions/debug_ext.py index 06afe80708..ebe31d0770 100644 --- a/tools/idf_py_actions/debug_ext.py +++ b/tools/idf_py_actions/debug_ext.py @@ -70,7 +70,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: if p.poll() is not None: print('OpenOCD exited with {}'.format(p.poll())) break - with open(name, 'r') as f: + with open(name, 'r', encoding='utf-8') as f: content = f.read() if re.search(r'Listening on port \d+ for gdb connections', content): # expect OpenOCD has started successfully - stop watching @@ -78,7 +78,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: time.sleep(0.5) # OpenOCD exited or is not listening -> print full log and terminate - with open(name, 'r') as f: + with open(name, 'r', encoding='utf-8') as f: print(f.read()) raise FatalError('Action "{}" failed due to errors in OpenOCD'.format(target), ctx) @@ -194,7 +194,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: name = processes[target + '_outfile_name'] pos = 0 while True: - with open(name, 'r') as f: + with open(name, 'r', encoding='utf-8') as f: f.seek(pos) for line in f: print(line.rstrip()) @@ -212,7 +212,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc @@ -237,7 +237,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: local_dir = project_desc['build_dir'] args = ['openocd'] + shlex.split(openocd_arguments) openocd_out_name = os.path.join(local_dir, OPENOCD_OUT_FILE) - openocd_out = open(openocd_out_name, 'w') + openocd_out = open(openocd_out_name, 'w', encoding='utf-8') try: process = subprocess.Popen(args, stdout=openocd_out, stderr=subprocess.STDOUT, bufsize=1) except Exception as e: @@ -352,7 +352,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: if gdbgui_port is not None: gdbgui_args += ['--port', gdbgui_port] gdbgui_out_name = os.path.join(local_dir, GDBGUI_OUT_FILE) - gdbgui_out = open(gdbgui_out_name, 'w') + gdbgui_out = open(gdbgui_out_name, 'w', encoding='utf-8') env = os.environ.copy() # The only known solution for https://github.com/cs01/gdbgui/issues/359 is to set the following environment # variable. 
The greenlet package cannot be downgraded for compatibility with other requirements (gdbgui, diff --git a/tools/idf_py_actions/serial_ext.py b/tools/idf_py_actions/serial_ext.py index 86c663b5d4..2756b92d57 100644 --- a/tools/idf_py_actions/serial_ext.py +++ b/tools/idf_py_actions/serial_ext.py @@ -45,7 +45,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc @@ -61,7 +61,7 @@ def action_extensions(base_actions: Dict, project_path: str) -> Dict: result += ['-p', args.port] result += ['-b', str(args.baud)] - with open(os.path.join(args.build_dir, 'flasher_args.json')) as f: + with open(os.path.join(args.build_dir, 'flasher_args.json'), encoding='utf-8') as f: flasher_args = json.load(f) extra_esptool_args = flasher_args['extra_esptool_args'] diff --git a/tools/idf_py_actions/tools.py b/tools/idf_py_actions/tools.py index 15840ff984..bdcb060cb3 100644 --- a/tools/idf_py_actions/tools.py +++ b/tools/idf_py_actions/tools.py @@ -64,7 +64,7 @@ def _set_build_context(args: 'PropertyDict') -> None: proj_desc_fn = f'{args.build_dir}/project_description.json' try: - with open(proj_desc_fn, 'r') as f: + with open(proj_desc_fn, 'r', encoding='utf-8') as f: ctx['proj_desc'] = json.load(f) except (OSError, ValueError) as e: raise FatalError(f'Cannot load {proj_desc_fn}: {e}') @@ -85,7 +85,7 @@ def _idf_version_from_cmake() -> Optional[str]: regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)') ver = {} try: - with open(version_path) as f: + with open(version_path, encoding='utf-8') as f: for line in f: m = regex.match(line) @@ -183,7 +183,7 @@ def load_hints() -> Dict: } current_module_dir = os.path.dirname(__file__) - with open(os.path.join(current_module_dir, 'hints.yml'), 'r') as file: + with open(os.path.join(current_module_dir, 'hints.yml'), 'r', encoding='utf-8') as file: hints['yml'] = yaml.safe_load(file) hint_modules_dir = os.path.join(current_module_dir, 'hint_modules') @@ -257,7 +257,7 @@ def generate_hints(*filenames: str) -> Generator: """Getting output files and printing hints on how to resolve errors based on the output.""" hints = load_hints() for file_name in filenames: - with open(file_name, 'r') as file: + with open(file_name, 'r', encoding='utf-8') as file: yield from generate_hints_buffer(file.read(), hints) @@ -685,7 +685,7 @@ def get_sdkconfig_filename(args: 'PropertyDict', cache_cmdl: Optional[Dict]=None proj_desc_path = os.path.join(args.build_dir, 'project_description.json') try: - with open(proj_desc_path, 'r') as f: + with open(proj_desc_path, 'r', encoding='utf-8') as f: proj_desc = json.load(f) return str(proj_desc['config_file']) except (OSError, KeyError): @@ -706,7 +706,7 @@ def get_sdkconfig_value(sdkconfig_file: str, key: str) -> Optional[str]: value = None # if the value is quoted, this excludes the quotes from the value pattern = re.compile(r"^{}=\"?([^\"]*)\"?$".format(key)) - with open(sdkconfig_file, 'r') as f: + with open(sdkconfig_file, 'r', encoding='utf-8') as f: for line in f: match = re.match(pattern, line) if match: diff --git a/tools/idf_tools.py b/tools/idf_tools.py index e76a253dba..ff8b839798 100755 --- a/tools/idf_tools.py +++ b/tools/idf_tools.py @@ -1335,7 +1335,7 @@ class ENVState: if cls.deactivate_file_path: try: - with 
open(cls.deactivate_file_path, 'r') as fp: + with open(cls.deactivate_file_path, 'r', encoding='utf-8') as fp: env_state_obj.idf_variables = json.load(fp) except (IOError, OSError, ValueError): pass @@ -1345,7 +1345,7 @@ class ENVState: try: if self.deactivate_file_path and os.path.basename(self.deactivate_file_path).endswith('idf_' + str(os.getppid())): # If exported file path/name exists and belongs to actual opened shell - with open(self.deactivate_file_path, 'w') as w: + with open(self.deactivate_file_path, 'w', encoding='utf-8') as w: json.dump(self.idf_variables, w, ensure_ascii=False, indent=4) # type: ignore else: with tempfile.NamedTemporaryFile(delete=False, suffix='idf_' + str(os.getppid())) as fp: @@ -1363,7 +1363,7 @@ def load_tools_info(): # type: () -> dict[str, IDFTool] """ tool_versions_file_name = global_tools_json - with open(tool_versions_file_name, 'r') as f: # type: ignore + with open(tool_versions_file_name, 'r', encoding='utf-8') as f: # type: ignore tools_info = json.load(f) return parse_tools_info_json(tools_info) # type: ignore @@ -1414,7 +1414,7 @@ def get_idf_version() -> str: version_file_path = os.path.join(global_idf_path or '', 'version.txt') if os.path.exists(version_file_path): - with open(version_file_path, 'r') as version_file: + with open(version_file_path, 'r', encoding='utf-8') as version_file: idf_version_str = version_file.read() match = re.match(r'^v([0-9]+\.[0-9]+).*', idf_version_str) @@ -1423,7 +1423,7 @@ def get_idf_version() -> str: if idf_version is None: try: - with open(os.path.join(global_idf_path or '', 'components', 'esp_common', 'include', 'esp_idf_version.h')) as f: + with open(os.path.join(global_idf_path or '', 'components', 'esp_common', 'include', 'esp_idf_version.h'), encoding='utf-8') as f: m = re.search(r'^#define\s+ESP_IDF_VERSION_MAJOR\s+(\d+).+?^#define\s+ESP_IDF_VERSION_MINOR\s+(\d+)', f.read(), re.DOTALL | re.MULTILINE) if m: @@ -1805,7 +1805,7 @@ def process_tool( def check_python_venv_compatibility(idf_python_env_path: str, idf_version: str) -> None: try: - with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'r') as f: + with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'r', encoding='utf-8') as f: read_idf_version = f.read().strip() if read_idf_version != idf_version: fatal(f'Python environment is set to {idf_python_env_path} which was generated for ' @@ -2265,7 +2265,7 @@ def action_install_python_env(args): # type: ignore stdout=sys.stdout, stderr=sys.stderr) try: - with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'w') as f: + with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'w', encoding='utf-8') as f: f.write(idf_version) except OSError as e: warn(f'The following issue occurred while generating the ESP-IDF version file in the Python environment: {e}. 
' @@ -2397,7 +2397,7 @@ class ChecksumFileParser(): sha256_file = sha256_file_tmp download(url, sha256_file) - with open(sha256_file, 'r') as f: + with open(sha256_file, 'r', encoding='utf-8') as f: self.checksum = f.read().splitlines() # remove temp file @@ -2470,7 +2470,7 @@ def action_add_version(args): # type: ignore json_str = dump_tools_json(tools_info) if not args.output: args.output = os.path.join(global_idf_path, TOOLS_FILE_NEW) - with open(args.output, 'w') as f: + with open(args.output, 'w', encoding='utf-8') as f: f.write(json_str) f.write('\n') info('Wrote output to {}'.format(args.output)) @@ -2481,7 +2481,7 @@ def action_rewrite(args): # type: ignore json_str = dump_tools_json(tools_info) if not args.output: args.output = os.path.join(global_idf_path, TOOLS_FILE_NEW) - with open(args.output, 'w') as f: + with open(args.output, 'w', encoding='utf-8') as f: f.write(json_str) f.write('\n') info('Wrote output to {}'.format(args.output)) @@ -2571,10 +2571,10 @@ def action_validate(args): # type: ignore fatal('You need to install jsonschema package to use validate command') raise SystemExit(1) - with open(os.path.join(global_idf_path, TOOLS_FILE), 'r') as tools_file: + with open(os.path.join(global_idf_path, TOOLS_FILE), 'r', encoding='utf-8') as tools_file: tools_json = json.load(tools_file) - with open(os.path.join(global_idf_path, TOOLS_SCHEMA_FILE), 'r') as schema_file: + with open(os.path.join(global_idf_path, TOOLS_SCHEMA_FILE), 'r', encoding='utf-8') as schema_file: schema_json = json.load(schema_file) jsonschema.validate(tools_json, schema_json) # on failure, this will raise an exception with a fairly verbose diagnostic message diff --git a/tools/install_util.py b/tools/install_util.py index 387327d20f..1200b8c517 100644 --- a/tools/install_util.py +++ b/tools/install_util.py @@ -1,12 +1,9 @@ #!/usr/bin/env python - -# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD # # SPDX-License-Identifier: Apache-2.0 - # This script is used from the $IDF_PATH/install.* scripts. This way the argument parsing can be done at one place and # doesn't have to be implemented for all shells. 
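The idf_tools.py hunks above are largely JSON round-trips. RFC 8259 requires UTF-8 for JSON exchanged between systems, so pinning the encoding keeps json.dump() and json.load() symmetric regardless of the host locale; a condensed sketch of the pattern (the paths and names are illustrative):

    import json

    def save_vars(path: str, variables: dict) -> None:
        with open(path, 'w', encoding='utf-8') as f:
            # ensure_ascii=False stores non-ASCII values verbatim rather than as \uXXXX escapes
            json.dump(variables, f, ensure_ascii=False, indent=4)

    def load_vars(path: str) -> dict:
        with open(path, 'r', encoding='utf-8') as f:
            return json.load(f)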
- import argparse import json import os @@ -57,7 +54,7 @@ def action_print_help(script_extension: str) -> None: # extract the list of features from ./requirements.json thisdir = os.path.dirname(os.path.realpath(__file__)) - with open(f'{thisdir}/requirements.json', 'r') as f: + with open(f'{thisdir}/requirements.json', 'r', encoding='utf-8') as f: json_data = json.load(f) features = [feat['name'] for feat in json_data['features']] diff --git a/tools/ldgen/ldgen.py b/tools/ldgen/ldgen.py index 5f19b7b553..cf25b4776b 100755 --- a/tools/ldgen/ldgen.py +++ b/tools/ldgen/ldgen.py @@ -164,7 +164,7 @@ def main(): if exc.errno != errno.EEXIST: raise - with open(output_path, 'w') as f: # only create output file after generation has suceeded + with open(output_path, 'w', encoding='utf-8') as f: # only create output file after generation has succeeded f.write(output.read()) except LdGenFailure as e: print('linker script generation failed for %s\nERROR: %s' % (input_file.name, e)) diff --git a/tools/mass_mfg/mfg_gen.py b/tools/mass_mfg/mfg_gen.py index 5f05a7b1a6..8a1dabc82b 100644 --- a/tools/mass_mfg/mfg_gen.py +++ b/tools/mass_mfg/mfg_gen.py @@ -31,7 +31,7 @@ def create_temp_files(args): def strip_blank_lines(input_filename, output_filename): - with open(input_filename, 'r') as read_from, open(output_filename,'w', newline='') as write_to: + with open(input_filename, 'r', encoding='utf-8') as read_from, open(output_filename,'w', newline='', encoding='utf-8') as write_to: for line in read_from: if not line.isspace(): write_to.write(line) @@ -40,7 +40,7 @@ def strip_blank_lines(input_filename, output_filename): def verify_values_exist(input_values_file, keys_in_values_file): """ Verify all keys have corresponding values in values file """ - with open(input_values_file, 'r') as values_file: + with open(input_values_file, 'r', encoding='utf-8') as values_file: values_file_reader = csv.reader(values_file, delimiter=',') next(values_file_reader) @@ -56,7 +56,7 @@ def verify_keys_exist(values_file_keys, input_config_file): """ keys_missing = [] - with open(input_config_file,'r') as config_file: + with open(input_config_file,'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for line_num, line in enumerate(config_file_reader, start=1): @@ -82,7 +82,7 @@ def verify_datatype_encoding(input_config_file): valid_encodings = {'string', 'binary', 'hex2bin','u8', 'i8', 'u16', 'u32', 'i32','base64'} valid_datatypes = {'file','data','namespace'} - with open(input_config_file,'r') as config_file: + with open(input_config_file,'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for line_num, line in enumerate(config_file_reader, start=1): @@ -98,7 +98,7 @@ def verify_file_data_count(input_config_file, keys_repeat): """ Verify count of data on each line in config file is equal to 3 (as format must be: ) """ - with open(input_config_file, 'r') as config_file: + with open(input_config_file, 'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for line_num, line in enumerate(config_file_reader, start=1): @@ -144,7 +144,7 @@ def add_config_data_per_namespace(input_config_file): config_data_to_write = [] config_data_per_namespace = [] - with open(input_config_file,'r') as csv_config_file: + with open(input_config_file,'r', encoding='utf-8') as csv_config_file: config_file_reader = csv.reader(csv_config_file, delimiter=',') # `config_data_per_namespace` is added to `config_data_to_write` 
list after reading next namespace @@ -190,7 +190,7 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file): header = ['key', 'type', 'encoding', 'value'] data_to_write = [] - with open(output_csv_file, 'w', newline='') as target_csv_file: + with open(output_csv_file, 'w', newline='', encoding='utf-8') as target_csv_file: output_file_writer = csv.writer(target_csv_file, delimiter=',') output_file_writer.writerow(header) @@ -222,7 +222,7 @@ def create_dir(filetype, output_dir_path): def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename): - with open(csv_file, 'r') as read_from, open(target_filename,'w', newline='') as write_to: + with open(csv_file, 'r', encoding='utf-8') as read_from, open(target_filename,'w', newline='', encoding='utf-8') as write_to: csv_file_reader = csv.reader(read_from, delimiter=',') headers = next(csv_file_reader) values = next(csv_file_reader) @@ -255,7 +255,7 @@ def create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=Fals config_data_to_write = add_config_data_per_namespace(args.conf) try: - with open(args.values, 'r') as csv_values_file: + with open(args.values, 'r', encoding='utf-8') as csv_values_file: values_file_reader = csv.reader(csv_values_file, delimiter=',') keys = next(values_file_reader) @@ -266,7 +266,7 @@ def create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=Fals else: target_values_file = args.values - with open(target_values_file, 'r') as csv_values_file: + with open(target_values_file, 'r', encoding='utf-8') as csv_values_file: values_file_reader = csv.reader(csv_values_file, delimiter=',') next(values_file_reader) @@ -336,7 +336,7 @@ def verify_file_format(args): raise SystemExit('Error: values file: %s is empty.' % args.values) # Extract keys from config file - with open(args.conf, 'r') as config_file: + with open(args.conf, 'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for config_data in config_file_reader: if 'namespace' not in config_data: @@ -345,7 +345,7 @@ def verify_file_format(args): keys_repeat.append(config_data[0]) # Extract keys from values file - with open(args.values, 'r') as values_file: + with open(args.values, 'r', encoding='utf-8') as values_file: values_file_reader = csv.reader(values_file, delimiter=',') keys_in_values_file = next(values_file_reader) diff --git a/tools/mkdfu.py b/tools/mkdfu.py index 5c994e71ec..0fe4000e5e 100755 --- a/tools/mkdfu.py +++ b/tools/mkdfu.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2020-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 # # This program creates archives compatible with ESP32-S* ROM DFU implementation. @@ -9,9 +9,6 @@ # as a separate file. In addition to that, a special index file, 'dfuinfo0.dat', is created. # This file must be the first one in the archive. It contains binary structures describing each # subsequent file (for example, where the file needs to be flashed/loaded). 
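The mfg_gen.py hunks above pair the new encoding argument with newline='' on files handed to the csv module. That matches the csv documentation: the module does its own newline handling, and opening with newline='' keeps '\r\n' translation from corrupting embedded newlines in quoted fields. A minimal sketch of the read/rewrite pattern (file names are illustrative; the patch itself applies newline='' only on the write side):

    import csv

    with open('values.csv', 'r', encoding='utf-8', newline='') as src, \
            open('values_out.csv', 'w', encoding='utf-8', newline='') as dst:
        writer = csv.writer(dst)
        for row in csv.reader(src):
            writer.writerow(row)  # rows pass through unchanged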
- -from __future__ import print_function, unicode_literals - import argparse import hashlib import json @@ -308,7 +305,7 @@ def main(): # type: () -> None ''' return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir)) - with open(args.json) as f: + with open(args.json, encoding='utf-8') as f: files += [(int(addr, 0), process_json_file(f_name)) for addr, f_name in json.load(f)['flash_files'].items()] diff --git a/tools/mkuf2.py b/tools/mkuf2.py index 05d45dfecc..90e8572cba 100755 --- a/tools/mkuf2.py +++ b/tools/mkuf2.py @@ -1,16 +1,15 @@ #!/usr/bin/env python # -# SPDX-FileCopyrightText: 2020-2023 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2020-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 # Module was moved to the esptool in ESP-IDF v5.2 and relicensed under GPL v2.0 license. - -from __future__ import division - import argparse import json import os import subprocess import sys +from typing import List +from typing import Tuple def main() -> None: @@ -75,7 +74,7 @@ def main() -> None: raise RuntimeError('{} is not a regular file!'.format(file_name)) return file_name - files = [] + files: List[Tuple[int, str]] = [] if args.files: files += [(addr, check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])] @@ -89,7 +88,7 @@ def main() -> None: ''' return check_file(os.path.abspath(os.path.join(json_dir, path))) - with open(args.json) as f: + with open(args.json, encoding='utf-8') as f: json_content = json.load(f) if args.bin: @@ -107,10 +106,10 @@ def main() -> None: files += [(addr, process_json_file(f_name)) for addr, f_name in flash_dic.items()] # remove possible duplicates and sort based on the address - files = sorted([(addr, f_name) for addr, f_name in dict(files).items()], key=lambda x: x[0]) # type: ignore + files = sorted([(addr, f_name) for addr, f_name in dict(files).items()], key=lambda x: x[0]) # list of tuples to simple list - files = [item for t in files for item in t] + files_flatten = [item for t in files for item in t] cmd = [ sys.executable, '-m', 'esptool', @@ -125,10 +124,10 @@ def main() -> None: if args.md5_disable: cmd.append('--md5-disable') - cmd_str = ' '.join(cmd + files) + cmd_str = ' '.join(cmd + files_flatten) print(f'Executing: {cmd_str}') - sys.exit(subprocess.run(cmd + files).returncode) + sys.exit(subprocess.run(cmd + files_flatten).returncode) if __name__ == '__main__':
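Beyond the encoding change, the tail of the mkuf2.py diff untangles a variable that held two shapes in turn, first (address, filename) tuples and then a flat argv list, which is what the removed '# type: ignore' had been papering over. A condensed sketch of the resulting dedupe/sort/flatten sequence (values are illustrative):

    from typing import List, Tuple

    def to_argv(files: List[Tuple[int, str]]) -> List[str]:
        # dict() keeps the last file registered for a duplicated address; sort by address
        unique = sorted(dict(files).items(), key=lambda x: x[0])
        # [(4096, 'boot.bin'), (32768, 'part.bin')] -> ['4096', 'boot.bin', '32768', 'part.bin']
        return [str(item) for pair in unique for item in pair]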