diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5bf113b946..b04abdc998 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,11 +22,18 @@ variables:
   # GIT_STRATEGY is not defined here.
   # Use an option from "CI / CD Settings" - "General pipelines".
-  # "normal" strategy for fetching only top-level submodules since nothing requires the sub-submodules code for building IDF.
-  # If the "recursive" strategy is used we have a problem with using relative URLs for sub-submodules.
-  GIT_SUBMODULE_STRATEGY: normal
+  # We download an archive for each submodule instead of cloning it.
+  # Submodules are not fetched recursively, since sub-submodules are not used in CI.
+  GIT_SUBMODULE_STRATEGY: none
+  SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
+  # By default all submodules are fetched.
+  # Jobs can override this variable to fetch only the submodules they require.
+  # Set it to "none" if no submodules are needed.
+  SUBMODULES_TO_FETCH: "all"
+  # Tell the build system not to check for submodule updates, since archives are downloaded instead of cloning.
+  IDF_SKIP_CHECK_SUBMODULES: 1

-  UNIT_TEST_BUILD_SYSTEM: make
+  UNIT_TEST_BUILD_SYSTEM: cmake

   # IDF environment
   IDF_PATH: "$CI_PROJECT_DIR"

@@ -42,6 +49,7 @@ variables:
   CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/tools/ci/config/target-test.yml"

+  # Before each job, check whether it is filtered out by the bot stage/job filter.
 .apply_bot_filter: &apply_bot_filter
   python $APPLY_BOT_FILTER_SCRIPT || exit 0

@@ -70,12 +78,10 @@ variables:
       tools/idf_tools.py --non-interactive install && eval "$(tools/idf_tools.py --non-interactive export)" || exit 1
     fi

-.show_submodule_urls: &show_submodule_urls |
-  git config --get-regexp '^submodule\..*\.url$' || true
+.fetch_submodules: &fetch_submodules |
+  python $SUBMODULE_FETCH_TOOL -s $SUBMODULES_TO_FETCH

 before_script:
-  - echo "Running common script"
-  - *show_submodule_urls
   - source tools/ci/setup_python.sh
   # apply bot filter in before script
   - *apply_bot_filter
@@ -91,6 +97,8 @@ before_script:

   - *setup_tools_unless_target_test

+  - *fetch_submodules
+
   - *setup_custom_toolchain

 # used for check scripts which we want to run unconditionally
diff --git a/components/lwip/CMakeLists.txt b/components/lwip/CMakeLists.txt
index 81e8f15055..b2af693006 100644
--- a/components/lwip/CMakeLists.txt
+++ b/components/lwip/CMakeLists.txt
@@ -145,3 +145,11 @@ if(GCC_NOT_5_2_0)
         -Wno-implicit-fallthrough
     )
 endif()
+
+# "comparison is always false due to limited range of data type" warning
+# when setting CONFIG_LWIP_TCP_WND_DEFAULT to 65535
+set_source_files_properties(
+    lwip/src/core/tcp.c
+    PROPERTIES COMPILE_FLAGS
+    -Wno-type-limits
+)
diff --git a/components/lwip/component.mk b/components/lwip/component.mk
index a92b888693..e34b4afc33 100644
--- a/components/lwip/component.mk
+++ b/components/lwip/component.mk
@@ -36,4 +36,6 @@ lwip/src/netif/ppp/ppp.o: CFLAGS += -Wno-uninitialized
 lwip/src/netif/ppp/pppos.o: CFLAGS += -Wno-implicit-fallthrough
 endif

+lwip/src/core/tcp.o: CFLAGS += -Wno-type-limits
+
 COMPONENT_ADD_LDFRAGMENTS += linker.lf
diff --git a/components/soc/test/CMakeLists.txt b/components/soc/test/CMakeLists.txt
index 619190b156..c64b31f1b9 100644
--- a/components/soc/test/CMakeLists.txt
+++ b/components/soc/test/CMakeLists.txt
@@ -2,7 +2,7 @@ idf_build_get_property(soc_name IDF_TARGET)
 get_filename_component(soc_test "${CMAKE_CURRENT_SOURCE_DIR}/../${soc_name}/test" ABSOLUTE)

 if(EXISTS "${soc_test}")
-    set(srcs "${soc_test}")
+    set(src_dirs "${soc_test}")
     set(include_dirs "${soc_test}")
 endif()
diff --git 
a/components/spi_flash/test/CMakeLists.txt b/components/spi_flash/test/CMakeLists.txt index 831e142120..7c5bc7fe89 100644 --- a/components/spi_flash/test/CMakeLists.txt +++ b/components/spi_flash/test/CMakeLists.txt @@ -1,7 +1,4 @@ idf_component_register(SRC_DIRS "." INCLUDE_DIRS "." + EXCLUDE_SRCS "test_esp_flash.c" "test_partition_ext.c" REQUIRES unity test_utils spi_flash bootloader_support app_update) - -if(CONFIG_SPI_FLASH_USE_LEGACY_IMPL) - set(COMPONENT_SRCEXCLUDE "test_esp_flash.c" "test_partition_ext.c") -endif() diff --git a/examples/bluetooth/bluedroid/ble/ble_ancs/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_ancs/sdkconfig.defaults index 8dbe56f4f4..ee53a228d8 100644 --- a/examples/bluetooth/bluedroid/ble/ble_ancs/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_ancs/sdkconfig.defaults @@ -2,5 +2,5 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/ble_eddystone/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_eddystone/sdkconfig.defaults index f0f9acf9fe..00fb52113f 100644 --- a/examples/bluetooth/bluedroid/ble/ble_eddystone/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_eddystone/sdkconfig.defaults @@ -2,5 +2,5 @@ # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/ble_hid_device_demo/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_hid_device_demo/sdkconfig.defaults index 8dbe56f4f4..ee53a228d8 100644 --- a/examples/bluetooth/bluedroid/ble/ble_hid_device_demo/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_hid_device_demo/sdkconfig.defaults @@ -2,5 +2,5 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/ble_ibeacon/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_ibeacon/sdkconfig.defaults index f0f9acf9fe..00fb52113f 100644 --- a/examples/bluetooth/bluedroid/ble/ble_ibeacon/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_ibeacon/sdkconfig.defaults @@ -2,5 +2,5 @@ # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/ble_spp_client/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_spp_client/sdkconfig.defaults index f0f9acf9fe..00fb52113f 100644 --- a/examples/bluetooth/bluedroid/ble/ble_spp_client/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_spp_client/sdkconfig.defaults @@ -2,5 +2,5 @@ # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/ble_spp_server/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_spp_server/sdkconfig.defaults index ee02dcb06e..a6ee02f1d3 100644 
--- a/examples/bluetooth/bluedroid/ble/ble_spp_server/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_spp_server/sdkconfig.defaults @@ -6,8 +6,8 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n # # ESP32-specific config # diff --git a/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_client/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_client/sdkconfig.defaults index 5d486b83fa..00cfa32ecd 100644 --- a/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_client/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_client/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_CTRL_BLE_MAX_CONN=9 CONFIG_GATTS_NOTIFY_THROUGHPUT=y CONFIG_BTDM_MODEM_SLEEP=n diff --git a/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_server/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_server/sdkconfig.defaults index b007cab140..3c869dae0b 100644 --- a/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_server/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/ble_throughput/throughput_server/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_CTRL_BLE_MAX_CONN=9 CONFIG_EXAMPLE_GATTS_NOTIFY_THROUGHPUT=y CONFIG_BTDM_MODEM_SLEEP=n diff --git a/examples/bluetooth/bluedroid/ble/blufi/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/blufi/sdkconfig.defaults index aba8be090e..dcadf58eec 100644 --- a/examples/bluetooth/bluedroid/ble/blufi/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/blufi/sdkconfig.defaults @@ -6,8 +6,8 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_CTRL_PINNED_TO_CORE_0=y CONFIG_BTDM_CTRL_PINNED_TO_CORE_1=n CONFIG_BTDM_CTRL_PINNED_TO_CORE=0 diff --git a/examples/bluetooth/bluedroid/ble/gatt_client/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/gatt_client/sdkconfig.defaults index 8dbe56f4f4..ee53a228d8 100644 --- a/examples/bluetooth/bluedroid/ble/gatt_client/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/gatt_client/sdkconfig.defaults @@ -2,5 +2,5 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/gatt_security_client/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/gatt_security_client/sdkconfig.defaults index f0f9acf9fe..00fb52113f 100644 --- a/examples/bluetooth/bluedroid/ble/gatt_security_client/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/gatt_security_client/sdkconfig.defaults @@ -2,5 +2,5 @@ # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= 
+CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/gatt_security_server/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/gatt_security_server/sdkconfig.defaults index 8dbe56f4f4..ee53a228d8 100644 --- a/examples/bluetooth/bluedroid/ble/gatt_security_server/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/gatt_security_server/sdkconfig.defaults @@ -2,5 +2,5 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/gatt_server/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/gatt_server/sdkconfig.defaults index 8dbe56f4f4..ee53a228d8 100644 --- a/examples/bluetooth/bluedroid/ble/gatt_server/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/gatt_server/sdkconfig.defaults @@ -2,5 +2,5 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/bluedroid/ble/gatt_server_service_table/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/gatt_server_service_table/sdkconfig.defaults index e8587bbc2b..97761eb9d7 100644 --- a/examples/bluetooth/bluedroid/ble/gatt_server_service_table/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/gatt_server_service_table/sdkconfig.defaults @@ -6,8 +6,8 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n # # ESP32-specific config diff --git a/examples/bluetooth/bluedroid/ble/gattc_multi_connect/sdkconfig.defaults b/examples/bluetooth/bluedroid/ble/gattc_multi_connect/sdkconfig.defaults index 0f7207aeeb..249325cce6 100644 --- a/examples/bluetooth/bluedroid/ble/gattc_multi_connect/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/ble/gattc_multi_connect/sdkconfig.defaults @@ -2,6 +2,6 @@ # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_CTRL_BLE_MAX_CONN=9 diff --git a/examples/bluetooth/bluedroid/classic_bt/a2dp_sink/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/a2dp_sink/sdkconfig.defaults index e27427a3bd..52869c8bbb 100644 --- a/examples/bluetooth/bluedroid/classic_bt/a2dp_sink/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/classic_bt/a2dp_sink/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so BT stack is enabled and # Classic BT is enabled and BT_DRAM_RELEASE is disabled CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_BLUEDROID_ENABLED=y CONFIG_BT_CLASSIC_ENABLED=y CONFIG_BT_A2DP_ENABLE=y diff --git a/examples/bluetooth/bluedroid/classic_bt/a2dp_source/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/a2dp_source/sdkconfig.defaults index c7adef6a5f..e87de5c3bd 100644 --- a/examples/bluetooth/bluedroid/classic_bt/a2dp_source/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/classic_bt/a2dp_source/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so 
BT stack is enabled and # Classic BT is enabled CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_BLUEDROID_ENABLED=y CONFIG_BT_CLASSIC_ENABLED=y CONFIG_BT_A2DP_ENABLE=y diff --git a/examples/bluetooth/bluedroid/classic_bt/bt_discovery/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/bt_discovery/sdkconfig.defaults index 00e3d9971e..356b30e950 100644 --- a/examples/bluetooth/bluedroid/classic_bt/bt_discovery/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/classic_bt/bt_discovery/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so BT stack is enabled and # Classic BT is enabled and BT_DRAM_RELEASE is disabled CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_CLASSIC_ENABLED=y CONFIG_BT_A2DP_ENABLE=n CONFIG_BT_BLE_ENABLED=n diff --git a/examples/bluetooth/bluedroid/classic_bt/bt_spp_acceptor/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/bt_spp_acceptor/sdkconfig.defaults index ea64364f53..a303599521 100644 --- a/examples/bluetooth/bluedroid/classic_bt/bt_spp_acceptor/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/classic_bt/bt_spp_acceptor/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so BT stack is enabled # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_CLASSIC_ENABLED=y CONFIG_WIFI_ENABLED=n CONFIG_BT_SPP_ENABLED=y diff --git a/examples/bluetooth/bluedroid/classic_bt/bt_spp_initiator/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/bt_spp_initiator/sdkconfig.defaults index ea64364f53..a303599521 100644 --- a/examples/bluetooth/bluedroid/classic_bt/bt_spp_initiator/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/classic_bt/bt_spp_initiator/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so BT stack is enabled # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_CLASSIC_ENABLED=y CONFIG_WIFI_ENABLED=n CONFIG_BT_SPP_ENABLED=y diff --git a/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_acceptor/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_acceptor/sdkconfig.defaults index ea64364f53..a303599521 100644 --- a/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_acceptor/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_acceptor/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so BT stack is enabled # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_CLASSIC_ENABLED=y CONFIG_WIFI_ENABLED=n CONFIG_BT_SPP_ENABLED=y diff --git a/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_initiator/sdkconfig.defaults b/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_initiator/sdkconfig.defaults index ea64364f53..a303599521 100644 --- a/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_initiator/sdkconfig.defaults 
+++ b/examples/bluetooth/bluedroid/classic_bt/bt_spp_vfs_initiator/sdkconfig.defaults @@ -1,9 +1,9 @@ # Override some defaults so BT stack is enabled # and WiFi disabled by default in this example CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=y -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_CLASSIC_ENABLED=y CONFIG_WIFI_ENABLED=n CONFIG_BT_SPP_ENABLED=y diff --git a/examples/bluetooth/bluedroid/coex/a2dp_gatts_coex/sdkconfig.defaults b/examples/bluetooth/bluedroid/coex/a2dp_gatts_coex/sdkconfig.defaults index ca97b31243..ee81cabedd 100644 --- a/examples/bluetooth/bluedroid/coex/a2dp_gatts_coex/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/coex/a2dp_gatts_coex/sdkconfig.defaults @@ -1,8 +1,8 @@ # Override some defaults so BT stack is enabled and # Classic BT is enabled and BT_DRAM_RELEASE is disabled CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n CONFIG_BTDM_CTRL_MODE_BTDM=y CONFIG_BTDM_CTRL_PINNED_TO_CORE_0=y CONFIG_BTDM_CTRL_PINNED_TO_CORE_1=n diff --git a/examples/bluetooth/bluedroid/coex/gattc_gatts_coex/sdkconfig.defaults b/examples/bluetooth/bluedroid/coex/gattc_gatts_coex/sdkconfig.defaults index 8dbe56f4f4..ee53a228d8 100644 --- a/examples/bluetooth/bluedroid/coex/gattc_gatts_coex/sdkconfig.defaults +++ b/examples/bluetooth/bluedroid/coex/gattc_gatts_coex/sdkconfig.defaults @@ -2,5 +2,5 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_node/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_node/sdkconfig.defaults index 5920b6a5d1..c017190adb 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_node/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_node/sdkconfig.defaults @@ -5,8 +5,8 @@ CONFIG_ESPTOOLPY_BAUD_921600B=y CONFIG_ESPTOOLPY_MONITOR_BAUD_921600B=y CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -42,7 +42,7 @@ CONFIG_BLE_MESH_RX_SEG_MSG_COUNT=1 CONFIG_BLE_MESH_RX_SDU_MAX=384 CONFIG_BLE_MESH_TX_SEG_MAX=32 CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BLE_MESH_CFG_CLI=y CONFIG_BT_BTU_TASK_STACK_SIZE=4512 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_provisioner/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_provisioner/sdkconfig.defaults index 18210534d7..2cfcfa14a6 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_provisioner/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_console/ble_mesh_provisioner/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -29,8 +29,8 @@ 
CONFIG_BLE_MESH_NET_BUF_POOL_USAGE=y CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=y CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BLE_MESH_MSG_CACHE_SIZE=10 CONFIG_BLE_MESH_ADV_BUF_COUNT=60 CONFIG_BLE_MESH_TX_SEG_MSG_COUNT=6 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_client/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_client/sdkconfig.defaults index b1a37395f3..9412f7d91b 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_client/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_client/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -30,8 +30,8 @@ CONFIG_BLE_MESH_NET_BUF_POOL_USAGE=y CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=n CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BT_BTU_TASK_STACK_SIZE=4512 CONFIG_BLE_MESH_CFG_CLI=y CONFIG_BLE_MESH_GENERIC_ONOFF_CLI=y diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_server/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_server/sdkconfig.defaults index 41a148bdf0..970540d62b 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_server/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_fast_provision/ble_mesh_fast_prov_server/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -38,8 +38,8 @@ CONFIG_BLE_MESH_MODEL_GROUP_COUNT=3 CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=y CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BT_BTU_TASK_STACK_SIZE=4512 CONFIG_BLE_MESH_CFG_CLI=y CONFIG_BLE_MESH_CRPL=60 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_client/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_client/sdkconfig.defaults index 58f6fe8208..6a5db41eeb 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_client/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_client/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -26,7 +26,7 @@ CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=y CONFIG_BLE_MESH_NODE_ID_TIMEOUT=60 CONFIG_BLE_MESH_PROXY_FILTER_SIZE=1 -CONFIG_BLE_MESH_IV_UPDATE_TEST= +CONFIG_BLE_MESH_IV_UPDATE_TEST=n CONFIG_BLE_MESH_SUBNET_COUNT=1 CONFIG_BLE_MESH_APP_KEY_COUNT=1 
CONFIG_BLE_MESH_MODEL_KEY_COUNT=1 @@ -40,8 +40,8 @@ CONFIG_BLE_MESH_RX_SEG_MSG_COUNT=1 CONFIG_BLE_MESH_RX_SDU_MAX=384 CONFIG_BLE_MESH_TX_SEG_MAX=32 CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= -CONFIG_BLE_MESH_CFG_CLI= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n +CONFIG_BLE_MESH_CFG_CLI=n CONFIG_BLE_MESH_GENERIC_ONOFF_CLI=y CONFIG_BT_BTU_TASK_STACK_SIZE=4512 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_server/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_server/sdkconfig.defaults index 2d34f548b7..0883ed0b60 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_server/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_node/onoff_server/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -39,7 +39,7 @@ CONFIG_BLE_MESH_RX_SEG_MSG_COUNT=1 CONFIG_BLE_MESH_RX_SDU_MAX=384 CONFIG_BLE_MESH_TX_SEG_MAX=32 CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BLE_MESH_CFG_CLI=y CONFIG_BT_BTU_TASK_STACK_SIZE=4512 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_provisioner/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_provisioner/sdkconfig.defaults index e62771fe62..cabb4af85b 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_provisioner/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_provisioner/sdkconfig.defaults @@ -2,8 +2,8 @@ # by default in this example CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -30,8 +30,8 @@ CONFIG_BLE_MESH_NET_BUF_POOL_USAGE=y CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=n CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BLE_MESH_ADV_BUF_COUNT=60 CONFIG_BLE_MESH_TX_SEG_MSG_COUNT=6 CONFIG_BLE_MESH_RX_SEG_MSG_COUNT=6 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_vendor_models/fast_prov_vendor_model/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_vendor_models/fast_prov_vendor_model/sdkconfig.defaults index 5eb9cdb3b1..5ebe2b8c97 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_vendor_models/fast_prov_vendor_model/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_vendor_models/fast_prov_vendor_model/sdkconfig.defaults @@ -18,8 +18,8 @@ CONFIG_BLE_MESH_NET_BUF_POOL_USAGE=y CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=y CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BT_BTU_TASK_STACK_SIZE=4512 CONFIG_BLE_MESH_CFG_CLI=y CONFIG_BLE_MESH_CRPL=60 diff --git a/examples/bluetooth/esp_ble_mesh/ble_mesh_wifi_coexist/sdkconfig.defaults b/examples/bluetooth/esp_ble_mesh/ble_mesh_wifi_coexist/sdkconfig.defaults index ce7ba23e70..7beba052ce 100644 --- a/examples/bluetooth/esp_ble_mesh/ble_mesh_wifi_coexist/sdkconfig.defaults +++ b/examples/bluetooth/esp_ble_mesh/ble_mesh_wifi_coexist/sdkconfig.defaults 
@@ -6,8 +6,8 @@ CONFIG_ESP32_DEFAULT_CPU_FREQ_MHZ=240 CONFIG_MEMMAP_SMP=y CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BTDM_MODEM_SLEEP=n CONFIG_BTDM_BLE_SCAN_DUPL=y CONFIG_BTDM_SCAN_DUPL_TYPE=2 @@ -38,8 +38,8 @@ CONFIG_BLE_MESH_NET_BUF_POOL_USAGE=y CONFIG_BLE_MESH_PB_GATT=y CONFIG_BLE_MESH_GATT_PROXY_SERVER=y CONFIG_BLE_MESH_RELAY=y -CONFIG_BLE_MESH_LOW_POWER= -CONFIG_BLE_MESH_FRIEND= +CONFIG_BLE_MESH_LOW_POWER=n +CONFIG_BLE_MESH_FRIEND=n CONFIG_BT_BTU_TASK_STACK_SIZE=4512 CONFIG_BLE_MESH_CFG_CLI=y CONFIG_BLE_MESH_GENERIC_ONOFF_CLI=y @@ -63,18 +63,18 @@ CONFIG_ESP32_WIFI_TX_BA_WIN=32 CONFIG_ESP32_WIFI_AMPDU_RX_ENABLED=y CONFIG_ESP32_WIFI_RX_BA_WIN=32 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_LWIP_TCP_SND_BUF_DEFAULT=65534 CONFIG_LWIP_TCP_WND_DEFAULT=65534 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n CONFIG_ESPTOOLPY_FLASHMODE_QIO=y CONFIG_ESPTOOLPY_FLASHFREQ_40M=y diff --git a/examples/bluetooth/hci/controller_hci_uart/sdkconfig.defaults b/examples/bluetooth/hci/controller_hci_uart/sdkconfig.defaults index 306b16d9f3..798c444b50 100644 --- a/examples/bluetooth/hci/controller_hci_uart/sdkconfig.defaults +++ b/examples/bluetooth/hci/controller_hci_uart/sdkconfig.defaults @@ -5,8 +5,8 @@ # BT config # CONFIG_BT_ENABLED=y -CONFIG_BTDM_CTRL_MODE_BLE_ONLY= -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= +CONFIG_BTDM_CTRL_MODE_BLE_ONLY=n +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n CONFIG_BTDM_CTRL_MODE_BTDM=y CONFIG_BTDM_CTRL_BLE_MAX_CONN=9 CONFIG_BTDM_CTRL_BR_EDR_MAX_ACL_CONN=7 diff --git a/examples/bluetooth/hci/controller_vhci_ble_adv/sdkconfig.defaults b/examples/bluetooth/hci/controller_vhci_ble_adv/sdkconfig.defaults index 3b0474335e..3bad2fdeed 100644 --- a/examples/bluetooth/hci/controller_vhci_ble_adv/sdkconfig.defaults +++ b/examples/bluetooth/hci/controller_vhci_ble_adv/sdkconfig.defaults @@ -8,5 +8,3 @@ CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n CONFIG_BTDM_CTRL_MODE_BTDM=n -CONFIG_BT_BLUEDROID_ENABLED=n -CONFIG_BT_CONTROLLER_ONLY=y diff --git a/examples/bluetooth/nimble/blecent/blecent_test.py b/examples/bluetooth/nimble/blecent/blecent_test.py index 0933a71e2d..db5c5e5a84 100644 --- a/examples/bluetooth/nimble/blecent/blecent_test.py +++ b/examples/bluetooth/nimble/blecent/blecent_test.py @@ -16,42 +16,20 @@ from __future__ import print_function import os -import sys import re import uuid import subprocess -try: - # This environment variable is expected on the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF -except ImportError as e: - print(e) - print("\nCheck your IDF_PATH\nOR") - print("Try `export TEST_FW_PATH=$IDF_PATH/tools/tiny-test-fw` for resolving the issue\nOR") - print("Try `pip install -r $IDF_PATH/tools/tiny-test-fw/requirements.txt` for resolving the issue") - import IDF - -try: - import lib_ble_client -except ImportError: - lib_ble_client_path = os.getenv("IDF_PATH") + "/tools/ble" - if lib_ble_client_path and lib_ble_client_path not in sys.path: - sys.path.insert(0, lib_ble_client_path) - import lib_ble_client - - -import Utility +from 
tiny_test_fw import Utility +import ttfw_idf +from ble import lib_ble_client # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -# > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw -@IDF.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") def test_example_app_ble_central(env, extra_data): """ Steps: @@ -72,7 +50,7 @@ def test_example_app_ble_central(env, extra_data): # Get binary file binary_file = os.path.join(dut.app.binary_path, "blecent.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("blecent_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance("blecent_bin_size", "{}KB".format(bin_size // 1024)) # Upload binary and start testing Utility.console_log("Starting blecent example test app") diff --git a/examples/bluetooth/nimble/blecent/sdkconfig.defaults b/examples/bluetooth/nimble/blecent/sdkconfig.defaults index 4fa07777fe..c829fc5c00 100644 --- a/examples/bluetooth/nimble/blecent/sdkconfig.defaults +++ b/examples/bluetooth/nimble/blecent/sdkconfig.defaults @@ -6,7 +6,7 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= -CONFIG_BT_BLUEDROID_ENABLED= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n +CONFIG_BT_BLUEDROID_ENABLED=n CONFIG_BT_NIMBLE_ENABLED=y diff --git a/examples/bluetooth/nimble/blehr/blehr_test.py b/examples/bluetooth/nimble/blehr/blehr_test.py index 7cc10ed889..73d2c6ad5b 100644 --- a/examples/bluetooth/nimble/blehr/blehr_test.py +++ b/examples/bluetooth/nimble/blehr/blehr_test.py @@ -16,41 +16,19 @@ from __future__ import print_function import os -import sys import re import threading import traceback import Queue import subprocess -try: - # This environment variable is expected on the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF -except ImportError as e: - print(e) - print("\nCheck your IDF_PATH\nOR") - print("Try `export TEST_FW_PATH=$IDF_PATH/tools/tiny-test-fw` for resolving the issue\nOR") - print("Try `pip install -r $IDF_PATH/tools/tiny-test-fw/requirements.txt` for resolving the issue\n") - import IDF - -try: - import lib_ble_client -except ImportError: - lib_ble_client_path = os.getenv("IDF_PATH") + "/tools/ble" - if lib_ble_client_path and lib_ble_client_path not in sys.path: - sys.path.insert(0, lib_ble_client_path) - import lib_ble_client - - -import Utility +from tiny_test_fw import Utility +import ttfw_idf +from ble import lib_ble_client # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -# > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw def blehr_client_task(hr_obj, dut_addr): @@ -114,7 +92,7 @@ class BleHRThread(threading.Thread): self.exceptions_queue.put(traceback.format_exc(), block=False) -@IDF.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") def test_example_app_ble_hr(env, extra_data): """ Steps: @@ -133,8 +111,8 @@ def test_example_app_ble_hr(env, extra_data): # Get binary file binary_file = os.path.join(dut.app.binary_path, "blehr.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("blehr_bin_size", "{}KB".format(bin_size // 1024)) - 
IDF.check_performance("blehr_bin_size", bin_size // 1024) + ttfw_idf.log_performance("blehr_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("blehr_bin_size", bin_size // 1024) # Upload binary and start testing Utility.console_log("Starting blehr simple example test app") diff --git a/examples/bluetooth/nimble/blehr/sdkconfig.defaults b/examples/bluetooth/nimble/blehr/sdkconfig.defaults index 4fa07777fe..c829fc5c00 100644 --- a/examples/bluetooth/nimble/blehr/sdkconfig.defaults +++ b/examples/bluetooth/nimble/blehr/sdkconfig.defaults @@ -6,7 +6,7 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= -CONFIG_BT_BLUEDROID_ENABLED= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n +CONFIG_BT_BLUEDROID_ENABLED=n CONFIG_BT_NIMBLE_ENABLED=y diff --git a/examples/bluetooth/nimble/blemesh/sdkconfig.defaults b/examples/bluetooth/nimble/blemesh/sdkconfig.defaults index 7c8e90fad9..991dda58da 100644 --- a/examples/bluetooth/nimble/blemesh/sdkconfig.defaults +++ b/examples/bluetooth/nimble/blemesh/sdkconfig.defaults @@ -6,8 +6,8 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= -CONFIG_BT_BLUEDROID_ENABLED= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n +CONFIG_BT_BLUEDROID_ENABLED=n CONFIG_BT_NIMBLE_ENABLED=y CONFIG_BT_NIMBLE_MESH=y diff --git a/examples/bluetooth/nimble/bleprph/bleprph_test.py b/examples/bluetooth/nimble/bleprph/bleprph_test.py index 97bfeca97a..e89d572dee 100644 --- a/examples/bluetooth/nimble/bleprph/bleprph_test.py +++ b/examples/bluetooth/nimble/bleprph/bleprph_test.py @@ -16,35 +16,15 @@ from __future__ import print_function import os -import sys import re import Queue import traceback import threading import subprocess -try: - # This environment variable is expected on the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF -except ImportError as e: - print(e) - print("Try `export TEST_FW_PATH=$IDF_PATH/tools/tiny-test-fw` for resolving the issue") - print("Try `pip install -r $IDF_PATH/tools/tiny-test-fw/requirements.txt` for resolving the issue") - import IDF - -try: - import lib_ble_client -except ImportError: - lib_ble_client_path = os.getenv("IDF_PATH") + "/tools/ble" - if lib_ble_client_path and lib_ble_client_path not in sys.path: - sys.path.insert(0, lib_ble_client_path) - import lib_ble_client - - -import Utility +from tiny_test_fw import Utility +import ttfw_idf +from ble import lib_ble_client # When running on local machine execute the following before running this script # > make app bootloader @@ -135,7 +115,7 @@ class BlePrphThread(threading.Thread): self.exceptions_queue.put(traceback.format_exc(), block=False) -@IDF.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") def test_example_app_ble_peripheral(env, extra_data): """ Steps: @@ -154,8 +134,8 @@ def test_example_app_ble_peripheral(env, extra_data): # Get binary file binary_file = os.path.join(dut.app.binary_path, "bleprph.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("bleprph_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("bleprph_bin_size", bin_size // 1024) + ttfw_idf.log_performance("bleprph_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("bleprph_bin_size", bin_size // 1024) # Upload 
binary and start testing Utility.console_log("Starting bleprph simple example test app") diff --git a/examples/bluetooth/nimble/bleprph/sdkconfig.defaults b/examples/bluetooth/nimble/bleprph/sdkconfig.defaults index 4fa07777fe..c829fc5c00 100644 --- a/examples/bluetooth/nimble/bleprph/sdkconfig.defaults +++ b/examples/bluetooth/nimble/bleprph/sdkconfig.defaults @@ -6,7 +6,7 @@ # CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= -CONFIG_BT_BLUEDROID_ENABLED= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n +CONFIG_BT_BLUEDROID_ENABLED=n CONFIG_BT_NIMBLE_ENABLED=y diff --git a/examples/get-started/blink/example_test.py b/examples/get-started/blink/example_test.py index 91b0f9bc0e..288aefe50f 100644 --- a/examples/get-started/blink/example_test.py +++ b/examples/get-started/blink/example_test.py @@ -5,20 +5,10 @@ from __future__ import print_function from __future__ import unicode_literals import re import os -import sys import hashlib -try: - import IDF -except ImportError: - # This environment variable is expected on the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - - import IDF - -import Utility +from tiny_test_fw import Utility +import ttfw_idf def verify_elf_sha256_embedding(dut): @@ -38,13 +28,13 @@ def verify_elf_sha256_embedding(dut): raise ValueError('ELF file SHA256 mismatch') -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_blink(env, extra_data): dut = env.get_dut("blink", "examples/get-started/blink") binary_file = os.path.join(dut.app.binary_path, "blink.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("blink_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("blink_bin_size", bin_size // 1024) + ttfw_idf.log_performance("blink_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("blink_bin_size", bin_size // 1024) dut.start_app() diff --git a/examples/peripherals/can/can_alert_and_recovery/example_test.py b/examples/peripherals/can/can_alert_and_recovery/example_test.py index 403e056b28..640c51d28c 100644 --- a/examples/peripherals/can/can_alert_and_recovery/example_test.py +++ b/examples/peripherals/can/can_alert_and_recovery/example_test.py @@ -1,27 +1,15 @@ # Need Python 3 string formatting functions from __future__ import print_function -import os -import sys - -try: - import IDF -except ImportError: - # The test cause is dependent on the Tiny Test Framework. Ensure the - # `TEST_FW_PATH` environment variable is set to `$IDF_PATH/tools/tiny-test-fw` - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf # CAN Self Test Example constants STR_EXPECT = ("CAN Alert and Recovery: Driver installed", "CAN Alert and Recovery: Driver uninstalled") EXPECT_TIMEOUT = 20 -@IDF.idf_example_test(env_tag='Example_CAN1', ignore=True) +@ttfw_idf.idf_example_test(env_tag='Example_CAN1') def test_can_alert_and_recovery_example(env, extra_data): - # Get device under test, flash and start example. 
"dut4" must be defined in EnvConfig dut = env.get_dut('dut1', 'examples/peripherals/can/can_alert_and_recovery') dut.start_app() diff --git a/examples/peripherals/can/can_network/example_test.py b/examples/peripherals/can/can_network/example_test.py index b0ccf854a4..dc025052e6 100644 --- a/examples/peripherals/can/can_network/example_test.py +++ b/examples/peripherals/can/can_network/example_test.py @@ -1,18 +1,9 @@ # Need Python 3 string formatting functions from __future__ import print_function -import os -import sys from threading import Thread -try: - import IDF -except ImportError: - # The test cause is dependent on the Tiny Test Framework. Ensure the - # `TEST_FW_PATH` environment variable is set to `$IDF_PATH/tools/tiny-test-fw` - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF + +import ttfw_idf # Define tuple of strings to expect for each DUT. master_expect = ("CAN Master: Driver installed", "CAN Master: Driver uninstalled") @@ -36,7 +27,7 @@ def dut_thread_callback(**kwargs): result[0] = True -@IDF.idf_example_test(env_tag='Example_CAN2', ignore=True) +@ttfw_idf.idf_example_test(env_tag='Example_CAN2') def test_can_network_example(env, extra_data): # Get device under test. "dut1", "dut2", and "dut3" must be properly defined in EnvConfig diff --git a/examples/peripherals/can/can_self_test/example_test.py b/examples/peripherals/can/can_self_test/example_test.py index 20f1727688..5cc6999758 100644 --- a/examples/peripherals/can/can_self_test/example_test.py +++ b/examples/peripherals/can/can_self_test/example_test.py @@ -1,17 +1,7 @@ # Need Python 3 string formatting functions from __future__ import print_function -import os -import sys -try: - import IDF -except ImportError: - # The test cause is dependent on the Tiny Test Framework. Ensure the - # `TEST_FW_PATH` environment variable is set to `$IDF_PATH/tools/tiny-test-fw` - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf # CAN Self Test Example constants @@ -19,9 +9,9 @@ STR_EXPECT = ("CAN Self Test: Driver installed", "CAN Self Test: Driver uninstal EXPECT_TIMEOUT = 20 -@IDF.idf_example_test(env_tag='Example_CAN1', ignore=True) +@ttfw_idf.idf_example_test(env_tag='Example_CAN1') def test_can_self_test_example(env, extra_data): - # Get device under test, flash and start example. "dut4" must be defined in EnvConfig + # Get device under test, flash and start example. "dut1" must be defined in EnvConfig dut = env.get_dut('dut1', 'examples/peripherals/can/can_self_test') dut.start_app() diff --git a/examples/peripherals/i2c/i2c_tools/example_test.py b/examples/peripherals/i2c/i2c_tools/example_test.py index a42a0f0740..3e0b265a27 100644 --- a/examples/peripherals/i2c/i2c_tools/example_test.py +++ b/examples/peripherals/i2c/i2c_tools/example_test.py @@ -1,19 +1,11 @@ from __future__ import print_function -import os -import sys + +import ttfw_idf EXPECT_TIMEOUT = 20 -try: - import IDF -except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -@IDF.idf_example_test(env_tag='Example_I2C_CCS811_SENSOR') +@ttfw_idf.idf_example_test(env_tag='Example_I2C_CCS811_SENSOR') def test_i2ctools_example(env, extra_data): # Get device under test, flash and start example. 
"i2ctool" must be defined in EnvConfig dut = env.get_dut('i2ctools', 'examples/peripherals/i2c/i2c_tools') diff --git a/examples/peripherals/sdio/sdio_test.py b/examples/peripherals/sdio/sdio_test.py index 8361dc3c86..9470e5d40b 100644 --- a/examples/peripherals/sdio/sdio_test.py +++ b/examples/peripherals/sdio/sdio_test.py @@ -12,24 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" example of writing test with TinyTestFW """ -import os -import sys - -try: - import TinyFW -except ImportError: - # if we want to run test case outside `tiny-test-fw` folder, - # we need to insert tiny-test-fw path into sys path - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import TinyFW - -import IDF +from tiny_test_fw import TinyFW +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_SDIO", ignore=True) +@ttfw_idf.idf_example_test(env_tag="Example_SDIO", ignore=True) def test_example_sdio_communication(env, extra_data): """ Configurations @@ -132,5 +119,5 @@ def test_example_sdio_communication(env, extra_data): if __name__ == '__main__': - TinyFW.set_default_config(env_config_file="EnvConfigTemplate.yml", dut=IDF.IDFDUT) + TinyFW.set_default_config(env_config_file="EnvConfigTemplate.yml", dut=ttfw_idf.IDFDUT) test_example_sdio_communication() diff --git a/examples/protocols/asio/chat_client/asio_chat_client_test.py b/examples/protocols/asio/chat_client/asio_chat_client_test.py index 7a507c4b7b..d8dc8a7945 100644 --- a/examples/protocols/asio/chat_client/asio_chat_client_test.py +++ b/examples/protocols/asio/chat_client/asio_chat_client_test.py @@ -1,21 +1,10 @@ import re import os -import sys import socket from threading import Thread import time -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf global g_client_response global g_msg_to_client @@ -55,7 +44,7 @@ def chat_server_sketch(my_ip): print("server closed") -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_asio_chat_client(env, extra_data): """ steps: | @@ -73,8 +62,8 @@ def test_examples_protocol_asio_chat_client(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "asio_chat_client.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("asio_chat_client_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("asio_chat_client_size", bin_size // 1024) + ttfw_idf.log_performance("asio_chat_client_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("asio_chat_client_size", bin_size // 1024) # 1. 
start a tcp server on the host host_ip = get_my_ip() thread1 = Thread(target=chat_server_sketch, args=(host_ip,)) diff --git a/examples/protocols/asio/chat_server/asio_chat_server_test.py b/examples/protocols/asio/chat_server/asio_chat_server_test.py index cc273b447f..5e6f3e72e5 100644 --- a/examples/protocols/asio/chat_server/asio_chat_server_test.py +++ b/examples/protocols/asio/chat_server/asio_chat_server_test.py @@ -1,23 +1,11 @@ import re import os -import sys import socket - -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_asio_chat_server(env, extra_data): """ steps: | @@ -31,8 +19,8 @@ def test_examples_protocol_asio_chat_server(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "asio_chat_server.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("asio_chat_server_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("asio_chat_server_size", bin_size // 1024) + ttfw_idf.log_performance("asio_chat_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("asio_chat_server_size", bin_size // 1024) # 1. start test dut1.start_app() # 2. get the server IP address diff --git a/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py b/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py index 440841cfc4..3c19228835 100644 --- a/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py +++ b/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py @@ -1,24 +1,11 @@ import re import os -import sys import socket - -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_asio_tcp_server(env, extra_data): """ steps: | @@ -33,8 +20,8 @@ def test_examples_protocol_asio_tcp_server(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "asio_tcp_echo_server.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("asio_tcp_echo_server_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("asio_tcp_echo_server_size", bin_size // 1024) + ttfw_idf.log_performance("asio_tcp_echo_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("asio_tcp_echo_server_size", bin_size // 1024) # 1. start test dut1.start_app() # 2. 
get the server IP address diff --git a/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py b/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py index feaf98f633..d06ef66c87 100644 --- a/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py +++ b/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py @@ -1,24 +1,11 @@ import re import os -import sys import socket - -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_asio_udp_server(env, extra_data): """ steps: | @@ -33,8 +20,8 @@ def test_examples_protocol_asio_udp_server(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "asio_udp_echo_server.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("asio_udp_echo_server_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("asio_udp_echo_server_size", bin_size // 1024) + ttfw_idf.log_performance("asio_udp_echo_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("asio_udp_echo_server_size", bin_size // 1024) # 1. start test dut1.start_app() # 2. get the server IP address diff --git a/examples/protocols/esp_http_client/esp_http_client_test.py b/examples/protocols/esp_http_client/esp_http_client_test.py index 1c9c1d5701..c0700f567e 100644 --- a/examples/protocols/esp_http_client/esp_http_client_test.py +++ b/examples/protocols/esp_http_client/esp_http_client_test.py @@ -1,22 +1,10 @@ import re import os -import sys -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. 
- # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_WIFI", ignore=True) +@ttfw_idf.idf_example_test(env_tag="Example_WIFI", ignore=True) def test_examples_protocol_esp_http_client(env, extra_data): """ steps: | @@ -27,8 +15,8 @@ def test_examples_protocol_esp_http_client(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "esp-http-client-example.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("esp_http_client_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("esp_http_client_bin_size", bin_size // 1024) + ttfw_idf.log_performance("esp_http_client_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("esp_http_client_bin_size", bin_size // 1024) # start test dut1.start_app() dut1.expect("Connected to AP, begin http example", timeout=30) diff --git a/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py b/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py index f3e7033d77..dd3bea1c8e 100644 --- a/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py +++ b/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py @@ -19,29 +19,14 @@ from __future__ import print_function from __future__ import unicode_literals import re import os -import sys -try: - import IDF -except ImportError: - # This environment variable is expected on the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - - import IDF - -import Utility +from tiny_test_fw import Utility +import ttfw_idf +from idf_http_server_test import test as client # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -# > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw - -# Import client module -expath = os.path.dirname(os.path.realpath(__file__)) -client = Utility.load_source("client", expath + "/scripts/test.py") - # Due to connectivity issues (between runner host and DUT) in the runner environment, # some of the `advanced_tests` are ignored. These tests are intended for verifying @@ -49,7 +34,7 @@ client = Utility.load_source("client", expath + "/scripts/test.py") # of large HTTP packets and malformed requests, running multiple parallel sessions, etc. # It is advised that all these tests be run locally, when making changes or adding new # features to this component. 
-@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_http_server_advanced(env, extra_data): # Acquire DUT dut1 = env.get_dut("http_server", "examples/protocols/http_server/advanced_tests") @@ -57,8 +42,8 @@ def test_examples_protocol_http_server_advanced(env, extra_data): # Get binary file binary_file = os.path.join(dut1.app.binary_path, "tests.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("http_server_bin_size", bin_size // 1024) + ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("http_server_bin_size", bin_size // 1024) # Upload binary and start testing Utility.console_log("Starting http_server advanced test app") diff --git a/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py b/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py index 43174bc52c..b0a6f5c135 100644 --- a/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py +++ b/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py @@ -21,31 +21,18 @@ from builtins import str from builtins import range import re import os -import sys import random -try: - import IDF -except ImportError: - # This environment variable is expected on the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import Utility +from tiny_test_fw import Utility +import ttfw_idf +from idf_http_server_test import adder as client # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -# > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw - -# Import client module -expath = os.path.dirname(os.path.realpath(__file__)) -client = Utility.load_source("client", expath + "/scripts/adder.py") -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_http_server_persistence(env, extra_data): # Acquire DUT dut1 = env.get_dut("http_server", "examples/protocols/http_server/persistent_sockets") @@ -53,8 +40,8 @@ def test_examples_protocol_http_server_persistence(env, extra_data): # Get binary file binary_file = os.path.join(dut1.app.binary_path, "persistent_sockets.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("http_server_bin_size", bin_size // 1024) + ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("http_server_bin_size", bin_size // 1024) # Upload binary and start testing Utility.console_log("Starting http_server persistance test app") diff --git a/examples/protocols/http_server/simple/http_server_simple_test.py b/examples/protocols/http_server/simple/http_server_simple_test.py index 4cd344db83..62c2ccdfaf 100644 --- a/examples/protocols/http_server/simple/http_server_simple_test.py +++ b/examples/protocols/http_server/simple/http_server_simple_test.py @@ -20,33 +20,20 @@ from __future__ import unicode_literals from builtins import range import re import os -import sys import string import random -try: - import IDF -except ImportError: - # This environment variable is expected on 
the host machine - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) +from tiny_test_fw import Utility +import ttfw_idf +from idf_http_server_test import client - import IDF - -import Utility # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -# > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw - -# Import client module -expath = os.path.dirname(os.path.realpath(__file__)) -client = Utility.load_source("client", expath + "/scripts/client.py") -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_http_server_simple(env, extra_data): # Acquire DUT dut1 = env.get_dut("http_server", "examples/protocols/http_server/simple") @@ -54,8 +41,8 @@ def test_examples_protocol_http_server_simple(env, extra_data): # Get binary file binary_file = os.path.join(dut1.app.binary_path, "simple.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("http_server_bin_size", bin_size // 1024) + ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("http_server_bin_size", bin_size // 1024) # Upload binary and start testing Utility.console_log("Starting http_server simple test app") diff --git a/examples/protocols/https_request/example_test.py b/examples/protocols/https_request/example_test.py index ce8453edb4..dddecd749a 100644 --- a/examples/protocols/https_request/example_test.py +++ b/examples/protocols/https_request/example_test.py @@ -1,21 +1,10 @@ import re import os -import sys -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_WIFI", ignore=True) +@ttfw_idf.idf_example_test(env_tag="Example_WIFI", ignore=True) def test_examples_protocol_https_request(env, extra_data): """ steps: | @@ -27,8 +16,8 @@ def test_examples_protocol_https_request(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "https_request.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("https_request_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("https_request_bin_size", bin_size // 1024) + ttfw_idf.log_performance("https_request_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("https_request_bin_size", bin_size // 1024) # start test dut1.start_app() dut1.expect("Connection established...", timeout=30) diff --git a/examples/protocols/mdns/mdns_example_test.py b/examples/protocols/mdns/mdns_example_test.py index cb527989a5..b999c1889e 100644 --- a/examples/protocols/mdns/mdns_example_test.py +++ b/examples/protocols/mdns/mdns_example_test.py @@ -1,6 +1,5 @@ import re import os -import sys import socket import time import struct @@ -8,21 +7,8 @@ import dpkt import dpkt.dns from threading import Thread - -# this is a test case write with tiny-test-fw. 
-# to run test cases outside tiny-test-fw, -# we need to set environment variable `TEST_FW_PATH`, -# then get and insert `TEST_FW_PATH` to sys path before import FW module - -try: - import IDF -except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import DUT +from tiny_test_fw import DUT +import ttfw_idf g_run_server = True g_done = False @@ -76,7 +62,7 @@ def mdns_server(esp_host): continue -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_mdns(env, extra_data): global g_run_server """ @@ -90,8 +76,8 @@ def test_examples_protocol_mdns(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "mdns-test.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("mdns-test_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("mdns-test_bin_size", bin_size // 1024) + ttfw_idf.log_performance("mdns-test_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("mdns-test_bin_size", bin_size // 1024) # 1. start mdns application dut1.start_app() # 2. get the dut host name (and IP address) diff --git a/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py b/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py index ef9d1a9d64..c2a458739e 100644 --- a/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py +++ b/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py @@ -8,20 +8,8 @@ import ssl import paho.mqtt.client as mqtt from threading import Thread, Event - -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import DUT +from tiny_test_fw import DUT +import ttfw_idf event_client_connected = Event() @@ -53,7 +41,7 @@ def on_message(client, userdata, msg): message_log += "Received data:" + msg.topic + " " + payload + "\n" -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_mqtt_ssl(env, extra_data): broker_url = "" broker_port = 0 @@ -68,9 +56,9 @@ def test_examples_protocol_mqtt_ssl(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "mqtt_ssl.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("mqtt_ssl_bin_size", "{}KB" - .format(bin_size // 1024)) - IDF.check_performance("mqtt_ssl_size", bin_size // 1024) + ttfw_idf.log_performance("mqtt_ssl_bin_size", "{}KB" + .format(bin_size // 1024)) + ttfw_idf.check_performance("mqtt_ssl_size", bin_size // 1024) # Look for host:port in sdkconfig try: value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()["CONFIG_BROKER_URI"]) diff --git a/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py b/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py index 05e51420ed..c3ea1a9106 100644 --- a/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py +++ b/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py @@ -6,20 +6,8 @@ from threading import Thread import struct import time - -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. 
- # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import DUT +from tiny_test_fw import DUT +import ttfw_idf msgid = -1 @@ -65,7 +53,7 @@ def mqqt_server_sketch(my_ip, port): print("server closed") -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_mqtt_qos1(env, extra_data): global msgid """ @@ -79,8 +67,8 @@ def test_examples_protocol_mqtt_qos1(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "mqtt_tcp.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("mqtt_tcp_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("mqtt_tcp_size", bin_size // 1024) + ttfw_idf.log_performance("mqtt_tcp_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("mqtt_tcp_size", bin_size // 1024) # 1. start mqtt broker sketch host_ip = get_my_ip() thread1 = Thread(target=mqqt_server_sketch, args=(host_ip,1883)) diff --git a/examples/protocols/mqtt/ws/mqtt_ws_example_test.py b/examples/protocols/mqtt/ws/mqtt_ws_example_test.py index 58b8d5e37a..2531480834 100644 --- a/examples/protocols/mqtt/ws/mqtt_ws_example_test.py +++ b/examples/protocols/mqtt/ws/mqtt_ws_example_test.py @@ -7,20 +7,8 @@ import sys import paho.mqtt.client as mqtt from threading import Thread, Event - -try: - import IDF -except Exception: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import DUT +from tiny_test_fw import DUT +import ttfw_idf event_client_connected = Event() event_stop_client = Event() @@ -51,7 +39,7 @@ def on_message(client, userdata, msg): message_log += "Received data:" + msg.topic + " " + payload + "\n" -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_mqtt_ws(env, extra_data): broker_url = "" broker_port = 0 @@ -66,8 +54,8 @@ def test_examples_protocol_mqtt_ws(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "mqtt_websocket.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("mqtt_websocket_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("mqtt_websocket_size", bin_size // 1024) + ttfw_idf.log_performance("mqtt_websocket_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("mqtt_websocket_size", bin_size // 1024) # Look for host:port in sdkconfig try: value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()["CONFIG_BROKER_URI"]) diff --git a/examples/protocols/mqtt/wss/mqtt_wss_example_test.py b/examples/protocols/mqtt/wss/mqtt_wss_example_test.py index 15ca2834a8..f8975bc7c3 100644 --- a/examples/protocols/mqtt/wss/mqtt_wss_example_test.py +++ b/examples/protocols/mqtt/wss/mqtt_wss_example_test.py @@ -8,20 +8,8 @@ import ssl import paho.mqtt.client as mqtt from threading import Thread, Event - -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. 
- # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import DUT +from tiny_test_fw import DUT +import ttfw_idf event_client_connected = Event() event_stop_client = Event() @@ -52,7 +40,7 @@ def on_message(client, userdata, msg): message_log += "Received data:" + msg.topic + " " + payload + "\n" -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_mqtt_wss(env, extra_data): broker_url = "" broker_port = 0 @@ -67,8 +55,8 @@ def test_examples_protocol_mqtt_wss(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "mqtt_websocket_secure.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("mqtt_websocket_secure_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("mqtt_websocket_secure_size", bin_size // 1024) + ttfw_idf.log_performance("mqtt_websocket_secure_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("mqtt_websocket_secure_size", bin_size // 1024) # Look for host:port in sdkconfig try: value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()["CONFIG_BROKER_URI"]) diff --git a/examples/protocols/websocket/example_test.py b/examples/protocols/websocket/example_test.py index ef0c3b2f2b..a91b4ab6c6 100644 --- a/examples/protocols/websocket/example_test.py +++ b/examples/protocols/websocket/example_test.py @@ -1,18 +1,10 @@ import re import os -import sys -import IDF -# this is a test case write with tiny-test-fw. 
-# to run test cases outside tiny-test-fw, -# we need to set environment variable `TEST_FW_PATH`, -# then get and insert `TEST_FW_PATH` to sys path before import FW module -test_fw_path = os.getenv("TEST_FW_PATH") -if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) +import ttfw_idf -@IDF.idf_example_test(env_tag="Example_WIFI", ignore=True) +@ttfw_idf.idf_example_test(env_tag="Example_WIFI", ignore=True) def test_examples_protocol_websocket(env, extra_data): """ steps: | @@ -24,8 +16,8 @@ def test_examples_protocol_websocket(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "websocket-example.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("websocket_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("websocket_bin_size", bin_size // 1024) + ttfw_idf.log_performance("websocket_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("websocket_bin_size", bin_size // 1024) # start test dut1.start_app() dut1.expect("Waiting for wifi ...") diff --git a/examples/provisioning/ble_prov/ble_prov_test.py b/examples/provisioning/ble_prov/ble_prov_test.py index 01277117ed..c610015458 100644 --- a/examples/provisioning/ble_prov/ble_prov_test.py +++ b/examples/provisioning/ble_prov/ble_prov_test.py @@ -17,30 +17,16 @@ from __future__ import print_function import re import os -import sys import time -try: - import IDF -except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -try: - import esp_prov -except ImportError: - esp_prov_path = os.getenv("IDF_PATH") + "/tools/esp_prov" - if esp_prov_path and esp_prov_path not in sys.path: - sys.path.insert(0, esp_prov_path) - import esp_prov +import ttfw_idf +import esp_prov # Have esp_prov throw exception esp_prov.config_throw_except = True -@IDF.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") def test_examples_provisioning_ble(env, extra_data): # Acquire DUT dut1 = env.get_dut("ble_prov", "examples/provisioning/ble_prov") @@ -48,8 +34,8 @@ def test_examples_provisioning_ble(env, extra_data): # Get binary file binary_file = os.path.join(dut1.app.binary_path, "ble_prov.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("ble_prov_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("ble_prov_bin_size", bin_size // 1024) + ttfw_idf.log_performance("ble_prov_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("ble_prov_bin_size", bin_size // 1024) # Upload binary and start testing dut1.start_app() diff --git a/examples/provisioning/ble_prov/sdkconfig.defaults b/examples/provisioning/ble_prov/sdkconfig.defaults index e28cc05069..37eeb5f8ed 100644 --- a/examples/provisioning/ble_prov/sdkconfig.defaults +++ b/examples/provisioning/ble_prov/sdkconfig.defaults @@ -1,8 +1,8 @@ # Override some defaults so BT stack is enabled and CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n # Binary is larger than default size CONFIG_PARTITION_TABLE_CUSTOM=y diff --git a/examples/provisioning/manager/sdkconfig.defaults b/examples/provisioning/manager/sdkconfig.defaults index b8548ff9db..9125e679d1 100644 --- a/examples/provisioning/manager/sdkconfig.defaults +++ b/examples/provisioning/manager/sdkconfig.defaults @@ 
-1,8 +1,8 @@ # Override some defaults so BT stack is enabled and CONFIG_BT_ENABLED=y CONFIG_BTDM_CTRL_MODE_BLE_ONLY=y -CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY= -CONFIG_BTDM_CTRL_MODE_BTDM= +CONFIG_BTDM_CTRL_MODE_BR_EDR_ONLY=n +CONFIG_BTDM_CTRL_MODE_BTDM=n CONFIG_BT_NIMBLE_ENABLED=y ## For Bluedroid as binary is larger than default size diff --git a/examples/provisioning/manager/wifi_prov_mgr_test.py b/examples/provisioning/manager/wifi_prov_mgr_test.py index ea7172dc4d..1a9e480804 100644 --- a/examples/provisioning/manager/wifi_prov_mgr_test.py +++ b/examples/provisioning/manager/wifi_prov_mgr_test.py @@ -17,30 +17,16 @@ from __future__ import print_function import re import os -import sys import time -try: - import IDF -except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -try: - import esp_prov -except ImportError: - esp_prov_path = os.getenv("IDF_PATH") + "/tools/esp_prov" - if esp_prov_path and esp_prov_path not in sys.path: - sys.path.insert(0, esp_prov_path) - import esp_prov +import ttfw_idf +import esp_prov # Have esp_prov throw exception esp_prov.config_throw_except = True -@IDF.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") def test_examples_wifi_prov_mgr(env, extra_data): # Acquire DUT dut1 = env.get_dut("wifi_prov_mgr", "examples/provisioning/manager") @@ -48,8 +34,8 @@ def test_examples_wifi_prov_mgr(env, extra_data): # Get binary file binary_file = os.path.join(dut1.app.binary_path, "wifi_prov_mgr.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("wifi_prov_mgr_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("wifi_prov_mgr_bin_size", bin_size // 1024) + ttfw_idf.log_performance("wifi_prov_mgr_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("wifi_prov_mgr_bin_size", bin_size // 1024) # Upload binary and start testing dut1.start_app() diff --git a/examples/provisioning/softap_prov/softap_prov_test.py b/examples/provisioning/softap_prov/softap_prov_test.py index 6f67f47c8e..f129f35003 100644 --- a/examples/provisioning/softap_prov/softap_prov_test.py +++ b/examples/provisioning/softap_prov/softap_prov_test.py @@ -17,38 +17,17 @@ from __future__ import print_function import re import os -import sys import time -try: - import IDF -except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -try: - import esp_prov -except ImportError: - esp_prov_path = os.getenv("IDF_PATH") + "/tools/esp_prov" - if esp_prov_path and esp_prov_path not in sys.path: - sys.path.insert(0, esp_prov_path) - import esp_prov - -try: - import wifi_tools -except ImportError: - wifi_tools_path = os.getenv("IDF_PATH") + "/examples/provisioning/softap_prov/utils" - if wifi_tools_path and wifi_tools_path not in sys.path: - sys.path.insert(0, wifi_tools_path) - import wifi_tools +import ttfw_idf +import esp_prov +import wifi_tools # Have esp_prov throw exception esp_prov.config_throw_except = True -@IDF.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") def test_examples_provisioning_softap(env, extra_data): # Acquire DUT dut1 = env.get_dut("softap_prov", "examples/provisioning/softap_prov") @@ -56,8 +35,8 @@ def test_examples_provisioning_softap(env, extra_data): # Get binary file binary_file = os.path.join(dut1.app.binary_path, 
"softap_prov.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("softap_prov_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("softap_prov_bin_size", bin_size // 1024) + ttfw_idf.log_performance("softap_prov_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("softap_prov_bin_size", bin_size // 1024) # Upload binary and start testing dut1.start_app() diff --git a/examples/security/flash_encryption/example_test.py b/examples/security/flash_encryption/example_test.py index 401083b0cd..cea78bbf6f 100644 --- a/examples/security/flash_encryption/example_test.py +++ b/examples/security/flash_encryption/example_test.py @@ -1,14 +1,5 @@ from __future__ import print_function -import os -import sys - -try: - import IDF -except ImportError: - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf # To prepare a test runner for this example: @@ -18,7 +9,7 @@ except ImportError: # espefuse.py --do-not-confirm -p $ESPPORT burn_efuse FLASH_CRYPT_CONFIG 0xf # espefuse.py --do-not-confirm -p $ESPPORT burn_efuse FLASH_CRYPT_CNT 0x1 # espefuse.py --do-not-confirm -p $ESPPORT burn_key flash_encryption key.bin -@IDF.idf_example_test(env_tag='Example_Flash_Encryption') +@ttfw_idf.idf_example_test(env_tag='Example_Flash_Encryption') def test_examples_security_flash_encryption(env, extra_data): dut = env.get_dut('flash_encryption', 'examples/security/flash_encryption') # start test diff --git a/examples/storage/ext_flash_fatfs/example_test.py b/examples/storage/ext_flash_fatfs/example_test.py index a66ad8c3a4..3a62b1eaa2 100644 --- a/examples/storage/ext_flash_fatfs/example_test.py +++ b/examples/storage/ext_flash_fatfs/example_test.py @@ -1,17 +1,9 @@ from __future__ import print_function -import os -import sys -try: - import IDF -except ImportError: - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag='Example_ExtFlash') +@ttfw_idf.idf_example_test(env_tag='Example_ExtFlash') def test_examples_storage_ext_flash_fatfs(env, extra_data): dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs') dut.start_app() diff --git a/examples/storage/parttool/example_test.py b/examples/storage/parttool/example_test.py index 6ba90c1756..1b7229ba06 100644 --- a/examples/storage/parttool/example_test.py +++ b/examples/storage/parttool/example_test.py @@ -3,16 +3,10 @@ import os import sys import subprocess -try: - import IDF -except ImportError: - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_parttool(env, extra_data): dut = env.get_dut('parttool', 'examples/storage/parttool') dut.start_app(False) diff --git a/examples/storage/spiffsgen/example_test.py b/examples/storage/spiffsgen/example_test.py index 636d69c7db..bb5f53f6e2 100644 --- a/examples/storage/spiffsgen/example_test.py +++ b/examples/storage/spiffsgen/example_test.py @@ -1,18 +1,11 @@ from __future__ import print_function import os -import sys import hashlib -try: - import IDF -except ImportError: - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import 
IDF +import ttfw_idf -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_spiffsgen(env, extra_data): # Test with default build configurations dut = env.get_dut('spiffsgen', 'examples/storage/spiffsgen') diff --git a/examples/system/cpp_exceptions/example_test.py b/examples/system/cpp_exceptions/example_test.py index 7b4c004ec5..fd3a041153 100644 --- a/examples/system/cpp_exceptions/example_test.py +++ b/examples/system/cpp_exceptions/example_test.py @@ -1,17 +1,9 @@ from __future__ import print_function -import os -import sys -try: - import IDF -except ImportError: - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_system_cpp_exceptions(env, extra_data): dut = env.get_dut('cpp_exceptions_example', 'examples/system/cpp_exceptions') # start test diff --git a/examples/system/esp_event/default_event_loop/example_test.py b/examples/system/esp_event/default_event_loop/example_test.py index 002c35eddf..43a5344515 100644 --- a/examples/system/esp_event/default_event_loop/example_test.py +++ b/examples/system/esp_event/default_event_loop/example_test.py @@ -1,18 +1,6 @@ from __future__ import print_function -import os -import sys -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf # Timer events TIMER_EVENT_LIMIT = 3 @@ -91,7 +79,7 @@ def _test_iteration_events(dut): print("Deleted task event source") -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_default_event_loop_example(env, extra_data): dut = env.get_dut('default_event_loop', 'examples/system/esp_event/default_event_loop') diff --git a/examples/system/esp_event/user_event_loops/example_test.py b/examples/system/esp_event/user_event_loops/example_test.py index ccd8eeab95..cec531ab66 100644 --- a/examples/system/esp_event/user_event_loops/example_test.py +++ b/examples/system/esp_event/user_event_loops/example_test.py @@ -1,18 +1,6 @@ from __future__ import print_function -import os -import sys -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. 
- # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf TASK_ITERATION_LIMIT = 10 @@ -20,7 +8,7 @@ TASK_ITERATION_POSTING = "posting TASK_EVENTS:TASK_ITERATION_EVENT to {}, iterat TASK_ITERATION_HANDLING = "handling TASK_EVENTS:TASK_ITERATION_EVENT from {}, iteration {}" -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_user_event_loops_example(env, extra_data): dut = env.get_dut('user_event_loops', 'examples/system/esp_event/user_event_loops') diff --git a/examples/system/esp_timer/example_test.py b/examples/system/esp_timer/example_test.py index 474df5e0fa..7d59439489 100644 --- a/examples/system/esp_timer/example_test.py +++ b/examples/system/esp_timer/example_test.py @@ -1,19 +1,7 @@ from __future__ import print_function import re -import os -import sys -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf STARTING_TIMERS_REGEX = re.compile(r'Started timers, time since boot: (\d+) us') @@ -37,7 +25,7 @@ LIGHT_SLEEP_TIME = 500000 ONE_SHOT_TIMER_PERIOD = 5000000 -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_system_esp_timer(env, extra_data): dut = env.get_dut('esp_timer_example', 'examples/system/esp_timer') # start test diff --git a/examples/system/freertos/real_time_stats/example_test.py b/examples/system/freertos/real_time_stats/example_test.py index f5ae64c3e4..4665242fb3 100644 --- a/examples/system/freertos/real_time_stats/example_test.py +++ b/examples/system/freertos/real_time_stats/example_test.py @@ -1,24 +1,12 @@ from __future__ import print_function -import os -import sys -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. 
- # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf STATS_TASK_ITERS = 3 STATS_TASK_EXPECT = "Real time stats obtained" -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_real_time_stats_example(env, extra_data): dut = env.get_dut('real_time_stats', 'examples/system/freertos/real_time_stats') dut.start_app() diff --git a/examples/system/himem/sdkconfig.defaults b/examples/system/himem/sdkconfig.defaults index b08e7fa95f..545334af99 100644 --- a/examples/system/himem/sdkconfig.defaults +++ b/examples/system/himem/sdkconfig.defaults @@ -1,6 +1,6 @@ CONFIG_ESP32_SPIRAM_SUPPORT=y CONFIG_SPIRAM_BOOT_INIT=y -CONFIG_SPIRAM_IGNORE_NOTFOUND= +CONFIG_SPIRAM_IGNORE_NOTFOUND=n CONFIG_SPIRAM_USE_MALLOC=y CONFIG_SPIRAM_TYPE_AUTO=y CONFIG_SPIRAM_SIZE=-1 diff --git a/examples/system/light_sleep/example_test.py b/examples/system/light_sleep/example_test.py index a3ddd97a16..5c2698b733 100644 --- a/examples/system/light_sleep/example_test.py +++ b/examples/system/light_sleep/example_test.py @@ -1,16 +1,8 @@ from __future__ import print_function import re -import os -import sys import time -try: - import IDF -except ImportError: - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf ENTERING_SLEEP_STR = 'Entering light sleep' EXIT_SLEEP_REGEX = re.compile(r'Returned from light sleep, reason: (\w+), t=(\d+) ms, slept for (\d+) ms') @@ -19,7 +11,7 @@ WAITING_FOR_GPIO_STR = 'Waiting for GPIO0 to go high...' WAKEUP_INTERVAL_MS = 2000 -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_system_light_sleep(env, extra_data): dut = env.get_dut('light_sleep_example', 'examples/system/light_sleep') dut.start_app() diff --git a/examples/system/ota/otatool/example_test.py b/examples/system/ota/otatool/example_test.py index a22ebf5a89..dbd497d637 100644 --- a/examples/system/ota/otatool/example_test.py +++ b/examples/system/ota/otatool/example_test.py @@ -3,20 +3,10 @@ import os import sys import subprocess -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. 
- # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv('TEST_FW_PATH') - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +import ttfw_idf -@IDF.idf_example_test(env_tag='Example_WIFI') +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_otatool_example(env, extra_data): dut = env.get_dut('otatool', 'examples/system/ota/otatool') diff --git a/examples/system/ota/simple_ota_example/example_test.py b/examples/system/ota/simple_ota_example/example_test.py index 7c732a5425..ad18957d38 100644 --- a/examples/system/ota/simple_ota_example/example_test.py +++ b/examples/system/ota/simple_ota_example/example_test.py @@ -1,25 +1,13 @@ import re import os -import sys import socket import BaseHTTPServer import SimpleHTTPServer from threading import Thread import ssl -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF - -import DUT +from tiny_test_fw import DUT +import ttfw_idf server_cert = "-----BEGIN CERTIFICATE-----\n" \ "MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"\ @@ -107,7 +95,7 @@ def start_https_server(ota_image_dir, server_ip, server_port): httpd.serve_forever() -@IDF.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag="Example_WIFI") def test_examples_protocol_simple_ota_example(env, extra_data): """ steps: | @@ -119,8 +107,8 @@ def test_examples_protocol_simple_ota_example(env, extra_data): # check and log bin size binary_file = os.path.join(dut1.app.binary_path, "simple_ota.bin") bin_size = os.path.getsize(binary_file) - IDF.log_performance("simple_ota_bin_size", "{}KB".format(bin_size // 1024)) - IDF.check_performance("simple_ota_bin_size", bin_size // 1024) + ttfw_idf.log_performance("simple_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.check_performance("simple_ota_bin_size", bin_size // 1024) # start test host_ip = get_my_ip() thread1 = Thread(target=start_https_server, args=(dut1.app.binary_path, host_ip, 8000)) diff --git a/examples/system/sysview_tracing_heap_log/sdkconfig.defaults b/examples/system/sysview_tracing_heap_log/sdkconfig.defaults index 26ca10d1df..abe211ddfa 100644 --- a/examples/system/sysview_tracing_heap_log/sdkconfig.defaults +++ b/examples/system/sysview_tracing_heap_log/sdkconfig.defaults @@ -23,6 +23,6 @@ CONFIG_SYSVIEW_EVT_IDLE_ENABLE=y CONFIG_SYSVIEW_EVT_TIMER_ENTER_ENABLE=y CONFIG_SYSVIEW_EVT_TIMER_EXIT_ENABLE=y # Disable color output in logs -CONFIG_LOG_COLORS= +CONFIG_LOG_COLORS=n # Enable heap tracing to host CONFIG_HEAP_TRACING_TOHOST=y diff --git a/examples/system/unit_test/test/sdkconfig.defaults b/examples/system/unit_test/test/sdkconfig.defaults index 11ea461367..a1f2ef6f8b 100644 --- a/examples/system/unit_test/test/sdkconfig.defaults +++ b/examples/system/unit_test/test/sdkconfig.defaults @@ -1 +1 @@ -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_TASK_WDT=n diff --git a/examples/wifi/iperf/iperf_test.py b/examples/wifi/iperf/iperf_test.py index 35538cae7c..42ed9a072e 100644 --- a/examples/wifi/iperf/iperf_test.py +++ b/examples/wifi/iperf/iperf_test.py 
@@ -26,33 +26,13 @@ from builtins import range from builtins import object import re import os -import sys import time import subprocess -try: - import IDF -except ImportError: - # this is a test case write with tiny-test-fw. - # to run test cases outside tiny-test-fw, - # we need to set environment variable `TEST_FW_PATH`, - # then get and insert `TEST_FW_PATH` to sys path before import FW module - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - import IDF +from tiny_test_fw import TinyFW, DUT, Utility +import ttfw_idf +from idf_iperf_test_util import (Attenuator, PowerControl, LineChart, TestReport) -import DUT -import TinyFW -import Utility -from Utility import (Attenuator, PowerControl, LineChart) - -try: - from test_report import (ThroughputForConfigsReport, ThroughputVsRssiReport) -except ImportError: - # add current folder to system path for importing test_report - sys.path.append(os.path.dirname(__file__)) - from test_report import (ThroughputForConfigsReport, ThroughputVsRssiReport) # configurations TEST_TIME = TEST_TIMEOUT = 60 @@ -165,8 +145,8 @@ class TestResult(object): throughput = 0.0 if throughput == 0 and rssi > self.ZERO_THROUGHPUT_THRESHOLD: - self.error_list.append("[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found" - .format(ap_ssid, att, rssi)) + self.error_list.append("[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found" + .format(ap_ssid, att, rssi)) self._save_result(throughput, ap_ssid, att, rssi, heap_size) @@ -466,32 +446,7 @@ class IperfTestUtility(object): return ret -def build_iperf_with_config(config_name): - """ - we need to build iperf example with different configurations. - - :param config_name: sdkconfig we want to build - """ - - # switch to iperf example path before build when we're running test with Runner - example_path = os.path.dirname(__file__) - cwd = os.getcwd() - if cwd != example_path and example_path: - os.chdir(example_path) - try: - subprocess.check_call("make clean > /dev/null", shell=True) - subprocess.check_call(["cp", "sdkconfig.defaults.{}".format(config_name), "sdkconfig.defaults"]) - subprocess.check_call(["rm", "-f", "sdkconfig"]) - subprocess.check_call("make defconfig > /dev/null", shell=True) - # save sdkconfig to generate config comparision report - subprocess.check_call(["cp", "sdkconfig", "sdkconfig.{}".format(config_name)]) - subprocess.check_call("make -j5 > /dev/null", shell=True) - subprocess.check_call("make print_flash_cmd | tail -n 1 > build/download.config", shell=True) - finally: - os.chdir(cwd) - - -@IDF.idf_example_test(env_tag="Example_ShieldBox_Basic", category="stress") +@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox_Basic", category="stress") def test_wifi_throughput_with_different_configs(env, extra_data): """ steps: | @@ -511,13 +466,12 @@ def test_wifi_throughput_with_different_configs(env, extra_data): sdkconfig_files = dict() for config_name in CONFIG_NAME_PATTERN.findall(config_names_raw): - # 1. build config - build_iperf_with_config(config_name) + # 1. get the config sdkconfig_files[config_name] = os.path.join(os.path.dirname(__file__), - "sdkconfig.{}".format(config_name)) + "sdkconfig.ci.{}".format(config_name)) # 2. 
get DUT and download - dut = env.get_dut("iperf", "examples/wifi/iperf") + dut = env.get_dut("iperf", "examples/wifi/iperf", app_config_name=config_name) dut.start_app() dut.expect("esp32>") @@ -544,12 +498,12 @@ def test_wifi_throughput_with_different_configs(env, extra_data): env.close_dut("iperf") # 5. generate report - report = ThroughputForConfigsReport(os.path.join(env.log_path, "ThroughputForConfigsReport"), - ap_info["ssid"], test_result, sdkconfig_files) + report = TestReport.ThroughputForConfigsReport(os.path.join(env.log_path, "ThroughputForConfigsReport"), + ap_info["ssid"], test_result, sdkconfig_files) report.generate_report() -@IDF.idf_example_test(env_tag="Example_ShieldBox", category="stress") +@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox", category="stress") def test_wifi_throughput_vs_rssi(env, extra_data): """ steps: | @@ -571,15 +525,12 @@ def test_wifi_throughput_vs_rssi(env, extra_data): "udp_rx": TestResult("udp", "rx", BEST_PERFORMANCE_CONFIG), } - # 1. build config - build_iperf_with_config(BEST_PERFORMANCE_CONFIG) - - # 2. get DUT and download - dut = env.get_dut("iperf", "examples/wifi/iperf") + # 1. get DUT and download + dut = env.get_dut("iperf", "examples/wifi/iperf", app_config_name=BEST_PERFORMANCE_CONFIG) dut.start_app() dut.expect("esp32>") - # 3. run test for each required att value + # 2. run test for each required att value for ap_info in ap_list: test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info["ssid"], ap_info["password"], pc_nic_ip, pc_iperf_log_file, test_result) @@ -597,16 +548,16 @@ def test_wifi_throughput_vs_rssi(env, extra_data): assert Attenuator.set_att(att_port, atten_val) is True test_utility.run_all_cases(atten_val) - # 4. check test results + # 3. check test results env.close_dut("iperf") - # 5. generate report - report = ThroughputVsRssiReport(os.path.join(env.log_path, "ThroughputVsRssiReport"), - test_result) + # 4. generate report + report = TestReport.ThroughputVsRssiReport(os.path.join(env.log_path, "ThroughputVsRssiReport"), + test_result) report.generate_report() -@IDF.idf_example_test(env_tag="Example_ShieldBox_Basic") +@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox_Basic") def test_wifi_throughput_basic(env, extra_data): """ steps: | @@ -620,15 +571,12 @@ def test_wifi_throughput_basic(env, extra_data): "password": env.get_variable("ap_password"), } - # 1. build iperf with best config - build_iperf_with_config(BEST_PERFORMANCE_CONFIG) - - # 2. get DUT - dut = env.get_dut("iperf", "examples/wifi/iperf") + # 1. get DUT + dut = env.get_dut("iperf", "examples/wifi/iperf", app_config_name=BEST_PERFORMANCE_CONFIG) dut.start_app() dut.expect("esp32>") - # 3. preparing + # 2. preparing test_result = { "tcp_tx": TestResult("tcp", "tx", BEST_PERFORMANCE_CONFIG), "tcp_rx": TestResult("tcp", "rx", BEST_PERFORMANCE_CONFIG), @@ -639,24 +587,24 @@ def test_wifi_throughput_basic(env, extra_data): test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info["ssid"], ap_info["password"], pc_nic_ip, pc_iperf_log_file, test_result) - # 4. run test for TCP Tx, Rx and UDP Tx, Rx + # 3. run test for TCP Tx, Rx and UDP Tx, Rx for _ in range(RETRY_COUNT_FOR_BEST_PERFORMANCE): test_utility.run_all_cases(0) - # 5. log performance and compare with pass standard + # 4. 
log performance and compare with pass standard performance_items = [] for throughput_type in test_result: - IDF.log_performance("{}_throughput".format(throughput_type), - "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput())) + ttfw_idf.log_performance("{}_throughput".format(throughput_type), + "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput())) performance_items.append(["{}_throughput".format(throughput_type), "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput())]) - # save to report + # 5. save to report TinyFW.JunitReport.update_performance(performance_items) # do check after logging, otherwise test will exit immediately if check fail, some performance can't be logged. for throughput_type in test_result: - IDF.check_performance("{}_throughput".format(throughput_type), - test_result[throughput_type].get_best_throughput()) + ttfw_idf.check_performance("{}_throughput".format(throughput_type), + test_result[throughput_type].get_best_throughput()) env.close_dut("iperf") diff --git a/examples/wifi/iperf/sdkconfig.defaults.00 b/examples/wifi/iperf/sdkconfig.ci.00 similarity index 60% rename from examples/wifi/iperf/sdkconfig.defaults.00 rename to examples/wifi/iperf/sdkconfig.ci.00 index 438fe641be..ca2a31cf6c 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.00 +++ b/examples/wifi/iperf/sdkconfig.ci.00 @@ -2,5 +2,5 @@ CONFIG_MEMMAP_SMP=y CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=4096 -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n diff --git a/examples/wifi/iperf/sdkconfig.defaults.01 b/examples/wifi/iperf/sdkconfig.ci.01 similarity index 83% rename from examples/wifi/iperf/sdkconfig.defaults.01 rename to examples/wifi/iperf/sdkconfig.ci.01 index 6393e8d341..0cea3afaae 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.01 +++ b/examples/wifi/iperf/sdkconfig.ci.01 @@ -1,11 +1,11 @@ CONFIG_MEMMAP_SMP=y -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=4096 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 CONFIG_ESP32_WIFI_STATIC_RX_BUFFER_NUM=12 @@ -21,5 +21,5 @@ CONFIG_LWIP_TCP_WND_DEFAULT=11488 CONFIG_LWIP_TCP_RECVMBOX_SIZE=12 CONFIG_LWIP_UDP_RECVMBOX_SIZE=12 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=48 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n diff --git a/examples/wifi/iperf/sdkconfig.defaults.02 b/examples/wifi/iperf/sdkconfig.ci.02 similarity index 83% rename from examples/wifi/iperf/sdkconfig.defaults.02 rename to examples/wifi/iperf/sdkconfig.ci.02 index be98dffad5..0d3476911b 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.02 +++ b/examples/wifi/iperf/sdkconfig.ci.02 @@ -1,11 +1,11 @@ CONFIG_MEMMAP_SMP=y -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=4096 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 CONFIG_ESP32_WIFI_STATIC_RX_BUFFER_NUM=16 @@ -21,5 +21,5 @@ CONFIG_LWIP_TCP_WND_DEFAULT=11488 CONFIG_LWIP_TCP_RECVMBOX_SIZE=12 CONFIG_LWIP_UDP_RECVMBOX_SIZE=12 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=48 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n diff --git a/examples/wifi/iperf/sdkconfig.defaults.03 b/examples/wifi/iperf/sdkconfig.ci.03 similarity index 83% rename from examples/wifi/iperf/sdkconfig.defaults.03 rename to examples/wifi/iperf/sdkconfig.ci.03 index ac93ca610e..521d0a19dc 100644 --- 
a/examples/wifi/iperf/sdkconfig.defaults.03 +++ b/examples/wifi/iperf/sdkconfig.ci.03 @@ -1,11 +1,11 @@ CONFIG_MEMMAP_SMP=y -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=4096 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 CONFIG_ESP32_WIFI_STATIC_RX_BUFFER_NUM=16 @@ -21,5 +21,5 @@ CONFIG_LWIP_TCP_WND_DEFAULT=32768 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n diff --git a/examples/wifi/iperf/sdkconfig.defaults.04 b/examples/wifi/iperf/sdkconfig.ci.04 similarity index 83% rename from examples/wifi/iperf/sdkconfig.defaults.04 rename to examples/wifi/iperf/sdkconfig.ci.04 index 8805c8a3c5..4aaeca925f 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.04 +++ b/examples/wifi/iperf/sdkconfig.ci.04 @@ -1,11 +1,11 @@ CONFIG_MEMMAP_SMP=y -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=4096 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 CONFIG_ESP32_WIFI_STATIC_RX_BUFFER_NUM=16 @@ -21,5 +21,5 @@ CONFIG_LWIP_TCP_WND_DEFAULT=65535 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n diff --git a/examples/wifi/iperf/sdkconfig.defaults.05 b/examples/wifi/iperf/sdkconfig.ci.05 similarity index 85% rename from examples/wifi/iperf/sdkconfig.defaults.05 rename to examples/wifi/iperf/sdkconfig.ci.05 index cb287b13d1..27ca6f0881 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.05 +++ b/examples/wifi/iperf/sdkconfig.ci.05 @@ -1,11 +1,11 @@ CONFIG_MEMMAP_SMP=y -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_ESP_SYSTEM_EVENT_TASK_STACK_SIZE=4096 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 CONFIG_ESP32_WIFI_STATIC_RX_BUFFER_NUM=16 @@ -21,7 +21,7 @@ CONFIG_LWIP_TCP_WND_DEFAULT=65534 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n CONFIG_ESPTOOLPY_FLASHMODE_QIO=y CONFIG_ESPTOOLPY_FLASHFREQ_80M=y diff --git a/examples/wifi/iperf/sdkconfig.defaults.06 b/examples/wifi/iperf/sdkconfig.ci.06 similarity index 89% rename from examples/wifi/iperf/sdkconfig.defaults.06 rename to examples/wifi/iperf/sdkconfig.ci.06 index 1b35b695e3..6d47f8f851 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.06 +++ b/examples/wifi/iperf/sdkconfig.ci.06 @@ -11,15 +11,15 @@ CONFIG_ESP32_WIFI_RX_BA_WIN=32 CONFIG_FREERTOS_UNICORE=y CONFIG_FREERTOS_HZ=1000 -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_LWIP_TCP_SND_BUF_DEFAULT=65534 CONFIG_LWIP_TCP_WND_DEFAULT=65534 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n CONFIG_ESPTOOLPY_FLASHMODE_QIO=y CONFIG_ESPTOOLPY_FLASHFREQ_80M=y diff --git a/examples/wifi/iperf/sdkconfig.defaults.07 b/examples/wifi/iperf/sdkconfig.ci.07 similarity index 87% rename from examples/wifi/iperf/sdkconfig.defaults.07 rename to examples/wifi/iperf/sdkconfig.ci.07 index 4fc2301613..f251e0a2dc 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.07 +++ b/examples/wifi/iperf/sdkconfig.ci.07 @@ -12,18 
+12,18 @@ CONFIG_ESP32_WIFI_TX_BA_WIN=32 CONFIG_ESP32_WIFI_AMPDU_RX_ENABLED=y CONFIG_ESP32_WIFI_RX_BA_WIN=32 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_LWIP_TCP_SND_BUF_DEFAULT=65534 CONFIG_LWIP_TCP_WND_DEFAULT=65534 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n CONFIG_ESPTOOLPY_FLASHMODE_QIO=y CONFIG_ESPTOOLPY_FLASHFREQ_80M=y diff --git a/examples/wifi/iperf/sdkconfig.defaults.99 b/examples/wifi/iperf/sdkconfig.ci.99 similarity index 87% rename from examples/wifi/iperf/sdkconfig.defaults.99 rename to examples/wifi/iperf/sdkconfig.ci.99 index c7e50345bc..52c69eb3da 100644 --- a/examples/wifi/iperf/sdkconfig.defaults.99 +++ b/examples/wifi/iperf/sdkconfig.ci.99 @@ -12,18 +12,18 @@ CONFIG_ESP32_WIFI_TX_BA_WIN=32 CONFIG_ESP32_WIFI_AMPDU_RX_ENABLED=y CONFIG_ESP32_WIFI_RX_BA_WIN=32 -CONFIG_FREERTOS_UNICORE= +CONFIG_FREERTOS_UNICORE=n CONFIG_FREERTOS_HZ=1000 -CONFIG_ESP_INT_WDT= -CONFIG_ESP_TASK_WDT= +CONFIG_ESP_INT_WDT=n +CONFIG_ESP_TASK_WDT=n CONFIG_LWIP_TCP_SND_BUF_DEFAULT=65534 CONFIG_LWIP_TCP_WND_DEFAULT=65534 CONFIG_LWIP_TCP_RECVMBOX_SIZE=64 CONFIG_LWIP_UDP_RECVMBOX_SIZE=64 CONFIG_LWIP_TCPIP_RECVMBOX_SIZE=64 -CONFIG_LWIP_ETHARP_TRUST_IP_MAC= +CONFIG_LWIP_ETHARP_TRUST_IP_MAC=n CONFIG_ESPTOOLPY_FLASHMODE_QIO=y CONFIG_ESPTOOLPY_FLASHFREQ_80M=y diff --git a/make/project.mk b/make/project.mk index b2e29ecc30..a3a51809d6 100644 --- a/make/project.mk +++ b/make/project.mk @@ -144,7 +144,7 @@ EXTRA_COMPONENT_DIRS ?= COMPONENT_DIRS := $(PROJECT_PATH)/components $(EXTRA_COMPONENT_DIRS) $(IDF_PATH)/components $(PROJECT_PATH)/main endif # Make sure that every directory in the list is an absolute path without trailing slash. -# This is necessary to split COMPONENT_DIRS into SINGLE_COMPONENT_DIRS and MULTI_COMPONENT_DIRS below. +# This is necessary to split COMPONENT_DIRS into SINGLE_COMPONENT_DIRS and MULTI_COMPONENT_DIRS below. COMPONENT_DIRS := $(foreach cd,$(COMPONENT_DIRS),$(abspath $(cd))) export COMPONENT_DIRS @@ -153,11 +153,11 @@ $(warning SRCDIRS variable is deprecated. These paths can be added to EXTRA_COMP COMPONENT_DIRS += $(abspath $(SRCDIRS)) endif -# List of component directories, i.e. directories which contain a component.mk file +# List of component directories, i.e. directories which contain a component.mk file SINGLE_COMPONENT_DIRS := $(abspath $(dir $(dir $(foreach cd,$(COMPONENT_DIRS),\ $(wildcard $(cd)/component.mk))))) -# List of components directories, i.e. directories which may contain components +# List of components directories, i.e. 
directories which may contain components MULTI_COMPONENT_DIRS := $(filter-out $(SINGLE_COMPONENT_DIRS),$(COMPONENT_DIRS)) # The project Makefile can define a list of components, but if it does not do this @@ -621,6 +621,11 @@ clean: app-clean bootloader-clean config-clean ldgen-clean # # This only works for components inside IDF_PATH check-submodules: +# for internal use: +# skip submodule check if running on Gitlab CI and job is configured as not clone submodules +ifeq ($(IDF_SKIP_CHECK_SUBMODULES),1) + @echo "skip submodule check on internal CI" +else # Check if .gitmodules exists, otherwise skip submodule check, assuming flattened structure ifneq ("$(wildcard ${IDF_PATH}/.gitmodules)","") @@ -648,7 +653,7 @@ endef # so the argument is suitable for use with 'git submodule' commands $(foreach submodule,$(subst $(IDF_PATH)/,,$(filter $(IDF_PATH)/%,$(COMPONENT_SUBMODULES))),$(eval $(call GenerateSubmoduleCheckTarget,$(submodule)))) endif # End check for .gitmodules existence - +endif # PHONY target to list components in the build and their paths list-components: diff --git a/tools/tiny-test-fw/docs/_static/.keep b/tools/ble/__init__.py similarity index 100% rename from tools/tiny-test-fw/docs/_static/.keep rename to tools/ble/__init__.py diff --git a/tools/ble/lib_ble_client.py b/tools/ble/lib_ble_client.py index 6fda14288c..a17cf0a3fc 100644 --- a/tools/ble/lib_ble_client.py +++ b/tools/ble/lib_ble_client.py @@ -29,7 +29,7 @@ try: from gi.repository import GLib except ImportError as e: if 'linux' not in sys.platform: - sys.exit("Error: Only supported on Linux platform") + raise e print(e) print("Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue") print("Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue") diff --git a/tools/ble/lib_gap.py b/tools/ble/lib_gap.py index 2033dc4ab8..02466c7aa1 100644 --- a/tools/ble/lib_gap.py +++ b/tools/ble/lib_gap.py @@ -25,7 +25,7 @@ try: import dbus.service except ImportError as e: if 'linux' not in sys.platform: - sys.exit("Error: Only supported on Linux platform") + raise e print(e) print("Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue") print("Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue") diff --git a/tools/ble/lib_gatt.py b/tools/ble/lib_gatt.py index c666d8e7e8..4710e1549c 100644 --- a/tools/ble/lib_gatt.py +++ b/tools/ble/lib_gatt.py @@ -25,7 +25,7 @@ try: import dbus.service except ImportError as e: if 'linux' not in sys.platform: - sys.exit("Error: Only supported on Linux platform") + raise e print(e) print("Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue") print("Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue") diff --git a/tools/build_apps.py b/tools/build_apps.py new file mode 100755 index 0000000000..d25a8e279f --- /dev/null +++ b/tools/build_apps.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +# coding=utf-8 +# +# ESP-IDF helper script to build multiple applications. Consumes the input of find_apps.py. 
+# + +import argparse +import sys +import logging +from find_build_apps import BuildItem, BuildError, setup_logging, BUILD_SYSTEMS + + +def main(): + parser = argparse.ArgumentParser(description="ESP-IDF app builder") + parser.add_argument( + "-v", + "--verbose", + action="count", + help="Increase the logging level of the script. Can be specified multiple times.", + ) + parser.add_argument( + "--build-verbose", + action="store_true", + help="Enable verbose output from build system.", + ) + parser.add_argument( + "--log-file", + type=argparse.FileType("w"), + help="Write the script log to the specified file, instead of stderr", + ) + parser.add_argument( + "--parallel-count", + default=1, + type=int, + help="Number of parallel build jobs. Note that this script doesn't start the jobs, " + + "it needs to be executed multiple times with same value of --parallel-count and " + + "different values of --parallel-index.", + ) + parser.add_argument( + "--parallel-index", + default=1, + type=int, + help="Index (1-based) of the job, out of the number specified by --parallel-count.", + ) + parser.add_argument( + "--format", + default="json", + choices=["json"], + help="Format to read the list of builds", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Don't actually build, only print the build commands", + ) + parser.add_argument( + "--keep-going", + action="store_true", + help="Don't exit immediately when a build fails.", + ) + parser.add_argument( + "--output-build-list", + type=argparse.FileType("w"), + help="If specified, the list of builds (with all the placeholders expanded) will be written to this file.", + ) + parser.add_argument( + "build_list", + type=argparse.FileType("r"), + nargs="?", + default=sys.stdin, + help="Name of the file to read the list of builds from. If not specified, read from stdin.", + ) + args = parser.parse_args() + + setup_logging(args) + + build_items = [BuildItem.from_json(line) for line in args.build_list] + + if not build_items: + logging.error("Empty build list!") + raise SystemExit(1) + + num_builds = len(build_items) + num_jobs = args.parallel_count + job_index = args.parallel_index - 1 # convert to 0-based index + num_builds_per_job = (num_builds + num_jobs - 1) // num_jobs + min_job_index = num_builds_per_job * job_index + if min_job_index >= num_builds: + logging.warn("Nothing to do for job {} (build total: {}, per job: {})".format( + job_index + 1, num_builds, num_builds_per_job)) + raise SystemExit(0) + + max_job_index = min(num_builds_per_job * (job_index + 1) - 1, num_builds - 1) + logging.info("Total {} builds, max. 
{} builds per job, running builds {}-{}".format( + num_builds, num_builds_per_job, min_job_index + 1, max_job_index + 1)) + + builds_for_current_job = build_items[min_job_index:max_job_index + 1] + for i, build_info in enumerate(builds_for_current_job): + index = i + min_job_index + 1 + build_info.index = index + build_info.dry_run = args.dry_run + build_info.verbose = args.build_verbose + build_info.keep_going = args.keep_going + logging.debug(" Build {}: {}".format(index, repr(build_info))) + if args.output_build_list: + args.output_build_list.write(build_info.to_json_expanded() + "\n") + + failed_builds = [] + for build_info in builds_for_current_job: + logging.info("Running build {}: {}".format(build_info.index, repr(build_info))) + build_system_class = BUILD_SYSTEMS[build_info.build_system] + try: + build_system_class.build(build_info) + except BuildError as e: + logging.error(e.message) + if args.keep_going: + failed_builds.append(build_info) + else: + raise SystemExit(1) + + if failed_builds: + logging.error("The following build have failed:") + for build in failed_builds: + logging.error(" {}".format(build)) + raise SystemExit(1) + + +if __name__ == "__main__": + main() diff --git a/tools/ci/build_examples_cmake.sh b/tools/ci/build_examples_cmake.sh index 6ec906b0dc..032a8bbe82 100755 --- a/tools/ci/build_examples_cmake.sh +++ b/tools/ci/build_examples_cmake.sh @@ -1,24 +1,10 @@ #!/bin/bash # -# Build all examples from the examples directory, out of tree to +# Build all examples from the examples directory, in BUILD_PATH to # ensure they can run when copied to a new directory. # # Runs as part of CI process. # -# Assumes PWD is an out-of-tree build directory, and will copy examples -# to individual subdirectories, one by one. -# -# -# Without arguments it just builds all examples -# -# With one argument it builds part of the examples. This is a useful for -# parallel execution in CI. -# must look like this: -# _ -# It scans .gitlab-ci.yaml to count number of jobs which have name "_" -# It scans the filesystem to count all examples -# Based on this, it decides to run qa set of examples. -# # ----------------------------------------------------------------------------- # Safety settings (see https://gist.github.com/ilg-ul/383869cbb01f61a51c4d). @@ -31,9 +17,6 @@ fi set -o errexit # Exit if command failed. set -o pipefail # Exit if pipe failed. -# Remove the initial space and instead use '\n'. -IFS=$'\n\t' - export PATH="$IDF_PATH/tools/ci:$IDF_PATH/tools:$PATH" # ----------------------------------------------------------------------------- @@ -45,146 +28,76 @@ die() { [ -z ${IDF_PATH} ] && die "IDF_PATH is not set" [ -z ${LOG_PATH} ] && die "LOG_PATH is not set" +[ -z ${BUILD_PATH} ] && die "BUILD_PATH is not set" +[ -z ${IDF_TARGET} ] && die "IDF_TARGET is not set" [ -d ${LOG_PATH} ] || mkdir -p ${LOG_PATH} +[ -d ${BUILD_PATH} ] || mkdir -p ${BUILD_PATH} + +if [ -z ${CI_NODE_TOTAL} ]; then + CI_NODE_TOTAL=1 + echo "Assuming CI_NODE_TOTAL=${CI_NODE_TOTAL}" +fi +if [ -z ${CI_NODE_INDEX} ]; then + # Gitlab uses a 1-based index + CI_NODE_INDEX=1 + echo "Assuming CI_NODE_INDEX=${CI_NODE_INDEX}" +fi set -o nounset # Exit if variable not set. 
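For reference, the slicing logic in build_apps.py above distributes builds across parallel CI jobs by ceiling division. The sketch below works through that arithmetic with made-up numbers (57 builds spread over 5 jobs, looking at job 3); the values are purely illustrative and not taken from any real pipeline:

# Worked example of the partitioning done by build_apps.py (illustrative numbers only)
num_builds = 57        # entries read from the build list
num_jobs = 5           # --parallel-count (CI_NODE_TOTAL)
job_index = 3 - 1      # --parallel-index (CI_NODE_INDEX) is 1-based

# ceiling division: every job gets at most this many builds
num_builds_per_job = (num_builds + num_jobs - 1) // num_jobs   # 12
min_job_index = num_builds_per_job * job_index                 # 24
max_job_index = min(num_builds_per_job * (job_index + 1) - 1,
                    num_builds - 1)                            # 35

# job 3/5 therefore runs builds 25-36 (1-based), a slice of 12 items;
# the last job (5/5) only gets the remaining 57 - 4 * 12 = 9 builds
assert max_job_index - min_job_index + 1 == 12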
-echo "build_examples running in ${PWD} for target $IDF_TARGET" - -# only 0 or 1 arguments -[ $# -le 1 ] || die "Have to run as $(basename $0) []" - -export BATCH_BUILD=1 -export V=0 # only build verbose if there's an error - -shopt -s lastpipe # Workaround for Bash to use variables in loops (http://mywiki.wooledge.org/BashFAQ/024) - -RESULT=0 -FAILED_EXAMPLES="" -RESULT_ISSUES=22 # magic number result code for issues found -LOG_SUSPECTED=${LOG_PATH}/common_log.txt -touch ${LOG_SUSPECTED} -SDKCONFIG_DEFAULTS_CI=sdkconfig.ci - -EXAMPLE_PATHS=$( get_supported_examples.sh $IDF_TARGET | sed "s#^#${IDF_PATH}\/examples\/#g" | awk '{print $0"/CmakeLists.txt"}' ) -echo "All examples found for target $IDF_TARGET:" -echo $EXAMPLE_PATHS - -if [ -z {CI_NODE_TOTAL} ] -then - START_NUM=0 - END_NUM=999 -else - JOB_NUM=${CI_NODE_INDEX} - # count number of the jobs - NUM_OF_JOBS=${CI_NODE_TOTAL} - - # count number of examples - NUM_OF_EXAMPLES=$( echo "${EXAMPLE_PATHS}" | wc -l ) - [ ${NUM_OF_EXAMPLES} -lt 100 ] && die "NUM_OF_EXAMPLES is bad" - - # separate intervals - #57 / 5 == 12 - NUM_OF_EX_PER_JOB=$(( (${NUM_OF_EXAMPLES} + ${NUM_OF_JOBS} - 1) / ${NUM_OF_JOBS} )) - [ -z ${NUM_OF_EX_PER_JOB} ] && die "NUM_OF_EX_PER_JOB is bad" - - # ex.: [0; 12); [12; 24); [24; 36); [36; 48); [48; 60) - START_NUM=$(( (${JOB_NUM} - 1) * ${NUM_OF_EX_PER_JOB} )) - [ -z ${START_NUM} ] && die "START_NUM is bad" - - END_NUM=$(( ${JOB_NUM} * ${NUM_OF_EX_PER_JOB} )) - [ -z ${END_NUM} ] && die "END_NUM is bad" +export REALPATH=realpath +if [ "$(uname -s)" = "Darwin" ]; then + export REALPATH=grealpath fi -build_example () { - local ID=$1 - shift - local CMAKELISTS=$1 - shift +# Convert LOG_PATH and BUILD_PATH to relative, to make the json file less verbose. +LOG_PATH=$(${REALPATH} --relative-to ${IDF_PATH} ${LOG_PATH}) +BUILD_PATH=$(${REALPATH} --relative-to ${IDF_PATH} ${BUILD_PATH}) - local EXAMPLE_DIR=$(dirname "${CMAKELISTS}") - local EXAMPLE_NAME=$(basename "${EXAMPLE_DIR}") +ALL_BUILD_LIST_JSON="${BUILD_PATH}/list.json" +JOB_BUILD_LIST_JSON="${BUILD_PATH}/list_job_${CI_NODE_INDEX}.json" +mkdir -p "${BUILD_PATH}/example_builds" - echo "Building ${EXAMPLE_NAME} for ${IDF_TARGET} as ${ID}..." - mkdir -p "example_builds/${IDF_TARGET}/${ID}" - cp -r "${EXAMPLE_DIR}" "example_builds/${IDF_TARGET}/${ID}" - pushd "example_builds/${IDF_TARGET}/${ID}/${EXAMPLE_NAME}" - # be stricter in the CI build than the default IDF settings - export EXTRA_CFLAGS=${PEDANTIC_CFLAGS} - export EXTRA_CXXFLAGS=${EXTRA_CFLAGS} +echo "build_examples running for target $IDF_TARGET" - # sdkconfig files are normally not checked into git, but may be present when - # a developer runs this script locally - rm -f sdkconfig +cd ${IDF_PATH} - # If sdkconfig.ci file is present, append it to sdkconfig.defaults, - # replacing environment variables - if [[ -f "$SDKCONFIG_DEFAULTS_CI" ]]; then - cat $SDKCONFIG_DEFAULTS_CI | $IDF_PATH/tools/ci/envsubst.py >> sdkconfig.defaults - fi +# This part of the script produces the same result for all the example build jobs. It may be moved to a separate stage +# (pre-build) later, then the build jobs will receive ${BUILD_LIST_JSON} file as an artifact. - # build non-verbose first - local BUILDLOG=${LOG_PATH}/ex_${ID}_log.txt - touch ${BUILDLOG} +# If changing the work-dir or build-dir format, remember to update the "artifacts" in gitlab-ci configs, and IDFApp.py. 
- if [ "$EXAMPLE_NAME" != "idf_as_lib" ]; then - idf.py fullclean >>${BUILDLOG} 2>&1 && - idf.py build >>${BUILDLOG} 2>&1 - else - rm -rf build && - ./build-esp32.sh >>${BUILDLOG} 2>&1 - fi || - { - RESULT=$?; FAILED_EXAMPLES+=" ${EXAMPLE_NAME}" ; - } +${IDF_PATH}/tools/find_apps.py examples \ + -vv \ + --format json \ + --build-system cmake \ + --target ${IDF_TARGET} \ + --recursive \ + --exclude examples/build_system/idf_as_lib \ + --work-dir "${BUILD_PATH}/@f/@w/@t" \ + --build-dir build \ + --build-log "${LOG_PATH}/@f.txt" \ + --output ${ALL_BUILD_LIST_JSON} \ + --config 'sdkconfig.ci=default' \ + --config 'sdkconfig.ci.*=' \ + --config '=default' \ - cat ${BUILDLOG} - popd +# --config rules above explained: +# 1. If sdkconfig.ci exists, use it build the example with configuration name "default" +# 2. If sdkconfig.ci.* exists, use it to build the "*" configuration +# 3. If none of the above exist, build the default configuration under the name "default" - grep -i "error\|warning" "${BUILDLOG}" 2>&1 | grep -v "error.c.obj" >> "${LOG_SUSPECTED}" || : -} +# The part below is where the actual builds happen -EXAMPLE_NUM=0 +${IDF_PATH}/tools/build_apps.py \ + -vv \ + --format json \ + --keep-going \ + --parallel-count ${CI_NODE_TOTAL} \ + --parallel-index ${CI_NODE_INDEX} \ + --output-build-list ${JOB_BUILD_LIST_JSON} \ + ${ALL_BUILD_LIST_JSON}\ -echo "Current job will build example ${START_NUM} - ${END_NUM}" -for EXAMPLE_PATH in ${EXAMPLE_PATHS} -do - if [[ $EXAMPLE_NUM -lt $START_NUM || $EXAMPLE_NUM -ge $END_NUM ]] - then - EXAMPLE_NUM=$(( $EXAMPLE_NUM + 1 )) - continue - fi - echo ">>> example [ ${EXAMPLE_NUM} ] - $EXAMPLE_PATH" - - build_example "${EXAMPLE_NUM}" "${EXAMPLE_PATH}" - - EXAMPLE_NUM=$(( $EXAMPLE_NUM + 1 )) -done - -# show warnings -echo -e "\nFound issues:" - -# Ignore the next messages: -# "error.o" or "-Werror" in compiler's command line -# "reassigning to symbol" or "changes choice state" in sdkconfig -# 'Compiler and toochain versions is not supported' from crosstool_version_check.cmake -IGNORE_WARNS="\ -library/error\.o\ -\|\ -Werror\ -\|error\.d\ -\|reassigning to symbol\ -\|changes choice state\ -\|crosstool_version_check\.cmake\ -" - -sort -u "${LOG_SUSPECTED}" | grep -v "${IGNORE_WARNS}" \ - && RESULT=$RESULT_ISSUES \ - || echo -e "\tNone" - -[ -z ${FAILED_EXAMPLES} ] || echo -e "\nThere are errors in the next examples: $FAILED_EXAMPLES" -[ $RESULT -eq 0 ] || echo -e "\nFix all warnings and errors above to pass the test!" - -echo -e "\nReturn code = $RESULT" - -exit $RESULT +# Check for build warnings +${IDF_PATH}/tools/ci/check_build_warnings.py -vv ${JOB_BUILD_LIST_JSON} diff --git a/tools/ci/check_build_warnings.py b/tools/ci/check_build_warnings.py new file mode 100755 index 0000000000..285492d595 --- /dev/null +++ b/tools/ci/check_build_warnings.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# coding=utf-8 +# +# CI script to check build logs for warnings. +# Reads the list of builds, in the format produced by find_apps.py or build_apps.py, and finds warnings in the +# log files for every build. +# Exits with a non-zero exit code if any warning is found. 
+ +import os +import sys +import argparse +import logging +import re + +try: + from find_build_apps import BuildItem, setup_logging +except ImportError: + sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) + from find_build_apps import BuildItem, setup_logging + +WARNING_REGEX = r"error|warning" + +IGNORE_WARNS = [ + re.compile(r_str) for r_str in [ + r"library/error\.o", + r".*error.*\.c\.obj", + r"-Werror", + r"error\.d", + r"reassigning to symbol", + r"changes choice state", + r"crosstool_version_check\.cmake", + ] +] + + +def line_has_warnings(line): # type: (str) -> bool + if not re.search(WARNING_REGEX, line): + return False + + has_warnings = True + for ignored in IGNORE_WARNS: + if re.search(ignored, line): + has_warnings = False + break + + return has_warnings + + +def main(): + parser = argparse.ArgumentParser(description="ESP-IDF app builder") + parser.add_argument( + "-v", + "--verbose", + action="count", + help="Increase the logging level of the script. Can be specified multiple times.", + ) + parser.add_argument( + "--log-file", + type=argparse.FileType("w"), + help="Write the script log to the specified file, instead of stderr", + ) + parser.add_argument( + "build_list", + type=argparse.FileType("r"), + nargs="?", + default=sys.stdin, + help="Name of the file to read the list of builds from. If not specified, read from stdin.", + ) + args = parser.parse_args() + setup_logging(args) + + build_items = [BuildItem.from_json(line) for line in args.build_list] + + if not build_items: + logging.error("Empty build list!") + raise SystemExit(1) + + found_warnings = 0 + for build_item in build_items: + if not build_item.build_log_path: + logging.debug("No log file for {}".format(build_item.work_dir)) + continue + with open(build_item.build_log_path, "r") as log_file: + for line_no, line in enumerate(log_file): + if line_has_warnings(line): + logging.error("Issue in app {}, config {}:".format(build_item.app_dir, build_item.config_name)) + logging.error(line.rstrip("\n")) + logging.error("See {}:{} for details".format(os.path.basename(build_item.build_log_path), + line_no + 1)) + found_warnings += 1 + break + + if found_warnings: + logging.error("Checked {} builds, found {} warnings".format(len(build_items), found_warnings)) + raise SystemExit(1) + + logging.info("No warnings found") + + +if __name__ == "__main__": + main() diff --git a/tools/ci/ci_fetch_submodule.py b/tools/ci/ci_fetch_submodule.py new file mode 100644 index 0000000000..62401a6694 --- /dev/null +++ b/tools/ci/ci_fetch_submodule.py @@ -0,0 +1,102 @@ +import re +import os +import subprocess +import argparse +import shutil +import time + +import gitlab_api + +SUBMODULE_PATTERN = re.compile(r"\[submodule \"([^\"]+)\"]") +PATH_PATTERN = re.compile(r"path\s+=\s+(\S+)") +URL_PATTERN = re.compile(r"url\s+=\s+(\S+)") + +SUBMODULE_ARCHIVE_TEMP_FOLDER = "submodule_archive" + + +class SubModule(object): + # We don't need to support recursive submodule clone now + + GIT_LS_TREE_OUTPUT_PATTERN = re.compile(r"\d+\s+commit\s+([0-9a-f]+)\s+") + + def __init__(self, gitlab_inst, path, url): + self.path = path + self.gitlab_inst = gitlab_inst + self.project_id = self._get_project_id(url) + self.commit_id = self._get_commit_id(path) + + def _get_commit_id(self, path): + output = subprocess.check_output(["git", "ls-tree", "HEAD", path]) + output = output.decode() + # example output: 160000 commit d88a262fbdf35e5abb372280eb08008749c3faa0 components/esp_wifi/lib + match = self.GIT_LS_TREE_OUTPUT_PATTERN.search(output) + return 
match.group(1) + + def _get_project_id(self, url): + base_name = os.path.basename(url) + project_id = self.gitlab_inst.get_project_id(os.path.splitext(base_name)[0], # remove .git + namespace="espressif") + return project_id + + def download_archive(self): + print("Update submodule: {}: {}".format(self.path, self.commit_id)) + path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER, + self.project_id) + renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path)) + os.rename(path_name, renamed_path) + shutil.rmtree(self.path, ignore_errors=True) + shutil.move(renamed_path, os.path.dirname(self.path)) + + +def update_submodule(git_module_file, submodules_to_update): + gitlab_inst = gitlab_api.Gitlab() + submodules = [] + with open(git_module_file, "r") as f: + data = f.read() + match = SUBMODULE_PATTERN.search(data) + while True: + next_match = SUBMODULE_PATTERN.search(data, pos=match.end()) + if next_match: + end_pos = next_match.start() + else: + end_pos = len(data) + path_match = PATH_PATTERN.search(data, pos=match.end(), endpos=end_pos) + url_match = URL_PATTERN.search(data, pos=match.end(), endpos=end_pos) + path = path_match.group(1) + url = url_match.group(1) + + filter_result = True + if submodules_to_update: + if path not in submodules_to_update: + filter_result = False + if filter_result: + submodules.append(SubModule(gitlab_inst, path, url)) + + match = next_match + if not match: + break + + shutil.rmtree(SUBMODULE_ARCHIVE_TEMP_FOLDER, ignore_errors=True) + + for submodule in submodules: + submodule.download_archive() + + +if __name__ == '__main__': + start_time = time.time() + parser = argparse.ArgumentParser() + parser.add_argument("--repo_path", "-p", default=".", help="repo path") + parser.add_argument("--submodule", "-s", default="all", + help="Submodules to update. By default update all submodules. " + "For multiple submodules, separate them with `;`. " + "`all` and `none` are special values that indicates we fetch all / none submodules") + args = parser.parse_args() + if args.submodule == "none": + print("don't need to update submodules") + exit(0) + if args.submodule == "all": + _submodules = [] + else: + _submodules = args.submodule.split(";") + update_submodule(os.path.join(args.repo_path, ".gitmodules"), _submodules) + print("total time spent on update submodule: {:.02f}s".format(time.time() - start_time)) diff --git a/tools/ci/config/assign-test.yml b/tools/ci/config/assign-test.yml index 92dc8cad2c..e997062e36 100644 --- a/tools/ci/config/assign-test.yml +++ b/tools/ci/config/assign-test.yml @@ -8,16 +8,17 @@ assign_test: # we have a lot build example jobs. now we don't use dependencies, just download all artificats of build stage. 
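As a quick sanity check, the GIT_LS_TREE_OUTPUT_PATTERN used by ci_fetch_submodule.py above can be exercised against the sample git ls-tree line quoted in its own comment (the commit hash is the example from that comment, nothing more):

import re

GIT_LS_TREE_OUTPUT_PATTERN = re.compile(r"\d+\s+commit\s+([0-9a-f]+)\s+")
sample = "160000 commit d88a262fbdf35e5abb372280eb08008749c3faa0 components/esp_wifi/lib"
match = GIT_LS_TREE_OUTPUT_PATTERN.search(sample)
# group(1) is the submodule commit that gets passed to gitlab_api download_archive()
assert match.group(1) == "d88a262fbdf35e5abb372280eb08008749c3faa0"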
dependencies: - build_ssc - - build_esp_idf_tests_make - build_esp_idf_tests_cmake variables: - TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw" + SUBMODULES_TO_FETCH: "components/esptool_py/esptool" EXAMPLE_CONFIG_OUTPUT_PATH: "$CI_PROJECT_DIR/examples/test_configs" + UNIT_TEST_CASE_FILE: "${CI_PROJECT_DIR}/components/idf_test/unit_test/TestCaseAll.yml" artifacts: paths: - components/idf_test/*/CIConfigs - components/idf_test/*/TC.sqlite - $EXAMPLE_CONFIG_OUTPUT_PATH + - build_examples/artifact_index.json expire_in: 1 week only: variables: @@ -27,9 +28,9 @@ assign_test: - $BOT_LABEL_EXAMPLE_TEST script: # assign example tests - - python $TEST_FW_PATH/CIAssignExampleTest.py $IDF_PATH/examples $CI_TARGET_TEST_CONFIG_FILE $EXAMPLE_CONFIG_OUTPUT_PATH + - python tools/ci/python_packages/ttfw_idf/CIAssignExampleTest.py $IDF_PATH/examples $CI_TARGET_TEST_CONFIG_FILE $EXAMPLE_CONFIG_OUTPUT_PATH # assign unit test cases - - python $TEST_FW_PATH/CIAssignUnitTest.py $IDF_PATH/components/idf_test/unit_test/TestCaseAll.yml $CI_TARGET_TEST_CONFIG_FILE $IDF_PATH/components/idf_test/unit_test/CIConfigs + - python tools/ci/python_packages/ttfw_idf/CIAssignUnitTest.py $UNIT_TEST_CASE_FILE $CI_TARGET_TEST_CONFIG_FILE $IDF_PATH/components/idf_test/unit_test/CIConfigs # clone test script to assign tests - git clone $TEST_SCRIPT_REPOSITORY - python $CHECKOUT_REF_SCRIPT auto_test_script auto_test_script @@ -55,9 +56,9 @@ update_test_cases: - ${CI_PROJECT_DIR}/test-management/*.log expire_in: 1 week variables: + SUBMODULES_TO_FETCH: "components/esptool_py/esptool" UNIT_TEST_CASE_FILE: "${CI_PROJECT_DIR}/components/idf_test/unit_test/TestCaseAll.yml" BOT_ACCOUNT_CONFIG_FILE: "${CI_PROJECT_DIR}/test-management/Config/Account.local.yml" - TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw" AUTO_TEST_SCRIPT_PATH: "${CI_PROJECT_DIR}/auto_test_script" PYTHON_VER: 3 script: diff --git a/tools/ci/config/build.yml b/tools/ci/config/build.yml index 0d756d5325..a4cf8e4976 100644 --- a/tools/ci/config/build.yml +++ b/tools/ci/config/build.yml @@ -159,16 +159,20 @@ build_examples_make: artifacts: when: always paths: - - build_examples_cmake/*/*/*/*/build/*.bin - - build_examples_cmake/*/*/*/*/sdkconfig - - build_examples_cmake/*/*/*/*/build/*.elf - - build_examples_cmake/*/*/*/*/build/*.map - - build_examples_cmake/*/*/*/*/build/flasher_args.json - - build_examples_cmake/*/*/*/*/build/bootloader/*.bin + - build_examples/list.json + - build_examples/list_job_*.json + - build_examples/*/*/*/sdkconfig + - build_examples/*/*/*/build/*.bin + - build_examples/*/*/*/build/*.elf + - build_examples/*/*/*/build/*.map + - build_examples/*/*/*/build/flasher_args.json + - build_examples/*/*/*/build/bootloader/*.bin + - build_examples/*/*/*/build/partition_table/*.bin - $LOG_PATH expire_in: 3 days variables: - LOG_PATH: "$CI_PROJECT_DIR/log_examples_cmake" + LOG_PATH: "$CI_PROJECT_DIR/log_examples" + BUILD_PATH: "$CI_PROJECT_DIR/build_examples" only: variables: - $BOT_TRIGGER_WITH_LABEL == null @@ -179,10 +183,7 @@ build_examples_make: script: # it's not possible to build 100% out-of-tree and have the "artifacts" # mechanism work, but this is the next best thing - - rm -rf build_examples_cmake - - mkdir build_examples_cmake - - cd build_examples_cmake - # build some of examples + - mkdir -p ${BUILD_PATH} - mkdir -p ${LOG_PATH} - ${IDF_PATH}/tools/ci/build_examples_cmake.sh diff --git a/tools/ci/config/check.yml b/tools/ci/config/check.yml index 0baa375600..ba022c7ce6 100644 --- a/tools/ci/config/check.yml +++ b/tools/ci/config/check.yml 
@@ -95,7 +95,7 @@ check_submodule_sync: retry: 2 variables: GIT_STRATEGY: clone - GIT_SUBMODULE_STRATEGY: none + SUBMODULES_TO_FETCH: "none" PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git" before_script: [] after_script: [] diff --git a/tools/ci/config/host-test.yml b/tools/ci/config/host-test.yml index 4fcab23b8b..397c2c9940 100644 --- a/tools/ci/config/host-test.yml +++ b/tools/ci/config/host-test.yml @@ -277,6 +277,7 @@ check_doc_links: paths: - docs/_build/linkcheck expire_in: 1 week + dependencies: [] script: # must be triggered with CHECK_LINKS=Yes, otherwise exit without test - test "$CHECK_LINKS" = "Yes" || exit 0 diff --git a/tools/ci/config/target-test.yml b/tools/ci/config/target-test.yml index 03c3350720..a83506f56d 100644 --- a/tools/ci/config/target-test.yml +++ b/tools/ci/config/target-test.yml @@ -21,8 +21,6 @@ - $BOT_LABEL_EXAMPLE_TEST dependencies: - assign_test - - build_examples_make - - build_examples_cmake_esp32 artifacts: when: always paths: @@ -31,11 +29,11 @@ reports: junit: $LOG_PATH/*/XUNIT_RESULT.xml variables: - TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw" TEST_CASE_PATH: "$CI_PROJECT_DIR/examples" CONFIG_FILE_PATH: "${CI_PROJECT_DIR}/examples/test_configs" LOG_PATH: "$CI_PROJECT_DIR/TEST_LOGS" ENV_FILE: "$CI_PROJECT_DIR/ci-test-runner-configs/$CI_RUNNER_DESCRIPTION/EnvConfig.yml" + SUBMODULES_TO_FETCH: "components/esptool_py/esptool" script: - *define_config_file_name # first test if config file exists, if not exist, exit 0 @@ -43,7 +41,7 @@ # clone test env configs - git clone $TEST_ENV_CONFIG_REPOSITORY - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs - - cd $TEST_FW_PATH + - cd tools/ci/python_packages/tiny_test_fw/bin # run test - python Runner.py $TEST_CASE_PATH -c $CONFIG_FILE -e $ENV_FILE @@ -95,7 +93,7 @@ - $LOG_PATH expire_in: 1 week variables: - GIT_SUBMODULE_STRATEGY: none + SUBMODULES_TO_FETCH: "components/esptool_py/esptool" LOCAL_ENV_CONFIG_PATH: "$CI_PROJECT_DIR/ci-test-runner-configs/$CI_RUNNER_DESCRIPTION/ESP32_IDF" LOG_PATH: "${CI_PROJECT_DIR}/${CI_COMMIT_SHA}" TEST_CASE_FILE_PATH: "$CI_PROJECT_DIR/components/idf_test/integration_test" @@ -126,7 +124,6 @@ test_weekend_mqtt: - $BOT_LABEL_WEEKEND_TEST variables: TEST_CASE_PATH: "$CI_PROJECT_DIR/components/mqtt/weekend_test" - TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw" LOG_PATH: "$CI_PROJECT_DIR/TEST_LOGS" ENV_FILE: "$CI_PROJECT_DIR/components/mqtt/weekend_test/env.yml" CONFIG_FILE: "$CI_PROJECT_DIR/components/mqtt/weekend_test/config.yml" @@ -143,7 +140,6 @@ test_weekend_network: - $BOT_LABEL_WEEKEND_TEST variables: TEST_CASE_PATH: "$CI_PROJECT_DIR/components/lwip/weekend_test" - TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw" LOG_PATH: "$CI_PROJECT_DIR/TEST_LOGS" ENV_FILE: "$CI_PROJECT_DIR/components/lwip/weekend_test/env.yml" CONFIG_FILE: "$CI_PROJECT_DIR/components/lwip/weekend_test/config.yml" @@ -161,6 +157,16 @@ example_test_002: tags: - ESP32 - Example_ShieldBox_Basic + script: + - *define_config_file_name + # first test if config file exists, if not exist, exit 0 + - test -e $CONFIG_FILE || exit 0 + # clone test env configs + - git clone $TEST_ENV_CONFIG_REPOSITORY + - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs + - cd tools/ci/python_packages/tiny_test_fw/bin + # run test + - python Runner.py $TEST_CASE_PATH -c $CONFIG_FILE -e $ENV_FILE .example_test_003: extends: .example_test_template @@ -168,11 +174,17 @@ example_test_002: - ESP32 - Example_SDIO -example_test_004: +example_test_004A: extends: 
.example_test_template tags: - ESP32 - - Example_CAN + - Example_CAN1 + +example_test_004B: + extends: .example_test_template + tags: + - ESP32 + - Example_CAN2 example_test_005: extends: .example_test_template @@ -210,7 +222,7 @@ example_test_009: UT_001: extends: .unit_test_template - parallel: 50 + parallel: 29 tags: - ESP32_IDF - UT_T1_1 @@ -220,7 +232,7 @@ UT_001: UT_002: extends: .unit_test_template - parallel: 30 + parallel: 10 tags: - ESP32_IDF - UT_T1_1 @@ -228,18 +240,11 @@ UT_002: UT_003: extends: .unit_test_template - parallel: 3 + parallel: 2 tags: - ESP32_IDF - UT_T1_SDMODE -UT_004: - extends: .unit_test_template - parallel: 3 - tags: - - ESP32_IDF - - UT_T1_SPIMODE - UT_005: extends: .unit_test_template tags: @@ -254,13 +259,6 @@ UT_006: - UT_T1_SPIMODE - psram -UT_007: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T1_GPIO - UT_008: extends: .unit_test_template tags: @@ -268,13 +266,6 @@ UT_008: - UT_T1_GPIO - psram -UT_009: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T1_PCNT - UT_010: extends: .unit_test_template tags: @@ -282,13 +273,6 @@ UT_010: - UT_T1_PCNT - psram -UT_011: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T1_LEDC - UT_012: extends: .unit_test_template tags: @@ -296,13 +280,6 @@ UT_012: - UT_T1_LEDC - psram -UT_013: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T2_RS485 - UT_014: extends: .unit_test_template tags: @@ -312,7 +289,6 @@ UT_014: UT_015: extends: .unit_test_template - parallel: 4 tags: - ESP32_IDF - UT_T1_RMT @@ -326,26 +302,18 @@ UT_016: UT_017: extends: .unit_test_template - parallel: 3 tags: - ESP32_IDF - EMMC UT_018: extends: .unit_test_template - parallel: 5 + parallel: 2 tags: - ESP32_IDF - UT_T1_1 - 8Mpsram -UT_019: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - Example_SPI_Multi_device - UT_020: extends: .unit_test_template tags: @@ -353,13 +321,6 @@ UT_020: - Example_SPI_Multi_device - psram -UT_021: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T2_I2C - UT_022: extends: .unit_test_template tags: @@ -369,7 +330,6 @@ UT_022: UT_023: extends: .unit_test_template - parallel: 4 tags: - ESP32_IDF - UT_T1_MCPWM @@ -381,13 +341,6 @@ UT_024: - UT_T1_MCPWM - psram -UT_025: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T1_I2S - UT_026: extends: .unit_test_template tags: @@ -395,13 +348,6 @@ UT_026: - UT_T1_I2S - psram -UT_027: - extends: .unit_test_template - parallel: 3 - tags: - - ESP32_IDF - - UT_T2_1 - UT_028: extends: .unit_test_template tags: @@ -409,34 +355,12 @@ UT_028: - UT_T2_1 - psram -UT_029: - extends: .unit_test_template - tags: - - ESP32_IDF - - UT_T2_1 - - 8Mpsram - -# Gitlab parallel max value is 50. We need to create another UT job if parallel is larger than 50. 
-UT_030: - extends: .unit_test_template - parallel: 10 - tags: - - ESP32_IDF - - UT_T1_1 - UT_031: extends: .unit_test_template tags: - ESP32_IDF - UT_T1_FlashEncryption -UT_032: - extends: .unit_test_template - parallel: 4 - tags: - - ESP32_IDF - - UT_T2_Ethernet - UT_033: extends: .unit_test_template tags: @@ -446,26 +370,17 @@ UT_033: UT_034: extends: .unit_test_template - parallel: 4 tags: - ESP32_IDF - UT_T1_ESP_FLASH UT_035: extends: .unit_test_template - parallel: 2 tags: - ESP32_IDF - UT_T1_PSRAMV0 - psram -UT_036: - extends: .unit_test_template - parallel: 3 - tags: - - ESP32_IDF - - UT_T1_no32kXTAL - UT_037: extends: .unit_test_template tags: @@ -473,13 +388,6 @@ UT_037: - UT_T1_no32kXTAL - psram -UT_038: - extends: .unit_test_template - parallel: 3 - tags: - - ESP32_IDF - - UT_T1_32kXTAL - UT_039: extends: .unit_test_template tags: diff --git a/tools/ci/executable-list.txt b/tools/ci/executable-list.txt index 85596f382f..6168482255 100644 --- a/tools/ci/executable-list.txt +++ b/tools/ci/executable-list.txt @@ -5,7 +5,6 @@ components/espcoredump/espcoredump.py components/espcoredump/test/test_espcoredump.py components/espcoredump/test/test_espcoredump.sh components/heap/test_multi_heap_host/test_all_configs.sh -components/idf_test/unit_test/TestCaseScript/IDFUnitTest/__init__.py components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py components/partition_table/gen_empty_partition.py components/partition_table/gen_esp32part.py @@ -27,6 +26,7 @@ examples/system/ota/otatool/get_running_partition.py examples/system/ota/otatool/otatool_example.py examples/system/ota/otatool/otatool_example.sh install.sh +tools/build_apps.py tools/check_kconfigs.py tools/check_python_dependencies.py tools/ci/apply_bot_filter.py @@ -34,6 +34,7 @@ tools/ci/build_examples.sh tools/ci/build_examples_cmake.sh tools/ci/check-executable.sh tools/ci/check-line-endings.sh +tools/ci/check_build_warnings.py tools/ci/check_deprecated_kconfigs.py tools/ci/check_examples_cmake_make.sh tools/ci/check_idf_version.sh @@ -57,6 +58,7 @@ tools/esp_app_trace/logtrace_proc.py tools/esp_app_trace/sysviewtrace_proc.py tools/esp_app_trace/test/logtrace/test.sh tools/esp_app_trace/test/sysview/test.sh +tools/find_apps.py tools/format.sh tools/gen_esp_err_to_name.py tools/idf.py diff --git a/tools/ci/python_packages/gitlab_api.py b/tools/ci/python_packages/gitlab_api.py new file mode 100644 index 0000000000..d2e6abe7f7 --- /dev/null +++ b/tools/ci/python_packages/gitlab_api.py @@ -0,0 +1,174 @@ +import os +import re +import argparse +import tempfile +import tarfile +import zipfile + +import gitlab + + +class Gitlab(object): + JOB_NAME_PATTERN = re.compile(r"(\w+)(\s+(\d+)/(\d+))?") + + def __init__(self, project_id=None): + config_data_from_env = os.getenv("PYTHON_GITLAB_CONFIG") + if config_data_from_env: + # prefer to load config from env variable + with tempfile.NamedTemporaryFile("w", delete=False) as temp_file: + temp_file.write(config_data_from_env) + config_files = [temp_file.name] + else: + # otherwise try to use config file at local filesystem + config_files = None + self.gitlab_inst = gitlab.Gitlab.from_config(config_files=config_files) + self.gitlab_inst.auth() + if project_id: + self.project = self.gitlab_inst.projects.get(project_id) + else: + self.project = None + + def get_project_id(self, name, namespace=None): + """ + search project ID by name + + :param name: project name + :param namespace: namespace to match when we have multiple project with same name + :return: project ID + """ + projects = 
self.gitlab_inst.projects.list(search=name) + for project in projects: + if namespace is None: + if len(projects) == 1: + project_id = project.id + break + if project.namespace["path"] == namespace: + project_id = project.id + break + else: + raise ValueError("Can't find project") + return project_id + + def download_artifacts(self, job_id, destination): + """ + download full job artifacts and extract to destination. + + :param job_id: Gitlab CI job ID + :param destination: extract artifacts to path. + """ + job = self.project.jobs.get(job_id) + + with tempfile.NamedTemporaryFile(delete=False) as temp_file: + job.artifacts(streamed=True, action=temp_file.write) + + with zipfile.ZipFile(temp_file.name, "r") as archive_file: + archive_file.extractall(destination) + + def download_artifact(self, job_id, artifact_path, destination=None): + """ + download specific path of job artifacts and extract to destination. + + :param job_id: Gitlab CI job ID + :param artifact_path: list of path in artifacts (relative path to artifact root path) + :param destination: destination of artifact. Do not save to file if destination is None + :return: A list of artifact file raw data. + """ + job = self.project.jobs.get(job_id) + + raw_data_list = [] + + for a_path in artifact_path: + try: + data = job.artifact(a_path) + except gitlab.GitlabGetError as e: + print("Failed to download '{}' form job {}".format(a_path, job_id)) + raise e + raw_data_list.append(data) + if destination: + file_path = os.path.join(destination, a_path) + try: + os.makedirs(os.path.dirname(file_path)) + except OSError: + # already exists + pass + with open(file_path, "wb") as f: + f.write(data) + + return raw_data_list + + def find_job_id(self, job_name, pipeline_id=None): + """ + Get Job ID from job name of specific pipeline + + :param job_name: job name + :param pipeline_id: If None, will get pipeline id from CI pre-defined variable. 
+ :return: a list of job IDs (parallel job will generate multiple jobs) + """ + job_id_list = [] + if pipeline_id is None: + pipeline_id = os.getenv("CI_PIPELINE_ID") + pipeline = self.project.pipelines.get(pipeline_id) + jobs = pipeline.jobs.list(all=True) + for job in jobs: + match = self.JOB_NAME_PATTERN.match(job.name) + if match: + if match.group(1) == job_name: + job_id_list.append({"id": job.id, "parallel_num": match.group(3)}) + return job_id_list + + def download_archive(self, ref, destination, project_id=None): + """ + Download archive of certain commit of a repository and extract to destination path + + :param ref: commit or branch name + :param destination: destination path of extracted archive file + :param project_id: download project of current instance if project_id is None + :return: root path name of archive file + """ + if project_id is None: + project = self.project + else: + project = self.gitlab_inst.projects.get(project_id) + + with tempfile.NamedTemporaryFile(delete=False) as temp_file: + try: + project.repository_archive(sha=ref, streamed=True, action=temp_file.write) + except gitlab.GitlabGetError as e: + print("Failed to archive from project {}".format(project_id)) + raise e + + print("archive size: {:.03f}MB".format(float(os.path.getsize(temp_file.name)) / (1024 * 1024))) + + with tarfile.open(temp_file.name, "r") as archive_file: + root_name = archive_file.getnames()[0] + archive_file.extractall(destination) + + return os.path.join(os.path.realpath(destination), root_name) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("action") + parser.add_argument("project_id", type=int) + parser.add_argument("--pipeline_id", "-i", type=int, default=None) + parser.add_argument("--ref", "-r", default="master") + parser.add_argument("--job_id", "-j", type=int, default=None) + parser.add_argument("--job_name", "-n", default=None) + parser.add_argument("--project_name", "-m", default=None) + parser.add_argument("--destination", "-d", default=None) + parser.add_argument("--artifact_path", "-a", nargs="*", default=None) + args = parser.parse_args() + + gitlab_inst = Gitlab(args.project_id) + if args.action == "download_artifacts": + gitlab_inst.download_artifacts(args.job_id, args.destination) + if args.action == "download_artifact": + gitlab_inst.download_artifact(args.job_id, args.artifact_path, args.destination) + elif args.action == "find_job_id": + job_ids = gitlab_inst.find_job_id(args.job_name, args.pipeline_id) + print(";".join([",".join([str(j["id"]), j["parallel_num"]]) for j in job_ids])) + elif args.action == "download_archive": + gitlab_inst.download_archive(args.ref, args.destination) + elif args.action == "get_project_id": + ret = gitlab_inst.get_project_id(args.project_name) + print("project id: {}".format(ret)) diff --git a/tools/ci/python_packages/idf_http_server_test/__init__.py b/tools/ci/python_packages/idf_http_server_test/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/protocols/http_server/persistent_sockets/scripts/adder.py b/tools/ci/python_packages/idf_http_server_test/adder.py similarity index 99% rename from examples/protocols/http_server/persistent_sockets/scripts/adder.py rename to tools/ci/python_packages/idf_http_server_test/adder.py index a1c9c990f7..46eb75a557 100644 --- a/examples/protocols/http_server/persistent_sockets/scripts/adder.py +++ b/tools/ci/python_packages/idf_http_server_test/adder.py @@ -20,7 +20,8 @@ from builtins import str from builtins import range 
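The JOB_NAME_PATTERN in gitlab_api.py above relies on Gitlab naming parallel jobs as "<name> <index>/<total>". A minimal check of that assumption follows; the job name "UT_001 3/29" is only an example constructed for illustration, not taken from a real pipeline:

import re

JOB_NAME_PATTERN = re.compile(r"(\w+)(\s+(\d+)/(\d+))?")
match = JOB_NAME_PATTERN.match("UT_001 3/29")
assert match.group(1) == "UT_001"   # base job name compared against job_name
assert match.group(3) == "3"        # parallel_num; None for non-parallel jobs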
import http.client import argparse -import Utility + +from tiny_test_fw import Utility def start_session(ip, port): diff --git a/examples/protocols/http_server/simple/scripts/client.py b/tools/ci/python_packages/idf_http_server_test/client.py similarity index 96% rename from examples/protocols/http_server/simple/scripts/client.py rename to tools/ci/python_packages/idf_http_server_test/client.py index 5e070b98b8..95d195d824 100644 --- a/examples/protocols/http_server/simple/scripts/client.py +++ b/tools/ci/python_packages/idf_http_server_test/client.py @@ -20,19 +20,8 @@ from builtins import str import http.client import argparse -try: - import Utility -except ImportError: - import sys - import os - # This environment variable is expected on the host machine - # > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - - import Utility +from tiny_test_fw import Utility def verbose_print(verbosity, *args): diff --git a/tools/ci/python_packages/idf_http_server_test/test.py b/tools/ci/python_packages/idf_http_server_test/test.py new file mode 100644 index 0000000000..3ad5bcc9c7 --- /dev/null +++ b/tools/ci/python_packages/idf_http_server_test/test.py @@ -0,0 +1,1025 @@ +#!/usr/bin/env python +# +# Copyright 2018 Espressif Systems (Shanghai) PTE LTD +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Utility for testing the web server. Test cases: +# Assume the device supports 'n' simultaneous open sockets +# +# HTTP Server Tests +# +# 0. Firmware Settings: +# - Create a dormant thread whose sole job is to call httpd_stop() when instructed +# - Measure the following before httpd_start() is called: +# - current free memory +# - current free sockets +# - Measure the same whenever httpd_stop is called +# - Register maximum possible URI handlers: should be successful +# - Register one more URI handler: should fail +# - Deregister on URI handler: should be successful +# - Register on more URI handler: should succeed +# - Register separate handlers for /hello, /hello/type_html. Also +# ensure that /hello/type_html is registered BEFORE /hello. (tests +# that largest matching URI is picked properly) +# - Create URI handler /adder. Make sure it uses a custom free_ctx +# structure to free it up + +# 1. Using Standard Python HTTP Client +# - simple GET on /hello (returns Hello World. Ensures that basic +# firmware tests are complete, or returns error) +# - POST on /hello (should fail) +# - PUT on /hello (should fail) +# - simple POST on /echo (returns whatever the POST data) +# - simple PUT on /echo (returns whatever the PUT data) +# - GET on /echo (should fail) +# - simple GET on /hello/type_html (returns Content type as text/html) +# - simple GET on /hello/status_500 (returns HTTP status 500) +# - simple GET on /false_uri (returns HTTP status 404) +# - largest matching URI handler is picked is already verified because +# of /hello and /hello/type_html tests +# +# +# 2. 
Session Tests +# - Sessions + Pipelining basics: +# - Create max supported sessions +# - On session i, +# - send 3 back-to-back POST requests with data i on /adder +# - read back 3 responses. They should be i, 2i and 3i +# - Tests that +# - pipelining works +# - per-session context is maintained for all supported +# sessions +# - Close all sessions +# +# - Cleanup leftover data: Tests that the web server properly cleans +# up leftover data +# - Create a session +# - POST on /leftover_data with 52 bytes of data (data includes +# \r\n)(the handler only +# reads first 10 bytes and returns them, leaving the rest of the +# bytes unread) +# - GET on /hello (should return 'Hello World') +# - POST on /false_uri with 52 bytes of data (data includes \r\n) +# (should return HTTP 404) +# - GET on /hello (should return 'Hello World') +# +# - Test HTTPd Asynchronous response +# - Create a session +# - GET on /async_data +# - returns 'Hello World!' as a response +# - the handler schedules an async response, which generates a second +# response 'Hello Double World!' +# +# - Spillover test +# - Create max supported sessions with the web server +# - GET /hello on all the sessions (should return Hello World) +# - Create one more session, this should fail +# - GET /hello on all the sessions (should return Hello World) +# +# - Timeout test +# - Create a session and only Send 'GE' on the same (simulates a +# client that left the network halfway through a request) +# - Wait for recv-wait-timeout +# - Server should automatically close the socket + + +# ############ TODO TESTS ############# + +# 3. Stress Tests +# +# - httperf +# - Run the following httperf command: +# httperf --server=10.31.130.126 --wsess=8,50,0.5 --rate 8 --burst-length 2 +# +# - The above implies that the test suite will open +# - 8 simultaneous connections with the server +# - the rate of opening the sessions will be 8 per sec. So in our +# case, a new connection will be opened every 0.2 seconds for 1 second +# - The burst length 2 indicates that 2 requests will be sent +# simultaneously on the same connection in a single go +# - 0.5 seconds is the time between sending out 2 bursts +# - 50 is the total number of requests that will be sent out +# +# - So in the above example, the test suite will open 8 +# connections, each separated by 0.2 seconds. On each connection +# it will send 2 requests in a single burst. The bursts on a +# single connection will be separated by 0.5 seconds. A total of +# 25 bursts (25 x 2 = 50) will be sent out + +# 4. 
Leak Tests +# - Simple Leak test +# - Simple GET on /hello/restart (returns success, stop web server, measures leaks, restarts webserver) +# - Simple GET on /hello/restart_results (returns the leak results) +# - Leak test with open sockets +# - Open 8 sessions +# - Simple GET on /hello/restart (returns success, stop web server, +# measures leaks, restarts webserver) +# - All sockets should get closed +# - Simple GET on /hello/restart_results (returns the leak results) + + +from __future__ import division +from __future__ import print_function +from builtins import str +from builtins import range +from builtins import object +import threading +import socket +import time +import argparse +import http.client +import sys +import string +import random + +from tiny_test_fw import Utility + +_verbose_ = False + + +class Session(object): + def __init__(self, addr, port, timeout=15): + self.client = socket.create_connection((addr, int(port)), timeout=timeout) + self.target = addr + self.status = 0 + self.encoding = '' + self.content_type = '' + self.content_len = 0 + + def send_err_check(self, request, data=None): + rval = True + try: + self.client.sendall(request.encode()) + if data: + self.client.sendall(data.encode()) + except socket.error as err: + self.client.close() + Utility.console_log("Socket Error in send :", err) + rval = False + return rval + + def send_get(self, path, headers=None): + request = "GET " + path + " HTTP/1.1\r\nHost: " + self.target + if headers: + for field, value in headers.items(): + request += "\r\n" + field + ": " + value + request += "\r\n\r\n" + return self.send_err_check(request) + + def send_put(self, path, data, headers=None): + request = "PUT " + path + " HTTP/1.1\r\nHost: " + self.target + if headers: + for field, value in headers.items(): + request += "\r\n" + field + ": " + value + request += "\r\nContent-Length: " + str(len(data)) + "\r\n\r\n" + return self.send_err_check(request, data) + + def send_post(self, path, data, headers=None): + request = "POST " + path + " HTTP/1.1\r\nHost: " + self.target + if headers: + for field, value in headers.items(): + request += "\r\n" + field + ": " + value + request += "\r\nContent-Length: " + str(len(data)) + "\r\n\r\n" + return self.send_err_check(request, data) + + def read_resp_hdrs(self): + try: + state = 'nothing' + resp_read = '' + while True: + char = self.client.recv(1).decode() + if char == '\r' and state == 'nothing': + state = 'first_cr' + elif char == '\n' and state == 'first_cr': + state = 'first_lf' + elif char == '\r' and state == 'first_lf': + state = 'second_cr' + elif char == '\n' and state == 'second_cr': + state = 'second_lf' + else: + state = 'nothing' + resp_read += char + if state == 'second_lf': + break + # Handle first line + line_hdrs = resp_read.splitlines() + line_comp = line_hdrs[0].split() + self.status = line_comp[1] + del line_hdrs[0] + self.encoding = '' + self.content_type = '' + headers = dict() + # Process other headers + for h in range(len(line_hdrs)): + line_comp = line_hdrs[h].split(':') + if line_comp[0] == 'Content-Length': + self.content_len = int(line_comp[1]) + if line_comp[0] == 'Content-Type': + self.content_type = line_comp[1].lstrip() + if line_comp[0] == 'Transfer-Encoding': + self.encoding = line_comp[1].lstrip() + if len(line_comp) == 2: + headers[line_comp[0]] = line_comp[1].lstrip() + return headers + except socket.error as err: + self.client.close() + Utility.console_log("Socket Error in recv :", err) + return None + + def read_resp_data(self): + try: + read_data = 
'' + if self.encoding != 'chunked': + while len(read_data) != self.content_len: + read_data += self.client.recv(self.content_len).decode() + else: + chunk_data_buf = '' + while (True): + # Read one character into temp buffer + read_ch = self.client.recv(1) + # Check CRLF + if (read_ch == '\r'): + read_ch = self.client.recv(1).decode() + if (read_ch == '\n'): + # If CRLF decode length of chunk + chunk_len = int(chunk_data_buf, 16) + # Keep adding to contents + self.content_len += chunk_len + rem_len = chunk_len + while (rem_len): + new_data = self.client.recv(rem_len) + read_data += new_data + rem_len -= len(new_data) + chunk_data_buf = '' + # Fetch remaining CRLF + if self.client.recv(2) != "\r\n": + # Error in packet + Utility.console_log("Error in chunked data") + return None + if not chunk_len: + # If last chunk + break + continue + chunk_data_buf += '\r' + # If not CRLF continue appending + # character to chunked data buffer + chunk_data_buf += read_ch + return read_data + except socket.error as err: + self.client.close() + Utility.console_log("Socket Error in recv :", err) + return None + + def close(self): + self.client.close() + + +def test_val(text, expected, received): + if expected != received: + Utility.console_log(" Fail!") + Utility.console_log(" [reason] " + text + ":") + Utility.console_log(" expected: " + str(expected)) + Utility.console_log(" received: " + str(received)) + return False + return True + + +class adder_thread (threading.Thread): + def __init__(self, id, dut, port): + threading.Thread.__init__(self) + self.id = id + self.dut = dut + self.depth = 3 + self.session = Session(dut, port) + + def run(self): + self.response = [] + + # Pipeline 3 requests + if (_verbose_): + Utility.console_log(" Thread: Using adder start " + str(self.id)) + + for _ in range(self.depth): + self.session.send_post('/adder', str(self.id)) + time.sleep(2) + + for _ in range(self.depth): + self.session.read_resp_hdrs() + self.response.append(self.session.read_resp_data()) + + def adder_result(self): + if len(self.response) != self.depth: + Utility.console_log("Error : missing response packets") + return False + for i in range(len(self.response)): + if not test_val("Thread" + str(self.id) + " response[" + str(i) + "]", + str(self.id * (i + 1)), str(self.response[i])): + return False + return True + + def close(self): + self.session.close() + + +def get_hello(dut, port): + # GET /hello should return 'Hello World!' + Utility.console_log("[test] GET /hello returns 'Hello World!' 
=>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("GET", "/hello") + resp = conn.getresponse() + if not test_val("status_code", 200, resp.status): + conn.close() + return False + if not test_val("data", "Hello World!", resp.read().decode()): + conn.close() + return False + if not test_val("data", "text/html", resp.getheader('Content-Type')): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def put_hello(dut, port): + # PUT /hello returns 405' + Utility.console_log("[test] PUT /hello returns 405 =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("PUT", "/hello", "Hello") + resp = conn.getresponse() + if not test_val("status_code", 405, resp.status): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def post_hello(dut, port): + # POST /hello returns 405' + Utility.console_log("[test] POST /hello returns 405 =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("POST", "/hello", "Hello") + resp = conn.getresponse() + if not test_val("status_code", 405, resp.status): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def post_echo(dut, port): + # POST /echo echoes data' + Utility.console_log("[test] POST /echo echoes data =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("POST", "/echo", "Hello") + resp = conn.getresponse() + if not test_val("status_code", 200, resp.status): + conn.close() + return False + if not test_val("data", "Hello", resp.read().decode()): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def put_echo(dut, port): + # PUT /echo echoes data' + Utility.console_log("[test] PUT /echo echoes data =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("PUT", "/echo", "Hello") + resp = conn.getresponse() + if not test_val("status_code", 200, resp.status): + conn.close() + return False + if not test_val("data", "Hello", resp.read().decode()): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def get_echo(dut, port): + # GET /echo returns 404' + Utility.console_log("[test] GET /echo returns 405 =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("GET", "/echo") + resp = conn.getresponse() + if not test_val("status_code", 405, resp.status): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def get_test_headers(dut, port): + # GET /test_header returns data of Header2' + Utility.console_log("[test] GET /test_header =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + custom_header = {"Header1": "Value1", "Header3": "Value3"} + header2_values = ["", " ", "Value2", " Value2", "Value2 ", " Value2 "] + for val in header2_values: + custom_header["Header2"] = val + conn.request("GET", "/test_header", headers=custom_header) + resp = conn.getresponse() + if not test_val("status_code", 200, resp.status): + conn.close() + return False + hdr_val_start_idx = val.find("Value2") + if hdr_val_start_idx == -1: + if not test_val("header: Header2", "", resp.read().decode()): + conn.close() + return False + else: + if not test_val("header: Header2", val[hdr_val_start_idx:], resp.read().decode()): + conn.close() + return False + resp.read() + 
Utility.console_log("Success") + conn.close() + return True + + +def get_hello_type(dut, port): + # GET /hello/type_html returns text/html as Content-Type' + Utility.console_log("[test] GET /hello/type_html has Content-Type of text/html =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("GET", "/hello/type_html") + resp = conn.getresponse() + if not test_val("status_code", 200, resp.status): + conn.close() + return False + if not test_val("data", "Hello World!", resp.read().decode()): + conn.close() + return False + if not test_val("data", "text/html", resp.getheader('Content-Type')): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def get_hello_status(dut, port): + # GET /hello/status_500 returns status 500' + Utility.console_log("[test] GET /hello/status_500 returns status 500 =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("GET", "/hello/status_500") + resp = conn.getresponse() + if not test_val("status_code", 500, resp.status): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def get_false_uri(dut, port): + # GET /false_uri returns status 404' + Utility.console_log("[test] GET /false_uri returns status 404 =>", end=' ') + conn = http.client.HTTPConnection(dut, int(port), timeout=15) + conn.request("GET", "/false_uri") + resp = conn.getresponse() + if not test_val("status_code", 404, resp.status): + conn.close() + return False + Utility.console_log("Success") + conn.close() + return True + + +def parallel_sessions_adder(dut, port, max_sessions): + # POSTs on /adder in parallel sessions + Utility.console_log("[test] POST {pipelined} on /adder in " + str(max_sessions) + " sessions =>", end=' ') + t = [] + # Create all sessions + for i in range(max_sessions): + t.append(adder_thread(i, dut, port)) + + for i in range(len(t)): + t[i].start() + + for i in range(len(t)): + t[i].join() + + res = True + for i in range(len(t)): + if not test_val("Thread" + str(i) + " Failed", t[i].adder_result(), True): + res = False + t[i].close() + if (res): + Utility.console_log("Success") + return res + + +def async_response_test(dut, port): + # Test that an asynchronous work is executed in the HTTPD's context + # This is tested by reading two responses over the same session + Utility.console_log("[test] Test HTTPD Work Queue (Async response) =>", end=' ') + s = Session(dut, port) + + s.send_get('/async_data') + s.read_resp_hdrs() + if not test_val("First Response", "Hello World!", s.read_resp_data()): + s.close() + return False + s.read_resp_hdrs() + if not test_val("Second Response", "Hello Double World!", s.read_resp_data()): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def leftover_data_test(dut, port): + # Leftover data in POST is purged (valid and invalid URIs) + Utility.console_log("[test] Leftover data in POST is purged (valid and invalid URIs) =>", end=' ') + s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + + s.request("POST", url='/leftover_data', body="abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz") + resp = s.getresponse() + if not test_val("Partial data", "abcdefghij", resp.read().decode()): + s.close() + return False + + s.request("GET", url='/hello') + resp = s.getresponse() + if not test_val("Hello World Data", "Hello World!", resp.read().decode()): + s.close() + return False + + s.request("POST", url='/false_uri', 
body="abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz") + resp = s.getresponse() + if not test_val("False URI Status", str(404), str(resp.status)): + s.close() + return False + # socket would have been closed by server due to error + s.close() + + s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + s.request("GET", url='/hello') + resp = s.getresponse() + if not test_val("Hello World Data", "Hello World!", resp.read().decode()): + s.close() + return False + + s.close() + Utility.console_log("Success") + return True + + +def spillover_session(dut, port, max_sess): + # Session max_sess_sessions + 1 is rejected + Utility.console_log("[test] Session max_sess_sessions (" + str(max_sess) + ") + 1 is rejected =>", end=' ') + s = [] + _verbose_ = True + for i in range(max_sess + 1): + if (_verbose_): + Utility.console_log("Executing " + str(i)) + try: + a = http.client.HTTPConnection(dut + ":" + port, timeout=15) + a.request("GET", url='/hello') + resp = a.getresponse() + if not test_val("Connection " + str(i), "Hello World!", resp.read().decode()): + a.close() + break + s.append(a) + except Exception: + if (_verbose_): + Utility.console_log("Connection " + str(i) + " rejected") + a.close() + break + + # Close open connections + for a in s: + a.close() + + # Check if number of connections is equal to max_sess + Utility.console_log(["Fail","Success"][len(s) == max_sess]) + return (len(s) == max_sess) + + +def recv_timeout_test(dut, port): + Utility.console_log("[test] Timeout occurs if partial packet sent =>", end=' ') + s = Session(dut, port) + s.client.sendall(b"GE") + s.read_resp_hdrs() + resp = s.read_resp_data() + if not test_val("Request Timeout", "Server closed this connection", resp): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def packet_size_limit_test(dut, port, test_size): + Utility.console_log("[test] send size limit test =>", end=' ') + retry = 5 + while (retry): + retry -= 1 + Utility.console_log("data size = ", test_size) + s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + random_data = ''.join(string.printable[random.randint(0,len(string.printable)) - 1] for _ in list(range(test_size))) + path = "/echo" + s.request("POST", url=path, body=random_data) + resp = s.getresponse() + if not test_val("Error", "200", str(resp.status)): + if test_val("Error", "500", str(resp.status)): + Utility.console_log("Data too large to be allocated") + test_size = test_size // 10 + else: + Utility.console_log("Unexpected error") + s.close() + Utility.console_log("Retry...") + continue + resp = resp.read().decode() + result = (resp == random_data) + if not result: + test_val("Data size", str(len(random_data)), str(len(resp))) + s.close() + Utility.console_log("Retry...") + continue + s.close() + Utility.console_log("Success") + return True + Utility.console_log("Failed") + return False + + +def arbitrary_termination_test(dut, port): + Utility.console_log("[test] Arbitrary termination test =>", end=' ') + cases = [ + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\r\n\r\n", + "code": "200", + "header": "SomeValue" + }, + { + "request": "POST /echo HTTP/1.1\nHost: " + dut + "\r\nCustom: SomeValue\r\n\r\n", + "code": "200", + "header": "SomeValue" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\nCustom: SomeValue\r\n\r\n", + "code": "200", + "header": "SomeValue" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\n\r\n", + "code": "200", + "header": 
"SomeValue" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\r\n\n", + "code": "200", + "header": "SomeValue" + }, + { + "request": "POST /echo HTTP/1.1\nHost: " + dut + "\nCustom: SomeValue\n\n", + "code": "200", + "header": "SomeValue" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\r\nABCDE", + "code": "200", + "body": "ABCDE" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\r\n\nABCDE", + "code": "200", + "body": "ABCDE" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\nABCDE", + "code": "200", + "body": "ABCDE" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\n\rABCD", + "code": "200", + "body": "\rABCD" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\r\nCustom: SomeValue\r\r\n\r\r\n", + "code": "400" + }, + { + "request": "POST /echo HTTP/1.1\r\r\nHost: " + dut + "\r\n\r\n", + "code": "400" + }, + { + "request": "POST /echo HTTP/1.1\r\n\rHost: " + dut + "\r\n\r\n", + "code": "400" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\rCustom: SomeValue\r\n", + "code": "400" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: Some\rValue\r\n", + "code": "400" + }, + { + "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom- SomeValue\r\n\r\n", + "code": "400" + } + ] + for case in cases: + s = Session(dut, port) + s.client.sendall((case['request']).encode()) + resp_hdrs = s.read_resp_hdrs() + resp_body = s.read_resp_data() + s.close() + if not test_val("Response Code", case["code"], s.status): + return False + if "header" in case.keys(): + resp_hdr_val = None + if "Custom" in resp_hdrs.keys(): + resp_hdr_val = resp_hdrs["Custom"] + if not test_val("Response Header", case["header"], resp_hdr_val): + return False + if "body" in case.keys(): + if not test_val("Response Body", case["body"], resp_body): + return False + Utility.console_log("Success") + return True + + +def code_500_server_error_test(dut, port): + Utility.console_log("[test] 500 Server Error test =>", end=' ') + s = Session(dut, port) + # Sending a very large content length will cause malloc to fail + content_len = 2**30 + s.client.sendall(("POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: " + str(content_len) + "\r\n\r\nABCD").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Server Error", "500", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def code_501_method_not_impl(dut, port): + Utility.console_log("[test] 501 Method Not Implemented =>", end=' ') + s = Session(dut, port) + path = "/hello" + s.client.sendall(("ABC " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + # Presently server sends back 400 Bad Request + # if not test_val("Server Error", "501", s.status): + # s.close() + # return False + if not test_val("Server Error", "400", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def code_505_version_not_supported(dut, port): + Utility.console_log("[test] 505 Version Not Supported =>", end=' ') + s = Session(dut, port) + path = "/hello" + s.client.sendall(("GET " + path + " HTTP/2.0\r\nHost: " + dut + "\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Server Error", "505", s.status): + s.close() + return False + s.close() + 
Utility.console_log("Success") + return True + + +def code_400_bad_request(dut, port): + Utility.console_log("[test] 400 Bad Request =>", end=' ') + s = Session(dut, port) + path = "/hello" + s.client.sendall(("XYZ " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Client Error", "400", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def code_404_not_found(dut, port): + Utility.console_log("[test] 404 Not Found =>", end=' ') + s = Session(dut, port) + path = "/dummy" + s.client.sendall(("GET " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Client Error", "404", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def code_405_method_not_allowed(dut, port): + Utility.console_log("[test] 405 Method Not Allowed =>", end=' ') + s = Session(dut, port) + path = "/hello" + s.client.sendall(("POST " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Client Error", "405", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def code_408_req_timeout(dut, port): + Utility.console_log("[test] 408 Request Timeout =>", end=' ') + s = Session(dut, port) + s.client.sendall(("POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 10\r\n\r\nABCD").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Client Error", "408", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def code_411_length_required(dut, port): + Utility.console_log("[test] 411 Length Required =>", end=' ') + s = Session(dut, port) + path = "/echo" + s.client.sendall(("POST " + path + " HTTP/1.1\r\nHost: " + dut + "\r\nContent-Type: text/plain\r\nTransfer-Encoding: chunked\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + # Presently server sends back 400 Bad Request + # if not test_val("Client Error", "411", s.status): + # s.close() + # return False + if not test_val("Client Error", "400", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +def send_getx_uri_len(dut, port, length): + s = Session(dut, port) + method = "GET " + version = " HTTP/1.1\r\n" + path = "/" + "x" * (length - len(method) - len(version) - len("/")) + s.client.sendall(method.encode()) + time.sleep(1) + s.client.sendall(path.encode()) + time.sleep(1) + s.client.sendall((version + "Host: " + dut + "\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + s.close() + return s.status + + +def code_414_uri_too_long(dut, port, max_uri_len): + Utility.console_log("[test] 414 URI Too Long =>", end=' ') + status = send_getx_uri_len(dut, port, max_uri_len) + if not test_val("Client Error", "404", status): + return False + status = send_getx_uri_len(dut, port, max_uri_len + 1) + if not test_val("Client Error", "414", status): + return False + Utility.console_log("Success") + return True + + +def send_postx_hdr_len(dut, port, length): + s = Session(dut, port) + path = "/echo" + host = "Host: " + dut + custom_hdr_field = "\r\nCustom: " + custom_hdr_val = "x" * (length - len(host) - len(custom_hdr_field) - len("\r\n\r\n") + len("0")) + request = ("POST " + path + " HTTP/1.1\r\n" + host + custom_hdr_field + custom_hdr_val + "\r\n\r\n").encode() + 
s.client.sendall(request[:length // 2]) + time.sleep(1) + s.client.sendall(request[length // 2:]) + hdr = s.read_resp_hdrs() + resp = s.read_resp_data() + s.close() + if hdr and ("Custom" in hdr): + return (hdr["Custom"] == custom_hdr_val), resp + return False, s.status + + +def code_431_hdr_too_long(dut, port, max_hdr_len): + Utility.console_log("[test] 431 Header Too Long =>", end=' ') + res, status = send_postx_hdr_len(dut, port, max_hdr_len) + if not res: + return False + res, status = send_postx_hdr_len(dut, port, max_hdr_len + 1) + if not test_val("Client Error", "431", status): + return False + Utility.console_log("Success") + return True + + +def test_upgrade_not_supported(dut, port): + Utility.console_log("[test] Upgrade Not Supported =>", end=' ') + s = Session(dut, port) + # path = "/hello" + s.client.sendall(("OPTIONS * HTTP/1.1\r\nHost:" + dut + "\r\nUpgrade: TLS/1.0\r\nConnection: Upgrade\r\n\r\n").encode()) + s.read_resp_hdrs() + s.read_resp_data() + if not test_val("Client Error", "400", s.status): + s.close() + return False + s.close() + Utility.console_log("Success") + return True + + +if __name__ == '__main__': + # Execution begins here... + # Configuration + # Max number of threads/sessions + max_sessions = 7 + max_uri_len = 512 + max_hdr_len = 512 + + parser = argparse.ArgumentParser(description='Run HTTPD Test') + parser.add_argument('-4','--ipv4', help='IPv4 address') + parser.add_argument('-6','--ipv6', help='IPv6 address') + parser.add_argument('-p','--port', help='Port') + args = vars(parser.parse_args()) + + dut4 = args['ipv4'] + dut6 = args['ipv6'] + port = args['port'] + dut = dut4 + + _verbose_ = True + + Utility.console_log("### Basic HTTP Client Tests") + get_hello(dut, port) + post_hello(dut, port) + put_hello(dut, port) + post_echo(dut, port) + get_echo(dut, port) + put_echo(dut, port) + get_hello_type(dut, port) + get_hello_status(dut, port) + get_false_uri(dut, port) + get_test_headers(dut, port) + + Utility.console_log("### Error code tests") + code_500_server_error_test(dut, port) + code_501_method_not_impl(dut, port) + code_505_version_not_supported(dut, port) + code_400_bad_request(dut, port) + code_404_not_found(dut, port) + code_405_method_not_allowed(dut, port) + code_408_req_timeout(dut, port) + code_414_uri_too_long(dut, port, max_uri_len) + code_431_hdr_too_long(dut, port, max_hdr_len) + test_upgrade_not_supported(dut, port) + + # Not supported yet (Error on chunked request) + # code_411_length_required(dut, port) + + Utility.console_log("### Sessions and Context Tests") + parallel_sessions_adder(dut, port, max_sessions) + leftover_data_test(dut, port) + async_response_test(dut, port) + spillover_session(dut, port, max_sessions) + recv_timeout_test(dut, port) + packet_size_limit_test(dut, port, 50 * 1024) + arbitrary_termination_test(dut, port) + get_hello(dut, port) + + sys.exit() diff --git a/tools/tiny-test-fw/Utility/Attenuator.py b/tools/ci/python_packages/idf_iperf_test_util/Attenuator.py similarity index 100% rename from tools/tiny-test-fw/Utility/Attenuator.py rename to tools/ci/python_packages/idf_iperf_test_util/Attenuator.py diff --git a/tools/tiny-test-fw/Utility/LineChart.py b/tools/ci/python_packages/idf_iperf_test_util/LineChart.py similarity index 100% rename from tools/tiny-test-fw/Utility/LineChart.py rename to tools/ci/python_packages/idf_iperf_test_util/LineChart.py diff --git a/tools/tiny-test-fw/Utility/PowerControl.py b/tools/ci/python_packages/idf_iperf_test_util/PowerControl.py similarity index 100% rename from 
tools/tiny-test-fw/Utility/PowerControl.py rename to tools/ci/python_packages/idf_iperf_test_util/PowerControl.py diff --git a/examples/wifi/iperf/test_report.py b/tools/ci/python_packages/idf_iperf_test_util/TestReport.py similarity index 100% rename from examples/wifi/iperf/test_report.py rename to tools/ci/python_packages/idf_iperf_test_util/TestReport.py diff --git a/tools/ci/python_packages/idf_iperf_test_util/__init__.py b/tools/ci/python_packages/idf_iperf_test_util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tools/tiny-test-fw/App.py b/tools/ci/python_packages/tiny_test_fw/App.py similarity index 94% rename from tools/tiny-test-fw/App.py rename to tools/ci/python_packages/tiny_test_fw/App.py index 1dbadf85af..77053bf8fd 100644 --- a/tools/tiny-test-fw/App.py +++ b/tools/ci/python_packages/tiny_test_fw/App.py @@ -38,9 +38,11 @@ class BaseApp(object): Also implements some common methods. :param app_path: the path for app. + :param config_name: app configuration to be tested + :param target: build target """ - def __init__(self, app_path): + def __init__(self, app_path, config_name=None, target=None): pass @classmethod diff --git a/tools/tiny-test-fw/DUT.py b/tools/ci/python_packages/tiny_test_fw/DUT.py similarity index 99% rename from tools/tiny-test-fw/DUT.py rename to tools/ci/python_packages/tiny_test_fw/DUT.py index 4ecf76cb65..71007d85b0 100644 --- a/tools/tiny-test-fw/DUT.py +++ b/tools/ci/python_packages/tiny_test_fw/DUT.py @@ -275,6 +275,7 @@ class BaseDUT(object): DEFAULT_EXPECT_TIMEOUT = 10 MAX_EXPECT_FAILURES_TO_SAVED = 10 RECV_THREAD_CLS = RecvThread + TARGET = None """ DUT subclass can specify RECV_THREAD_CLS to do add some extra stuff when receive data. For example, DUT can implement exception detect & analysis logic in receive thread subclass. """ LOG_THREAD = _LogThread() @@ -377,15 +378,14 @@ class BaseDUT(object): # methods that need to be overwritten by Tool @classmethod - def confirm_dut(cls, port, app, **kwargs): + def confirm_dut(cls, port, **kwargs): """ confirm if it's a DUT, usually used by auto detecting DUT in by Env config. subclass (tool) must overwrite this method. 
:param port: comport - :param app: app instance - :return: True or False + :return: tuple of result (bool), and target (str) """ pass diff --git a/tools/tiny-test-fw/Env.py b/tools/ci/python_packages/tiny_test_fw/Env.py similarity index 96% rename from tools/tiny-test-fw/Env.py rename to tools/ci/python_packages/tiny_test_fw/Env.py index 3622ba3824..c6a9fb9ab3 100644 --- a/tools/tiny-test-fw/Env.py +++ b/tools/ci/python_packages/tiny_test_fw/Env.py @@ -62,7 +62,7 @@ class Env(object): self.lock = threading.RLock() @_synced - def get_dut(self, dut_name, app_path, dut_class=None, app_class=None, **dut_init_args): + def get_dut(self, dut_name, app_path, dut_class=None, app_class=None, app_config_name=None, **dut_init_args): """ get_dut(dut_name, app_path, dut_class=None, app_class=None) @@ -70,6 +70,7 @@ class Env(object): :param app_path: application path, app instance will use this path to process application info :param dut_class: dut class, if not specified will use default dut class of env :param app_class: app class, if not specified will use default app of env + :param app_config_name: app build config :keyword dut_init_args: extra kwargs used when creating DUT instance :return: dut instance """ @@ -80,7 +81,6 @@ class Env(object): dut_class = self.default_dut_cls if app_class is None: app_class = self.app_cls - app_inst = app_class(app_path) try: port = self.config.get_variable(dut_name) except ValueError: @@ -89,10 +89,14 @@ class Env(object): available_ports = dut_class.list_available_ports() for port in available_ports: if port not in allocated_ports: - if dut_class.confirm_dut(port, app_inst): + result = dut_class.confirm_dut(port) + if result: break else: port = None + + app_inst = app_class(app_path, app_config_name) + if port: try: dut_config = self.get_variable(dut_name + "_port_config") diff --git a/tools/tiny-test-fw/EnvConfig.py b/tools/ci/python_packages/tiny_test_fw/EnvConfig.py similarity index 100% rename from tools/tiny-test-fw/EnvConfig.py rename to tools/ci/python_packages/tiny_test_fw/EnvConfig.py diff --git a/tools/tiny-test-fw/EnvConfigTemplate.yml b/tools/ci/python_packages/tiny_test_fw/EnvConfigTemplate.yml similarity index 100% rename from tools/tiny-test-fw/EnvConfigTemplate.yml rename to tools/ci/python_packages/tiny_test_fw/EnvConfigTemplate.yml diff --git a/tools/tiny-test-fw/TinyFW.py b/tools/ci/python_packages/tiny_test_fw/TinyFW.py similarity index 100% rename from tools/tiny-test-fw/TinyFW.py rename to tools/ci/python_packages/tiny_test_fw/TinyFW.py diff --git a/tools/tiny-test-fw/Utility/CIAssignTest.py b/tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py similarity index 71% rename from tools/tiny-test-fw/Utility/CIAssignTest.py rename to tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py index 6a93f9a885..056d75f1c9 100644 --- a/tools/tiny-test-fw/Utility/CIAssignTest.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py @@ -44,8 +44,12 @@ import re import json import yaml +try: + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader as Loader -from Utility import (CaseConfig, SearchCases, GitlabCIJob, console_log) +from . import (CaseConfig, SearchCases, GitlabCIJob, console_log) class Group(object): @@ -100,6 +104,20 @@ class Group(object): added = True return added + def add_extra_case(self, case): + """ + By default (``add_case`` method), cases will only be added when have equal values of all filters with group. 
+ But in some cases, we also want to add cases which are not best fit. + For example, one group has can run cases require (A, B). It can also accept cases require (A, ) and (B, ). + When assign failed by best fit, we will use this method to try if we can assign all failed cases. + + If subclass want to retry, they need to overwrite this method. + Logic can be applied to handle such scenario could be different for different cases. + + :return: True if accepted else False + """ + pass + def output(self): """ output data for job configs @@ -150,7 +168,7 @@ class AssignTest(object): def _parse_gitlab_ci_config(self, ci_config_file): with open(ci_config_file, "r") as f: - ci_config = yaml.load(f) + ci_config = yaml.load(f, Loader=Loader) job_list = list() for job_name in ci_config: @@ -188,6 +206,26 @@ class AssignTest(object): groups.append(self.case_group(case)) return groups + def _assign_failed_cases(self, assigned_groups, failed_groups): + """ try to assign failed cases to already assigned test groups """ + still_failed_groups = [] + failed_cases = [] + for group in failed_groups: + failed_cases.extend(group.case_list) + for case in failed_cases: + # first try to assign to already assigned groups + for group in assigned_groups: + if group.add_extra_case(case): + break + else: + # if failed, group the failed cases + for group in still_failed_groups: + if group.add_case(case): + break + else: + still_failed_groups.append(self.case_group(case)) + return still_failed_groups + @staticmethod def _apply_bot_filter(): """ @@ -213,6 +251,21 @@ class AssignTest(object): test_count = int(test_count) self.test_cases *= test_count + @staticmethod + def _count_groups_by_keys(test_groups): + """ + Count the number of test groups by job match keys. + It's an important information to update CI config file. + """ + group_count = dict() + for group in test_groups: + key = ",".join(group.ci_job_match_keys) + try: + group_count[key] += 1 + except KeyError: + group_count[key] = 1 + return group_count + def assign_cases(self): """ separate test cases to groups and assign test cases to CI jobs. @@ -221,21 +274,46 @@ class AssignTest(object): :return: None """ failed_to_assign = [] + assigned_groups = [] case_filter = self._apply_bot_filter() self.test_cases = self._search_cases(self.test_case_path, case_filter) self._apply_bot_test_count() test_groups = self._group_cases() + for group in test_groups: for job in self.jobs: if job.match_group(group): job.assign_group(group) + assigned_groups.append(group) break else: failed_to_assign.append(group) + if failed_to_assign: - console_log("Too many test cases vs jobs to run. Please add the following jobs to .gitlab-ci.yml with specific tags:", "R") - for group in failed_to_assign: - console_log("* Add job with: " + ",".join(group.ci_job_match_keys), "R") + failed_to_assign = self._assign_failed_cases(assigned_groups, failed_to_assign) + + # print debug info + # total requirement of current pipeline + required_group_count = self._count_groups_by_keys(test_groups) + console_log("Required job count by tags:") + for tags in required_group_count: + console_log("\t{}: {}".format(tags, required_group_count[tags])) + + # number of unused jobs + not_used_jobs = [job for job in self.jobs if "case group" not in job] + if not_used_jobs: + console_log("{} jobs not used. 
Please check if you define too much jobs".format(len(not_used_jobs)), "O") + for job in not_used_jobs: + console_log("\t{}".format(job["name"]), "O") + + # failures + if failed_to_assign: + console_log("Too many test cases vs jobs to run. " + "Please increase parallel count in tools/ci/config/target-test.yml " + "for jobs with specific tags:", "R") + failed_group_count = self._count_groups_by_keys(failed_to_assign) + for tags in failed_group_count: + console_log("\t{}: {}".format(tags, failed_group_count[tags]), "R") raise RuntimeError("Failed to assign test case to CI jobs") def output_configs(self, output_path): diff --git a/tools/tiny-test-fw/Utility/CaseConfig.py b/tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py similarity index 95% rename from tools/tiny-test-fw/Utility/CaseConfig.py rename to tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py index ee6234da8e..b08c520c71 100644 --- a/tools/tiny-test-fw/Utility/CaseConfig.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py @@ -20,7 +20,7 @@ Template Config File:: TestConfig: app: - path: Users/Test/TinyTestFW/IDF/IDFApp.py + package: ttfw_idf class: Example dut: path: @@ -38,16 +38,19 @@ Template Config File:: extra_data: some extra data passed to case with kwarg extra_data overwrite: # overwrite test configs app: - path: Users/Test/TinyTestFW/IDF/IDFApp.py + package: ttfw_idf class: Example - name: xxx """ +import importlib import yaml +try: + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader as Loader -import TestCase - -from Utility import load_source +from . import TestCase def _convert_to_lower_case_bytes(item): @@ -154,7 +157,7 @@ class Parser(object): configs = cls.DEFAULT_CONFIG.copy() if config_file: with open(config_file, "r") as f: - configs.update(yaml.load(f)) + configs.update(yaml.load(f, Loader=Loader)) return configs @classmethod @@ -167,9 +170,8 @@ class Parser(object): """ output = dict() for key in overwrite: - _path = overwrite[key]["path"] - _module = load_source(str(hash(_path)), overwrite[key]["path"]) - output[key] = _module.__getattribute__(overwrite[key]["class"]) + module = importlib.import_module(overwrite[key]["package"]) + output[key] = module.__getattribute__(overwrite[key]["class"]) return output @classmethod diff --git a/tools/tiny-test-fw/Utility/GitlabCIJob.py b/tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py similarity index 100% rename from tools/tiny-test-fw/Utility/GitlabCIJob.py rename to tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py diff --git a/tools/tiny-test-fw/Utility/SearchCases.py b/tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py similarity index 99% rename from tools/tiny-test-fw/Utility/SearchCases.py rename to tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py index a0930d307e..94cb9d2ca4 100644 --- a/tools/tiny-test-fw/Utility/SearchCases.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py @@ -17,7 +17,8 @@ import os import fnmatch import types import copy -from Utility import load_source + +from . 
import load_source class Search(object): diff --git a/tools/tiny-test-fw/TestCase.py b/tools/ci/python_packages/tiny_test_fw/Utility/TestCase.py similarity index 100% rename from tools/tiny-test-fw/TestCase.py rename to tools/ci/python_packages/tiny_test_fw/Utility/TestCase.py diff --git a/tools/tiny-test-fw/Utility/__init__.py b/tools/ci/python_packages/tiny_test_fw/Utility/__init__.py similarity index 57% rename from tools/tiny-test-fw/Utility/__init__.py rename to tools/ci/python_packages/tiny_test_fw/Utility/__init__.py index 2a0759a7bc..fbd2989bb0 100644 --- a/tools/tiny-test-fw/Utility/__init__.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/__init__.py @@ -38,11 +38,23 @@ def console_log(data, color="white", end="\n"): sys.stdout.flush() +__LOADED_MODULES = dict() +# we should only load one module once. +# if we load one module twice, +# python will regard the same object loaded in the first time and second time as different objects. +# it will lead to strange errors like `isinstance(object, type_of_this_object)` return False + + def load_source(name, path): try: - from importlib.machinery import SourceFileLoader - return SourceFileLoader(name, path).load_module() - except ImportError: - # importlib.machinery doesn't exists in Python 2 so we will use imp (deprecated in Python 3) - import imp - return imp.load_source(name, path) + return __LOADED_MODULES[name] + except KeyError: + try: + from importlib.machinery import SourceFileLoader + ret = SourceFileLoader(name, path).load_module() + except ImportError: + # importlib.machinery doesn't exists in Python 2 so we will use imp (deprecated in Python 3) + import imp + ret = imp.load_source(name, path) + __LOADED_MODULES[name] = ret + return ret diff --git a/tools/ci/python_packages/tiny_test_fw/__init__.py b/tools/ci/python_packages/tiny_test_fw/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tools/tiny-test-fw/Runner.py b/tools/ci/python_packages/tiny_test_fw/bin/Runner.py similarity index 96% rename from tools/tiny-test-fw/Runner.py rename to tools/ci/python_packages/tiny_test_fw/bin/Runner.py index ea124c1489..a233de64f5 100644 --- a/tools/tiny-test-fw/Runner.py +++ b/tools/ci/python_packages/tiny_test_fw/bin/Runner.py @@ -26,8 +26,8 @@ import sys import argparse import threading -import TinyFW -from Utility import SearchCases, CaseConfig +from tiny_test_fw import TinyFW +from tiny_test_fw.Utility import SearchCases, CaseConfig class Runner(threading.Thread): diff --git a/tools/tiny-test-fw/example.py b/tools/ci/python_packages/tiny_test_fw/bin/example.py similarity index 100% rename from tools/tiny-test-fw/example.py rename to tools/ci/python_packages/tiny_test_fw/bin/example.py diff --git a/tools/tiny-test-fw/docs/Makefile b/tools/ci/python_packages/tiny_test_fw/docs/Makefile similarity index 100% rename from tools/tiny-test-fw/docs/Makefile rename to tools/ci/python_packages/tiny_test_fw/docs/Makefile diff --git a/tools/ci/python_packages/tiny_test_fw/docs/_static/.keep b/tools/ci/python_packages/tiny_test_fw/docs/_static/.keep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tools/tiny-test-fw/docs/conf.py b/tools/ci/python_packages/tiny_test_fw/docs/conf.py similarity index 100% rename from tools/tiny-test-fw/docs/conf.py rename to tools/ci/python_packages/tiny_test_fw/docs/conf.py diff --git a/tools/tiny-test-fw/docs/index.rst b/tools/ci/python_packages/tiny_test_fw/docs/index.rst similarity index 100% rename from tools/tiny-test-fw/docs/index.rst rename to 
tools/ci/python_packages/tiny_test_fw/docs/index.rst diff --git a/tools/tiny-test-fw/requirements.txt b/tools/ci/python_packages/tiny_test_fw/requirements.txt similarity index 76% rename from tools/tiny-test-fw/requirements.txt rename to tools/ci/python_packages/tiny_test_fw/requirements.txt index aa6b53b4b6..2a742540e2 100644 --- a/tools/tiny-test-fw/requirements.txt +++ b/tools/ci/python_packages/tiny_test_fw/requirements.txt @@ -2,4 +2,3 @@ pyserial pyyaml junit_xml netifaces -matplotlib diff --git a/tools/ci/python_packages/ttfw_idf/CIAssignExampleTest.py b/tools/ci/python_packages/ttfw_idf/CIAssignExampleTest.py new file mode 100644 index 0000000000..ed27c58eb3 --- /dev/null +++ b/tools/ci/python_packages/ttfw_idf/CIAssignExampleTest.py @@ -0,0 +1,90 @@ +# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Command line tool to assign example tests to CI test jobs. +""" + +# TODO: Need to handle running examples on different chips +import os +import re +import argparse +import json + +import gitlab_api +from tiny_test_fw.Utility import CIAssignTest + + +EXAMPLE_BUILD_JOB_NAMES = ["build_examples_cmake_esp32", "build_examples_cmake_esp32s2"] +IDF_PATH_FROM_ENV = os.getenv("IDF_PATH") +if IDF_PATH_FROM_ENV: + ARTIFACT_INDEX_FILE = os.path.join(IDF_PATH_FROM_ENV, + "build_examples", "artifact_index.json") +else: + ARTIFACT_INDEX_FILE = "artifact_index.json" + + +class ExampleGroup(CIAssignTest.Group): + SORT_KEYS = CI_JOB_MATCH_KEYS = ["env_tag", "chip"] + + +class CIExampleAssignTest(CIAssignTest.AssignTest): + CI_TEST_JOB_PATTERN = re.compile(r"^example_test_.+") + + +def create_artifact_index_file(project_id=None, pipeline_id=None): + if project_id is None: + project_id = os.getenv("CI_PROJECT_ID") + if pipeline_id is None: + pipeline_id = os.getenv("CI_PIPELINE_ID") + gitlab_inst = gitlab_api.Gitlab(project_id) + artifact_index_list = [] + + def format_build_log_path(): + return "build_examples/list_job_{}.json".format(job_info["parallel_num"]) + + for build_job_name in EXAMPLE_BUILD_JOB_NAMES: + job_info_list = gitlab_inst.find_job_id(build_job_name, pipeline_id=pipeline_id) + for job_info in job_info_list: + raw_data = gitlab_inst.download_artifact(job_info["id"], [format_build_log_path()])[0] + build_info_list = [json.loads(line) for line in raw_data.splitlines()] + for build_info in build_info_list: + build_info["ci_job_id"] = job_info["id"] + artifact_index_list.append(build_info) + try: + os.makedirs(os.path.dirname(ARTIFACT_INDEX_FILE)) + except OSError: + # already created + pass + + with open(ARTIFACT_INDEX_FILE, "w") as f: + json.dump(artifact_index_list, f) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("test_case", + help="test case folder or file") + parser.add_argument("ci_config_file", + help="gitlab ci config file") + parser.add_argument("output_path", + help="output path of config files") + parser.add_argument("--pipeline_id", "-p", type=int, default=None, + 
help="pipeline_id") + args = parser.parse_args() + + assign_test = CIExampleAssignTest(args.test_case, args.ci_config_file, case_group=ExampleGroup) + assign_test.assign_cases() + assign_test.output_configs(args.output_path) + create_artifact_index_file() diff --git a/tools/tiny-test-fw/CIAssignUnitTest.py b/tools/ci/python_packages/ttfw_idf/CIAssignUnitTest.py similarity index 66% rename from tools/tiny-test-fw/CIAssignUnitTest.py rename to tools/ci/python_packages/ttfw_idf/CIAssignUnitTest.py index 2ea32e53d8..6f2ff00b69 100644 --- a/tools/tiny-test-fw/CIAssignUnitTest.py +++ b/tools/ci/python_packages/ttfw_idf/CIAssignUnitTest.py @@ -3,24 +3,21 @@ Command line tool to assign unit tests to CI test jobs. """ import re -import os -import sys import argparse import yaml try: - from Utility import CIAssignTest + from yaml import CLoader as Loader except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path: - sys.path.insert(0, test_fw_path) - from Utility import CIAssignTest + from yaml import Loader as Loader + +from tiny_test_fw.Utility import CIAssignTest class Group(CIAssignTest.Group): - SORT_KEYS = ["config", "SDK", "test environment", "multi_device", "multi_stage", "tags"] - MAX_CASE = 30 + SORT_KEYS = ["test environment", "tags"] + MAX_CASE = 50 ATTR_CONVERT_TABLE = { "execution_time": "execution time" } @@ -38,13 +35,28 @@ class Group(CIAssignTest.Group): attr = Group.ATTR_CONVERT_TABLE[attr] return case[attr] - def _create_extra_data(self, test_function): + def add_extra_case(self, case): + """ If current group contains all tags required by case, then add succeed """ + added = False + if self.accept_new_case(): + for key in self.filters: + if self._get_case_attr(case, key) != self.filters[key]: + if key == "tags": + if self._get_case_attr(case, key).issubset(self.filters[key]): + continue + break + else: + self.case_list.append(case) + added = True + return added + + def _create_extra_data(self, test_cases, test_function): """ For unit test case, we need to copy some attributes of test cases into config file. So unit test function knows how to run the case. 
""" case_data = [] - for case in self.case_list: + for case in test_cases: one_case_data = { "config": self._get_case_attr(case, "config"), "name": self._get_case_attr(case, "summary"), @@ -63,19 +75,26 @@ class Group(CIAssignTest.Group): case_data.append(one_case_data) return case_data - def _map_test_function(self): + def _divide_case_by_test_function(self): """ - determine which test function to use according to current test case + divide cases of current test group by test function they need to use - :return: test function name to use + :return: dict of list of cases for each test functions """ - if self.filters["multi_device"] == "Yes": - test_function = "run_multiple_devices_cases" - elif self.filters["multi_stage"] == "Yes": - test_function = "run_multiple_stage_cases" - else: - test_function = "run_unit_test_cases" - return test_function + case_by_test_function = { + "run_multiple_devices_cases": [], + "run_multiple_stage_cases": [], + "run_unit_test_cases": [], + } + + for case in self.case_list: + if case["multi_device"] == "Yes": + case_by_test_function["run_multiple_devices_cases"].append(case) + elif case["multi_stage"] == "Yes": + case_by_test_function["run_multiple_stage_cases"].append(case) + else: + case_by_test_function["run_unit_test_cases"].append(case) + return case_by_test_function def output(self): """ @@ -83,15 +102,16 @@ class Group(CIAssignTest.Group): :return: {"Filter": case filter, "CaseConfig": list of case configs for cases in this group} """ - test_function = self._map_test_function() + case_by_test_function = self._divide_case_by_test_function() + output_data = { # we don't need filter for test function, as UT uses a few test functions for all cases "CaseConfig": [ { "name": test_function, - "extra_data": self._create_extra_data(test_function), - } - ] + "extra_data": self._create_extra_data(test_cases, test_function), + } for test_function, test_cases in case_by_test_function.iteritems() if test_cases + ], } return output_data @@ -110,8 +130,10 @@ class UnitTestAssignTest(CIAssignTest.AssignTest): try: with open(test_case_path, "r") as f: - raw_data = yaml.load(f) + raw_data = yaml.load(f, Loader=Loader) test_cases = raw_data["test cases"] + for case in test_cases: + case["tags"] = set(case["tags"]) except IOError: print("Test case path is invalid. Should only happen when use @bot to skip unit test.") test_cases = [] @@ -137,6 +159,10 @@ class UnitTestAssignTest(CIAssignTest.AssignTest): # case don't have this key, regard as filter success filtered_cases.append(case) test_cases = filtered_cases + # sort cases with configs and test functions + # in later stage cases with similar attributes are more likely to be assigned to the same job + # it will reduce the count of flash DUT operations + test_cases.sort(key=lambda x: x["config"] + x["multi_stage"] + x["multi_device"]) return test_cases diff --git a/tools/ci/python_packages/ttfw_idf/IDFApp.py b/tools/ci/python_packages/ttfw_idf/IDFApp.py new file mode 100644 index 0000000000..c17072ca82 --- /dev/null +++ b/tools/ci/python_packages/ttfw_idf/IDFApp.py @@ -0,0 +1,383 @@ +# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" IDF Test Applications """ +import subprocess + +import os +import json + +from tiny_test_fw import App +from . import CIAssignExampleTest + +try: + import gitlab_api +except ImportError: + gitlab_api = None + + +def parse_flash_settings(path): + file_name = os.path.basename(path) + if file_name == "flasher_args.json": + # CMake version using build metadata file + with open(path, "r") as f: + args = json.load(f) + flash_files = [(offs, binary) for (offs, binary) in args["flash_files"].items() if offs != ""] + flash_settings = args["flash_settings"] + app_name = os.path.splitext(args["app"]["file"])[0] + else: + # GNU Make version uses download.config arguments file + with open(path, "r") as f: + args = f.readlines()[-1].split(" ") + flash_files = [] + flash_settings = {} + for idx in range(0, len(args), 2): # process arguments in pairs + if args[idx].startswith("--"): + # strip the -- from the command line argument + flash_settings[args[idx][2:]] = args[idx + 1] + else: + # offs, filename + flash_files.append((args[idx], args[idx + 1])) + # we can only guess app name in download.config. + for p in flash_files: + if not os.path.dirname(p[1]) and "partition" not in p[1]: + # app bin usually in the same dir with download.config and it's not partition table + app_name = os.path.splitext(p[1])[0] + break + else: + app_name = None + return flash_files, flash_settings, app_name + + +class Artifacts(object): + def __init__(self, dest_root_path, artifact_index_file, app_path, config_name, target): + assert gitlab_api + # at least one of app_path or config_name is not None. otherwise we can't match artifact + assert app_path or config_name + assert os.path.exists(artifact_index_file) + self.gitlab_inst = gitlab_api.Gitlab(os.getenv("CI_PROJECT_ID")) + self.dest_root_path = dest_root_path + with open(artifact_index_file, "r") as f: + artifact_index = json.load(f) + self.artifact_info = self._find_artifact(artifact_index, app_path, config_name, target) + + @staticmethod + def _find_artifact(artifact_index, app_path, config_name, target): + for artifact_info in artifact_index: + match_result = True + if app_path: + match_result = app_path in artifact_info["app_dir"] + if config_name: + match_result = match_result and config_name == artifact_info["config"] + if target: + match_result = match_result and target == artifact_info["target"] + if match_result: + ret = artifact_info + break + else: + ret = None + return ret + + def download_artifacts(self): + if self.artifact_info: + base_path = os.path.join(self.artifact_info["work_dir"], self.artifact_info["build_dir"]) + job_id = self.artifact_info["ci_job_id"] + + # 1. download flash args file + if self.artifact_info["build_system"] == "cmake": + flash_arg_file = os.path.join(base_path, "flasher_args.json") + else: + flash_arg_file = os.path.join(base_path, "download.config") + + self.gitlab_inst.download_artifact(job_id, [flash_arg_file], self.dest_root_path) + + # 2. 
download all binary files + flash_files, flash_settings, app_name = parse_flash_settings(os.path.join(self.dest_root_path, + flash_arg_file)) + artifact_files = [os.path.join(base_path, p[1]) for p in flash_files] + artifact_files.append(os.path.join(base_path, app_name + ".elf")) + + self.gitlab_inst.download_artifact(job_id, artifact_files, self.dest_root_path) + + # 3. download sdkconfig file + self.gitlab_inst.download_artifact(job_id, [os.path.join(os.path.dirname(base_path), "sdkconfig")], + self.dest_root_path) + else: + base_path = None + return base_path + + def download_artifact_files(self, file_names): + if self.artifact_info: + base_path = os.path.join(self.artifact_info["work_dir"], self.artifact_info["build_dir"]) + job_id = self.artifact_info["ci_job_id"] + + # download all binary files + artifact_files = [os.path.join(base_path, fn) for fn in file_names] + self.gitlab_inst.download_artifact(job_id, artifact_files, self.dest_root_path) + + # download sdkconfig file + self.gitlab_inst.download_artifact(job_id, [os.path.join(os.path.dirname(base_path), "sdkconfig")], + self.dest_root_path) + else: + base_path = None + return base_path + + +class IDFApp(App.BaseApp): + """ + Implements common esp-idf application behavior. + idf applications should inherent from this class and overwrite method get_binary_path. + """ + + IDF_DOWNLOAD_CONFIG_FILE = "download.config" + IDF_FLASH_ARGS_FILE = "flasher_args.json" + + def __init__(self, app_path, config_name=None, target=None): + super(IDFApp, self).__init__(app_path) + self.config_name = config_name + self.target = target + self.idf_path = self.get_sdk_path() + self.binary_path = self.get_binary_path(app_path, config_name, target) + self.elf_file = self._get_elf_file_path(self.binary_path) + assert os.path.exists(self.binary_path) + if self.IDF_DOWNLOAD_CONFIG_FILE not in os.listdir(self.binary_path): + if self.IDF_FLASH_ARGS_FILE not in os.listdir(self.binary_path): + msg = ("Neither {} nor {} exists. " + "Try to run 'make print_flash_cmd | tail -n 1 > {}/{}' " + "or 'idf.py build' " + "for resolving the issue." + "").format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE, + self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE) + raise AssertionError(msg) + + self.flash_files, self.flash_settings = self._parse_flash_download_config() + self.partition_table = self._parse_partition_table() + + @classmethod + def get_sdk_path(cls): + # type: () -> str + idf_path = os.getenv("IDF_PATH") + assert idf_path + assert os.path.exists(idf_path) + return idf_path + + def _get_sdkconfig_paths(self): + """ + returns list of possible paths where sdkconfig could be found + + Note: could be overwritten by a derived class to provide other locations or order + """ + return [os.path.join(self.binary_path, "sdkconfig"), os.path.join(self.binary_path, "..", "sdkconfig")] + + def get_sdkconfig(self): + """ + reads sdkconfig and returns a dictionary with all configuredvariables + + :raise: AssertionError: if sdkconfig file does not exist in defined paths + """ + d = {} + sdkconfig_file = None + for i in self._get_sdkconfig_paths(): + if os.path.exists(i): + sdkconfig_file = i + break + assert sdkconfig_file is not None + with open(sdkconfig_file) as f: + for line in f: + configs = line.split('=') + if len(configs) == 2: + d[configs[0]] = configs[1].rstrip() + return d + + def get_binary_path(self, app_path, config_name=None, target=None): + # type: (str, str, str) -> str + """ + get binary path according to input app_path. 
+ + subclass must overwrite this method. + + :param app_path: path of application + :param config_name: name of the application build config. Will match any config if None + :param target: target name. Will match for target if None + :return: abs app binary path + """ + pass + + @staticmethod + def _get_elf_file_path(binary_path): + ret = "" + file_names = os.listdir(binary_path) + for fn in file_names: + if os.path.splitext(fn)[1] == ".elf": + ret = os.path.join(binary_path, fn) + return ret + + def _parse_flash_download_config(self): + """ + Parse flash download config from build metadata files + + Sets self.flash_files, self.flash_settings + + (Called from constructor) + + Returns (flash_files, flash_settings) + """ + + if self.IDF_FLASH_ARGS_FILE in os.listdir(self.binary_path): + # CMake version using build metadata file + path = os.path.join(self.binary_path, self.IDF_FLASH_ARGS_FILE) + else: + # GNU Make version uses download.config arguments file + path = os.path.join(self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE) + + flash_files, flash_settings, app_name = parse_flash_settings(path) + # The build metadata file does not currently have details, which files should be encrypted and which not. + # Assume that all files should be encrypted if flash encryption is enabled in development mode. + sdkconfig_dict = self.get_sdkconfig() + flash_settings["encrypt"] = "CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT" in sdkconfig_dict + + # make file offsets into integers, make paths absolute + flash_files = [(int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files] + + return flash_files, flash_settings + + def _parse_partition_table(self): + """ + Parse partition table contents based on app binaries + + Returns partition_table data + + (Called from constructor) + """ + partition_tool = os.path.join(self.idf_path, + "components", + "partition_table", + "gen_esp32part.py") + assert os.path.exists(partition_tool) + + for (_, path) in self.flash_files: + if "partition" in path: + partition_file = os.path.join(self.binary_path, path) + break + else: + raise ValueError("No partition table found for IDF binary path: {}".format(self.binary_path)) + + process = subprocess.Popen(["python", partition_tool, partition_file], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + raw_data = process.stdout.read() + if isinstance(raw_data, bytes): + raw_data = raw_data.decode() + partition_table = dict() + + for line in raw_data.splitlines(): + if line[0] != "#": + try: + _name, _type, _subtype, _offset, _size, _flags = line.split(",") + if _size[-1] == "K": + _size = int(_size[:-1]) * 1024 + elif _size[-1] == "M": + _size = int(_size[:-1]) * 1024 * 1024 + else: + _size = int(_size) + except ValueError: + continue + partition_table[_name] = { + "type": _type, + "subtype": _subtype, + "offset": _offset, + "size": _size, + "flags": _flags + } + + return partition_table + + +class Example(IDFApp): + def _get_sdkconfig_paths(self): + """ + overrides the parent method to provide exact path of sdkconfig for example tests + """ + return [os.path.join(self.binary_path, "..", "sdkconfig")] + + def _try_get_binary_from_local_fs(self, app_path, config_name=None, target=None): + # build folder of example path + path = os.path.join(self.idf_path, app_path, "build") + if os.path.exists(path): + return path + + if not config_name: + config_name = "default" + + # Search for CI build folders. 
+ # Path format: $IDF_PATH/build_examples/app_path_with_underscores/config/target + # (see tools/ci/build_examples_cmake.sh) + # For example: $IDF_PATH/build_examples/examples_get-started_blink/default/esp32 + app_path_underscored = app_path.replace(os.path.sep, "_") + example_path = os.path.join(self.idf_path, "build_examples") + for dirpath in os.listdir(example_path): + if os.path.basename(dirpath) == app_path_underscored: + path = os.path.join(example_path, dirpath, config_name, target, "build") + if os.path.exists(path): + return path + else: + return None + + def get_binary_path(self, app_path, config_name=None, target=None): + path = self._try_get_binary_from_local_fs(app_path, config_name, target) + if path: + return path + else: + artifacts = Artifacts(self.idf_path, CIAssignExampleTest.ARTIFACT_INDEX_FILE, + app_path, config_name, target) + path = artifacts.download_artifacts() + if path: + return os.path.join(self.idf_path, path) + else: + raise OSError("Failed to find example binary") + + +class UT(IDFApp): + def get_binary_path(self, app_path, config_name=None, target=None): + if not config_name: + config_name = "default" + + path = os.path.join(self.idf_path, app_path) + default_build_path = os.path.join(path, "build") + if os.path.exists(default_build_path): + return path + + # first try to get from build folder of unit-test-app + path = os.path.join(self.idf_path, "tools", "unit-test-app", "build") + if os.path.exists(path): + # found, use bin in build path + return path + + # ``make ut-build-all-configs`` or ``make ut-build-CONFIG`` will copy binary to output folder + path = os.path.join(self.idf_path, "tools", "unit-test-app", "output", config_name) + if os.path.exists(path): + return path + + raise OSError("Failed to get unit-test-app binary path") + + +class SSC(IDFApp): + def get_binary_path(self, app_path, config_name=None, target=None): + # TODO: to implement SSC get binary path + return app_path + + +class AT(IDFApp): + def get_binary_path(self, app_path, config_name=None, target=None): + # TODO: to implement AT get binary path + return app_path diff --git a/tools/tiny-test-fw/IDF/IDFDUT.py b/tools/ci/python_packages/ttfw_idf/IDFDUT.py similarity index 97% rename from tools/tiny-test-fw/IDF/IDFDUT.py rename to tools/ci/python_packages/ttfw_idf/IDFDUT.py index bb217ca2c8..9f64cbea9d 100644 --- a/tools/tiny-test-fw/IDF/IDFDUT.py +++ b/tools/ci/python_packages/ttfw_idf/IDFDUT.py @@ -30,8 +30,7 @@ except ImportError: from serial.tools import list_ports -import DUT -import Utility +from tiny_test_fw import DUT, Utility try: import esptool @@ -160,14 +159,14 @@ class IDFDUT(DUT.SerialDUT): self.performance_items = _queue.Queue() @classmethod - def get_mac(cls, app, port): + def get_mac(cls, port): """ get MAC address via esptool - :param app: application instance (to get tool) :param port: serial port as string :return: MAC address or None """ + esp = None try: esp = esptool.ESP32ROM(port) esp.connect() @@ -175,13 +174,14 @@ class IDFDUT(DUT.SerialDUT): except RuntimeError: return None finally: - # do hard reset after use esptool - esp.hard_reset() - esp._port.close() + if esp: + # do hard reset after use esptool + esp.hard_reset() + esp._port.close() @classmethod - def confirm_dut(cls, port, app, **kwargs): - return cls.get_mac(app, port) is not None + def confirm_dut(cls, port, **kwargs): + return cls.get_mac(port) is not None @_uses_esptool def _try_flash(self, esp, erase_nvs, baud_rate): diff --git a/tools/tiny-test-fw/IDF/__init__.py 
b/tools/ci/python_packages/ttfw_idf/__init__.py similarity index 94% rename from tools/tiny-test-fw/IDF/__init__.py rename to tools/ci/python_packages/ttfw_idf/__init__.py index 6887947231..001da23095 100644 --- a/tools/tiny-test-fw/IDF/__init__.py +++ b/tools/ci/python_packages/ttfw_idf/__init__.py @@ -14,10 +14,9 @@ import os import re -import TinyFW -import Utility -from IDF.IDFApp import IDFApp, Example, UT -from IDF.IDFDUT import IDFDUT +from tiny_test_fw import TinyFW, Utility +from IDFApp import IDFApp, Example, UT +from IDFDUT import IDFDUT def format_case_id(chip, case_name): @@ -25,7 +24,7 @@ def format_case_id(chip, case_name): def idf_example_test(app=Example, dut=IDFDUT, chip="ESP32", module="examples", execution_time=1, - level="example", erase_nvs=True, **kwargs): + level="example", erase_nvs=True, config_name=None, **kwargs): """ decorator for testing idf examples (with default values for some keyword args). @@ -36,6 +35,7 @@ def idf_example_test(app=Example, dut=IDFDUT, chip="ESP32", module="examples", e :param execution_time: execution time in minutes, int :param level: test level, could be used to filter test cases, string :param erase_nvs: if need to erase_nvs in DUT.start_app() + :param config_name: if specified, name of the app configuration :param kwargs: other keyword args :return: test method """ diff --git a/examples/provisioning/softap_prov/utils/wifi_tools.py b/tools/ci/python_packages/wifi_tools.py similarity index 100% rename from examples/provisioning/softap_prov/utils/wifi_tools.py rename to tools/ci/python_packages/wifi_tools.py diff --git a/tools/ci/setup_python.sh b/tools/ci/setup_python.sh index c134ba816b..a0e1723be0 100644 --- a/tools/ci/setup_python.sh +++ b/tools/ci/setup_python.sh @@ -51,3 +51,6 @@ else echo 'No /opt/pyenv/activate exists and no Python interpreter is found!' exit 1 fi + +# add esp-idf local package path to PYTHONPATH so it can be imported directly +export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH" diff --git a/tools/cmake/git_submodules.cmake b/tools/cmake/git_submodules.cmake index 09098b24af..e36ee919f7 100644 --- a/tools/cmake/git_submodules.cmake +++ b/tools/cmake/git_submodules.cmake @@ -11,6 +11,14 @@ if(NOT GIT_FOUND) else() function(git_submodule_check root_path) + # for internal use: + # skip submodule check if running on Gitlab CI and job is configured as not clone submodules + if($ENV{IDF_SKIP_CHECK_SUBMODULES}) + if($ENV{IDF_SKIP_CHECK_SUBMODULES} EQUAL 1) + message("skip submodule check on internal CI") + return() + endif() + endif() execute_process( COMMAND ${GIT_EXECUTABLE} submodule status diff --git a/tools/esp_prov/__init__.py b/tools/esp_prov/__init__.py new file mode 100644 index 0000000000..cbd5e5b60f --- /dev/null +++ b/tools/esp_prov/__init__.py @@ -0,0 +1 @@ +from esp_prov import * # noqa: export esp_prov module to users diff --git a/tools/find_apps.py b/tools/find_apps.py new file mode 100755 index 0000000000..e3dd48c4c0 --- /dev/null +++ b/tools/find_apps.py @@ -0,0 +1,283 @@ +#!/usr/bin/env python +# coding=utf-8 +# +# ESP-IDF helper script to enumerate the builds of multiple configurations of multiple apps. +# Produces the list of builds. The list can be consumed by build_apps.py, which performs the actual builds. 
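(Editorial sketch, not part of the patch: the header above says find_apps.py "produces the list of builds" for build_apps.py to consume, and main() below writes one BuildItem as JSON per output line. A minimal, hedged example of reading that list back; the file name "build_list.json" is an assumed example of the "-o" output, and the exact fields per entry depend on BuildItem.to_json().)

    import json

    # find_apps.py emits one JSON object per line, so the list can be read back line by line.
    with open("build_list.json") as f:          # assumed output of: find_apps.py ... -o build_list.json
        builds = [json.loads(line) for line in f if line.strip()]

    for build in builds:
        # each entry describes one build: app dir, config name, target, build dir, etc.
        print(build)
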
+ +import argparse +import os +import sys +import re +import glob +import logging +import typing +from find_build_apps import ( + BUILD_SYSTEMS, + BUILD_SYSTEM_CMAKE, + BuildSystem, + BuildItem, + setup_logging, + ConfigRule, + config_rules_from_str, + DEFAULT_TARGET, +) + +# Helper functions + + +def dict_from_sdkconfig(path): + """ + Parse the sdkconfig file at 'path', return name:value pairs as a dict + """ + regex = re.compile(r"^([^#=]+)=(.+)$") + result = {} + with open(path) as f: + for line in f: + m = regex.match(line) + if m: + result[m.group(1)] = m.group(2) + return result + + +# Main logic: enumerating apps and builds + + +def find_builds_for_app( + app_path, work_dir, build_dir, build_log, target_arg, build_system, + config_rules): # type: (str, str, str, str, str, str, typing.List[ConfigRule]) -> typing.List[BuildItem] + """ + Find configurations (sdkconfig file fragments) for the given app, return them as BuildItem objects + :param app_path: app directory (can be / usually will be a relative path) + :param work_dir: directory where the app should be copied before building. + May contain env. variables and placeholders. + :param build_dir: directory where the build will be done, relative to the work_dir. May contain placeholders. + :param build_log: path of the build log. May contain placeholders. May be None, in which case the log should go + into stdout/stderr. + :param target_arg: the value of IDF_TARGET passed to the script. Used to filter out configurations with + a different CONFIG_IDF_TARGET value. + :param build_system: name of the build system, index into BUILD_SYSTEMS dictionary + :param config_rules: mapping of sdkconfig file name patterns to configuration names + :return: list of BuildItems representing build configuration of the app + """ + build_items = [] # type: typing.List[BuildItem] + default_config_name = "" + + for rule in config_rules: + if not rule.file_name: + default_config_name = rule.config_name + continue + + sdkconfig_paths = glob.glob(os.path.join(app_path, rule.file_name)) + sdkconfig_paths = sorted(sdkconfig_paths) + for sdkconfig_path in sdkconfig_paths: + + # Check if the sdkconfig file specifies IDF_TARGET, and if it is matches the --target argument. 
+ sdkconfig_dict = dict_from_sdkconfig(sdkconfig_path) + target_from_config = sdkconfig_dict.get("CONFIG_IDF_TARGET") + if target_from_config is not None and target_from_config != target_arg: + logging.debug("Skipping sdkconfig {} which requires target {}".format( + sdkconfig_path, target_from_config)) + continue + + # Figure out the config name + config_name = rule.config_name or "" + if "*" in rule.file_name: + # convert glob pattern into a regex + regex_str = r".*" + rule.file_name.replace(".", r"\.").replace("*", r"(.*)") + groups = re.match(regex_str, sdkconfig_path) + assert groups + config_name = groups.group(1) + + sdkconfig_path = os.path.relpath(sdkconfig_path, app_path) + logging.debug('Adding build: app {}, sdkconfig {}, config name "{}"'.format( + app_path, sdkconfig_path, config_name)) + build_items.append( + BuildItem( + app_path, + work_dir, + build_dir, + build_log, + target_arg, + sdkconfig_path, + config_name, + build_system, + )) + + if not build_items: + logging.debug('Adding build: app {}, default sdkconfig, config name "{}"'.format(app_path, default_config_name)) + return [ + BuildItem( + app_path, + work_dir, + build_dir, + build_log, + target_arg, + None, + default_config_name, + build_system, + ) + ] + + return build_items + + +def find_apps(build_system_class, path, recursive, exclude_list, + target): # type: (typing.Type[BuildSystem], str, bool, typing.List[str], str) -> typing.List[str] + """ + Find app directories in path (possibly recursively), which contain apps for the given build system, compatible + with the given target. + :param build_system_class: class derived from BuildSystem, representing the build system in use + :param path: path where to look for apps + :param recursive: whether to recursively descend into nested directories if no app is found + :param exclude_list: list of paths to be excluded from the recursive search + :param target: desired value of IDF_TARGET; apps incompatible with the given target are skipped. + :return: list of paths of the apps found + """ + build_system_name = build_system_class.NAME + logging.debug("Looking for {} apps in {}{}".format(build_system_name, path, " recursively" if recursive else "")) + if not recursive: + if exclude_list: + logging.warn("--exclude option is ignored when used without --recursive") + if not build_system_class.is_app(path): + logging.warn("Path {} specified without --recursive flag, but no {} app found there".format( + path, build_system_name)) + return [] + return [path] + + # The remaining part is for recursive == True + apps_found = [] # type: typing.List[str] + for root, dirs, _ in os.walk(path, topdown=True): + logging.debug("Entering {}".format(root)) + if root in exclude_list: + logging.debug("Skipping {} (excluded)".format(root)) + del dirs[:] + continue + + if build_system_class.is_app(root): + logging.debug("Found {} app in {}".format(build_system_name, root)) + # Don't recurse into app subdirectories + del dirs[:] + + supported_targets = build_system_class.supported_targets(root) + if supported_targets and target not in supported_targets: + logging.debug("Skipping, app only supports targets: " + ", ".join(supported_targets)) + continue + + apps_found.append(root) + + return apps_found + + +def main(): + parser = argparse.ArgumentParser(description="Tool to generate build steps for IDF apps") + parser.add_argument( + "-v", + "--verbose", + action="count", + help="Increase the logging level of the script. 
Can be specified multiple times.", + ) + parser.add_argument( + "--log-file", + type=argparse.FileType("w"), + help="Write the script log to the specified file, instead of stderr", + ) + parser.add_argument( + "--recursive", + action="store_true", + help="Look for apps in the specified directories recursively.", + ) + parser.add_argument("--build-system", choices=BUILD_SYSTEMS.keys(), default=BUILD_SYSTEM_CMAKE) + parser.add_argument( + "--work-dir", + help="If set, the app is first copied into the specified directory, and then built." + + "If not set, the work directory is the directory of the app.", + ) + parser.add_argument( + "--config", + action="append", + help="Adds configurations (sdkconfig file names) to build. This can either be " + + "FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, " + + "relative to the project directory, to be used. Optional NAME can be specified, " + + "which can be used as a name of this configuration. FILEPATTERN is the name of " + + "the sdkconfig file, relative to the project directory, with at most one wildcard. " + + "The part captured by the wildcard is used as the name of the configuration.", + ) + parser.add_argument( + "--build-dir", + help="If set, specifies the build directory name. Can expand placeholders. Can be either a " + + "name relative to the work directory, or an absolute path.", + ) + parser.add_argument( + "--build-log", + help="If specified, the build log will be written to this file. Can expand placeholders.", + ) + parser.add_argument("--target", help="Build apps for given target.") + parser.add_argument( + "--format", + default="json", + choices=["json"], + help="Format to write the list of builds as", + ) + parser.add_argument( + "--exclude", + action="append", + help="Ignore specified directory (if --recursive is given). Can be used multiple times.", + ) + parser.add_argument( + "-o", + "--output", + type=argparse.FileType("w"), + help="Output the list of builds to the specified file", + ) + parser.add_argument("paths", nargs="+", help="One or more app paths.") + args = parser.parse_args() + setup_logging(args) + + build_system_class = BUILD_SYSTEMS[args.build_system] + + # If the build target is not set explicitly, get it from the environment or use the default one (esp32) + if not args.target: + env_target = os.environ.get("IDF_TARGET") + if env_target: + logging.info("--target argument not set, using IDF_TARGET={} from the environment".format(env_target)) + args.target = env_target + else: + logging.info("--target argument not set, using IDF_TARGET={} as the default".format(DEFAULT_TARGET)) + args.target = DEFAULT_TARGET + + # Prepare the list of app paths + app_paths = [] # type: typing.List[str] + for path in args.paths: + app_paths += find_apps(build_system_class, path, args.recursive, args.exclude or [], args.target) + + if not app_paths: + logging.critical("No {} apps found".format(build_system_class.NAME)) + raise SystemExit(1) + logging.info("Found {} apps".format(len(app_paths))) + + app_paths = sorted(app_paths) + + # Find compatible configurations of each app, collect them as BuildItems + build_items = [] # type: typing.List[BuildItem] + config_rules = config_rules_from_str(args.config or []) + for app_path in app_paths: + build_items += find_builds_for_app( + app_path, + args.work_dir, + args.build_dir, + args.build_log, + args.target, + args.build_system, + config_rules, + ) + logging.info("Found {} builds".format(len(build_items))) + + # Write out the BuildItems. 
Only JSON supported now (will add YAML later). + if args.format != "json": + raise NotImplementedError() + out = args.output or sys.stdout + out.writelines([item.to_json() + "\n" for item in build_items]) + + +if __name__ == "__main__": + main() diff --git a/tools/find_build_apps/__init__.py b/tools/find_build_apps/__init__.py new file mode 100644 index 0000000000..c87ceebda0 --- /dev/null +++ b/tools/find_build_apps/__init__.py @@ -0,0 +1,31 @@ +from .common import ( + BuildItem, + BuildSystem, + BuildError, + ConfigRule, + config_rules_from_str, + setup_logging, + DEFAULT_TARGET, +) +from .cmake import CMakeBuildSystem, BUILD_SYSTEM_CMAKE +from .make import MakeBuildSystem, BUILD_SYSTEM_MAKE + +BUILD_SYSTEMS = { + BUILD_SYSTEM_MAKE: MakeBuildSystem, + BUILD_SYSTEM_CMAKE: CMakeBuildSystem, +} + +__all__ = [ + "BuildItem", + "BuildSystem", + "BuildError", + "ConfigRule", + "config_rules_from_str", + "setup_logging", + "DEFAULT_TARGET", + "CMakeBuildSystem", + "BUILD_SYSTEM_CMAKE", + "MakeBuildSystem", + "BUILD_SYSTEM_MAKE", + "BUILD_SYSTEMS", +] diff --git a/tools/find_build_apps/cmake.py b/tools/find_build_apps/cmake.py new file mode 100644 index 0000000000..8668eb2184 --- /dev/null +++ b/tools/find_build_apps/cmake.py @@ -0,0 +1,158 @@ +import os +import sys +import subprocess +import logging +import shutil +import re +from .common import BuildSystem, BuildItem, BuildError + +BUILD_SYSTEM_CMAKE = "cmake" +IDF_PY = "idf.py" + +# While ESP-IDF component CMakeLists files can be identified by the presence of 'idf_component_register' string, +# there is no equivalent for the project CMakeLists files. This seems to be the best option... +CMAKE_PROJECT_LINE = r"include($ENV{IDF_PATH}/tools/cmake/project.cmake)" + +SUPPORTED_TARGETS_REGEX = re.compile(r"set\(\s*SUPPORTED_TARGETS\s+([a-z_0-9\- ]+)\s*\)") + + +class CMakeBuildSystem(BuildSystem): + NAME = BUILD_SYSTEM_CMAKE + + @staticmethod + def build(build_item): # type: (BuildItem) -> None + app_path = build_item.app_dir + work_path = build_item.work_dir or app_path + if not build_item.build_dir: + build_path = os.path.join(work_path, "build") + elif os.path.isabs(build_item.build_dir): + build_path = build_item.build_dir + else: + build_path = os.path.join(work_path, build_item.build_dir) + + if work_path != app_path: + if os.path.exists(work_path): + logging.debug("Work directory {} exists, removing".format(work_path)) + if not build_item.dry_run: + shutil.rmtree(work_path) + logging.debug("Copying app from {} to {}".format(app_path, work_path)) + if not build_item.dry_run: + shutil.copytree(app_path, work_path) + + if os.path.exists(build_path): + logging.debug("Build directory {} exists, removing".format(build_path)) + if not build_item.dry_run: + shutil.rmtree(build_path) + + if not build_item.dry_run: + os.makedirs(build_path) + + # Prepare the sdkconfig file, from the contents of sdkconfig.defaults (if exists) and the contents of + # build_info.sdkconfig_path, i.e. the config-specific sdkconfig file. + # + # Note: the build system supports taking multiple sdkconfig.defaults files via SDKCONFIG_DEFAULTS + # CMake variable. However here we do this manually to perform environment variable expansion in the + # sdkconfig files. 
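
The environment variable expansion mentioned in the comment above is plain os.path.expandvars applied to every line; a small sketch, with a made-up variable and option name:

import os

# EXAMPLE_PARTITION_CSV is a hypothetical variable assumed to be exported by the CI job.
os.environ["EXAMPLE_PARTITION_CSV"] = "partitions_two_ota.csv"
line = 'CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="$EXAMPLE_PARTITION_CSV"\n'
print(os.path.expandvars(line), end="")
# CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partitions_two_ota.csv"
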
+ sdkconfig_defaults_list = ["sdkconfig.defaults"] + if build_item.sdkconfig_path: + sdkconfig_defaults_list.append(build_item.sdkconfig_path) + + sdkconfig_file = os.path.join(work_path, "sdkconfig") + if os.path.exists(sdkconfig_file): + logging.debug("Removing sdkconfig file: {}".format(sdkconfig_file)) + if not build_item.dry_run: + os.unlink(sdkconfig_file) + + logging.debug("Creating sdkconfig file: {}".format(sdkconfig_file)) + if not build_item.dry_run: + with open(sdkconfig_file, "w") as f_out: + for sdkconfig_name in sdkconfig_defaults_list: + sdkconfig_path = os.path.join(work_path, sdkconfig_name) + if not sdkconfig_path or not os.path.exists(sdkconfig_path): + continue + logging.debug("Appending {} to sdkconfig".format(sdkconfig_name)) + with open(sdkconfig_path, "r") as f_in: + for line in f_in: + f_out.write(os.path.expandvars(line)) + # Also save the sdkconfig file in the build directory + shutil.copyfile( + os.path.join(work_path, "sdkconfig"), + os.path.join(build_path, "sdkconfig"), + ) + + else: + for sdkconfig_name in sdkconfig_defaults_list: + sdkconfig_path = os.path.join(app_path, sdkconfig_name) + if not sdkconfig_path: + continue + logging.debug("Considering sdkconfig {}".format(sdkconfig_path)) + if not os.path.exists(sdkconfig_path): + continue + logging.debug("Appending {} to sdkconfig".format(sdkconfig_name)) + + # Prepare the build arguments + args = [ + # Assume it is the responsibility of the caller to + # set up the environment (run . ./export.sh) + IDF_PY, + "-B", + build_path, + "-C", + work_path, + "-DIDF_TARGET=" + build_item.target, + ] + if build_item.verbose: + args.append("-v") + args.append("build") + cmdline = format(" ".join(args)) + logging.info("Running {}".format(cmdline)) + + if build_item.dry_run: + return + + log_file = None + build_stdout = sys.stdout + build_stderr = sys.stderr + if build_item.build_log_path: + logging.info("Writing build log to {}".format(build_item.build_log_path)) + log_file = open(build_item.build_log_path, "w") + build_stdout = log_file + build_stderr = log_file + + try: + subprocess.check_call(args, stdout=build_stdout, stderr=build_stderr) + except subprocess.CalledProcessError as e: + raise BuildError("Build failed with exit code {}".format(e.returncode)) + finally: + if log_file: + log_file.close() + + @staticmethod + def _read_cmakelists(app_path): + cmakelists_path = os.path.join(app_path, "CMakeLists.txt") + if not os.path.exists(cmakelists_path): + return None + with open(cmakelists_path, "r") as cmakelists_file: + return cmakelists_file.read() + + @staticmethod + def is_app(path): + cmakelists_file_content = CMakeBuildSystem._read_cmakelists(path) + if not cmakelists_file_content: + return False + if CMAKE_PROJECT_LINE not in cmakelists_file_content: + return False + return True + + @staticmethod + def supported_targets(app_path): + cmakelists_file_content = CMakeBuildSystem._read_cmakelists(app_path) + if not cmakelists_file_content: + return None + match = re.findall(SUPPORTED_TARGETS_REGEX, cmakelists_file_content) + if not match: + return None + if len(match) > 1: + raise NotImplementedError("Can't determine the value of SUPPORTED_TARGETS in {}".format(app_path)) + targets = match[0].split(" ") + return targets diff --git a/tools/find_build_apps/common.py b/tools/find_build_apps/common.py new file mode 100644 index 0000000000..fc4013d561 --- /dev/null +++ b/tools/find_build_apps/common.py @@ -0,0 +1,231 @@ +# coding=utf-8 + +import sys +import os +from collections import namedtuple +import logging 
+import json +import typing + +DEFAULT_TARGET = "esp32" + +TARGET_PLACEHOLDER = "@t" +WILDCARD_PLACEHOLDER = "@w" +NAME_PLACEHOLDER = "@n" +FULL_NAME_PLACEHOLDER = "@f" +INDEX_PLACEHOLDER = "@i" + +# ConfigRule represents one --config argument of find_apps.py. +# file_name is the name of the sdkconfig file fragment, optionally with a single wildcard ('*' character). +# file_name can also be empty to indicate that the default configuration of the app should be used. +# config_name is the name of the corresponding build configuration, or None if the value of wildcard is to be used. +# For example: +# filename='', config_name='default' — represents the default app configuration, and gives it a name 'default' +# filename='sdkconfig.*', config_name=None - represents the set of configurations, names match the wildcard value +ConfigRule = namedtuple("ConfigRule", ["file_name", "config_name"]) + + +def config_rules_from_str(rule_strings): # type: (typing.List[str]) -> typing.List[ConfigRule] + """ + Helper function to convert strings like 'file_name=config_name' into ConfigRule objects + :param rule_strings: list of rules as strings + :return: list of ConfigRules + """ + rules = [] # type: typing.List[ConfigRule] + for rule_str in rule_strings: + items = rule_str.split("=", 2) + rules.append(ConfigRule(items[0], items[1] if len(items) == 2 else None)) + return rules + + +class BuildItem(object): + """ + Instance of this class represents one build of an application. + The parameters which distinguish the build are passed to the constructor. + """ + def __init__( + self, + app_path, + work_dir, + build_path, + build_log_path, + target, + sdkconfig_path, + config_name, + build_system, + ): + # These internal variables store the paths with environment variables and placeholders; + # Public properties with similar names use the _expand method to get the actual paths. + self._app_dir = app_path + self._work_dir = work_dir + self._build_dir = build_path + self._build_log_path = build_log_path + + self.sdkconfig_path = sdkconfig_path + self.config_name = config_name + self.target = target + self.build_system = build_system + + self._app_name = os.path.basename(os.path.normpath(app_path)) + + # Some miscellaneous build properties which are set later, at the build stage + self.index = None + self.verbose = False + self.dry_run = False + self.keep_going = False + + @property + def app_dir(self): + """ + :return: directory of the app + """ + return self._expand(self._app_dir) + + @property + def work_dir(self): + """ + :return: directory where the app should be copied to, prior to the build. Can be None, which means that the app + directory should be used. + """ + return self._expand(self._work_dir) + + @property + def build_dir(self): + """ + :return: build directory, either relative to the work directory (if relative path is used) or absolute path. 
+ """ + return self._expand(self._build_dir) + + @property + def build_log_path(self): + """ + :return: path of the build log file + """ + return self._expand(self._build_log_path) + + def __repr__(self): + return "Build app {} for target {}, sdkconfig {} in {}".format( + self.app_dir, + self.target, + self.sdkconfig_path or "(default)", + self.build_dir, + ) + + def to_json(self): # type: () -> str + """ + :return: JSON string representing this object + """ + return self._to_json(self._app_dir, self._work_dir, self._build_dir, self._build_log_path) + + def to_json_expanded(self): # type: () -> str + """ + :return: JSON string representing this object, with all placeholders in paths expanded + """ + return self._to_json(self.app_dir, self.work_dir, self.build_dir, self.build_log_path) + + def _to_json(self, app_dir, work_dir, build_dir, build_log_path): # type: (str, str, str, str) -> str + """ + Internal function, called by to_json and to_json_expanded + """ + return json.dumps({ + "build_system": self.build_system, + "app_dir": app_dir, + "work_dir": work_dir, + "build_dir": build_dir, + "build_log_path": build_log_path, + "sdkconfig": self.sdkconfig_path, + "config": self.config_name, + "target": self.target, + "verbose": self.verbose, + }) + + @staticmethod + def from_json(json_str): # type: (typing.Text) -> BuildItem + """ + :return: Get the BuildItem from a JSON string + """ + d = json.loads(str(json_str)) + result = BuildItem( + app_path=d["app_dir"], + work_dir=d["work_dir"], + build_path=d["build_dir"], + build_log_path=d["build_log_path"], + sdkconfig_path=d["sdkconfig"], + config_name=d["config"], + target=d["target"], + build_system=d["build_system"], + ) + result.verbose = d["verbose"] + return result + + def _expand(self, path): # type: (str) -> str + """ + Internal method, expands any of the placeholders in {app,work,build} paths. + """ + if not path: + return path + + if self.index is not None: + path = path.replace(INDEX_PLACEHOLDER, str(self.index)) + path = path.replace(TARGET_PLACEHOLDER, self.target) + path = path.replace(NAME_PLACEHOLDER, self._app_name) + if (FULL_NAME_PLACEHOLDER in path): # to avoid recursion to the call to app_dir in the next line: + path = path.replace(FULL_NAME_PLACEHOLDER, self.app_dir.replace(os.path.sep, "_")) + wildcard_pos = path.find(WILDCARD_PLACEHOLDER) + if wildcard_pos != -1: + if self.config_name: + # if config name is defined, put it in place of the placeholder + path = path.replace(WILDCARD_PLACEHOLDER, self.config_name) + else: + # otherwise, remove the placeholder and one character on the left + # (which is usually an underscore, dash, or other delimiter) + left_of_wildcard = max(0, wildcard_pos - 1) + right_of_wildcard = wildcard_pos + len(WILDCARD_PLACEHOLDER) + path = path[0:left_of_wildcard] + path[right_of_wildcard:] + path = os.path.expandvars(path) + return path + + +class BuildSystem(object): + """ + Class representing a build system. + Derived classes implement the methods below. + Objects of these classes aren't instantiated, instead the class (type object) is used. + """ + + NAME = "undefined" + + @staticmethod + def build(self): + raise NotImplementedError() + + @staticmethod + def is_app(path): + raise NotImplementedError() + + @staticmethod + def supported_targets(app_path): + raise NotImplementedError() + + +class BuildError(RuntimeError): + pass + + +def setup_logging(args): + """ + Configure logging module according to the number of '--verbose'/'-v' arguments and the --log-file argument. 
+ :param args: namespace obtained from argparse + """ + if not args.verbose: + log_level = logging.WARNING + elif args.verbose == 1: + log_level = logging.INFO + else: + log_level = logging.DEBUG + + logging.basicConfig( + format="%(levelname)s: %(message)s", + stream=args.log_file or sys.stderr, + level=log_level, + ) diff --git a/tools/find_build_apps/make.py b/tools/find_build_apps/make.py new file mode 100644 index 0000000000..d296817baa --- /dev/null +++ b/tools/find_build_apps/make.py @@ -0,0 +1,30 @@ +import os +from .common import BuildSystem + +# Same for the Makefile projects: +MAKE_PROJECT_LINE = r"include $(IDF_PATH)/make/project.mk" + +BUILD_SYSTEM_MAKE = "make" + + +class MakeBuildSystem(BuildSystem): + NAME = BUILD_SYSTEM_MAKE + + @staticmethod + def build(build_item): + raise NotImplementedError() + + @staticmethod + def is_app(path): + makefile_path = os.path.join(path, "Makefile") + if not os.path.exists(makefile_path): + return False + with open(makefile_path, "r") as makefile: + makefile_content = makefile.read() + if MAKE_PROJECT_LINE not in makefile_content: + return False + return True + + @staticmethod + def supported_targets(app_path): + return ["esp32"] diff --git a/tools/tiny-test-fw/CIAssignExampleTest.py b/tools/tiny-test-fw/CIAssignExampleTest.py deleted file mode 100644 index bd6bdaf3b8..0000000000 --- a/tools/tiny-test-fw/CIAssignExampleTest.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http:#www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Command line tool to assign example tests to CI test jobs. 
-""" - -# TODO: Need to handle running examples on different chips -import os -import sys -import re -import argparse - -try: - from Utility.CIAssignTest import AssignTest -except ImportError: - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path: - sys.path.insert(0, test_fw_path) - from Utility.CIAssignTest import AssignTest - -from Utility.CIAssignTest import Group - - -class ExampleGroup(Group): - SORT_KEYS = CI_JOB_MATCH_KEYS = ["env_tag", "chip"] - - -class CIExampleAssignTest(AssignTest): - CI_TEST_JOB_PATTERN = re.compile(r"^example_test_.+") - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument("test_case", - help="test case folder or file") - parser.add_argument("ci_config_file", - help="gitlab ci config file") - parser.add_argument("output_path", - help="output path of config files") - args = parser.parse_args() - - assign_test = CIExampleAssignTest(args.test_case, args.ci_config_file, case_group=ExampleGroup) - assign_test.assign_cases() - assign_test.output_configs(args.output_path) diff --git a/tools/tiny-test-fw/IDF/IDFApp.py b/tools/tiny-test-fw/IDF/IDFApp.py deleted file mode 100644 index 82f42b0c35..0000000000 --- a/tools/tiny-test-fw/IDF/IDFApp.py +++ /dev/null @@ -1,259 +0,0 @@ -# Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http:#www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" IDF Test Applications """ -import subprocess - -import os -import json -import App - - -class IDFApp(App.BaseApp): - """ - Implements common esp-idf application behavior. - idf applications should inherent from this class and overwrite method get_binary_path. - """ - - IDF_DOWNLOAD_CONFIG_FILE = "download.config" - IDF_FLASH_ARGS_FILE = "flasher_args.json" - - def __init__(self, app_path): - super(IDFApp, self).__init__(app_path) - self.idf_path = self.get_sdk_path() - self.binary_path = self.get_binary_path(app_path) - self.elf_file = self._get_elf_file_path(self.binary_path) - assert os.path.exists(self.binary_path) - if self.IDF_DOWNLOAD_CONFIG_FILE not in os.listdir(self.binary_path): - if self.IDF_FLASH_ARGS_FILE not in os.listdir(self.binary_path): - msg = ("Neither {} nor {} exists. " - "Try to run 'make print_flash_cmd | tail -n 1 > {}/{}' " - "or 'idf.py build' " - "for resolving the issue." 
- "").format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE, - self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE) - raise AssertionError(msg) - - self.flash_files, self.flash_settings = self._parse_flash_download_config() - self.partition_table = self._parse_partition_table() - - @classmethod - def get_sdk_path(cls): - idf_path = os.getenv("IDF_PATH") - assert idf_path - assert os.path.exists(idf_path) - return idf_path - - def _get_sdkconfig_paths(self): - """ - returns list of possible paths where sdkconfig could be found - - Note: could be overwritten by a derived class to provide other locations or order - """ - return [os.path.join(self.binary_path, "sdkconfig"), os.path.join(self.binary_path, "..", "sdkconfig")] - - def get_sdkconfig(self): - """ - reads sdkconfig and returns a dictionary with all configuredvariables - - :param sdkconfig_file: location of sdkconfig - :raise: AssertionError: if sdkconfig file does not exist in defined paths - """ - d = {} - sdkconfig_file = None - for i in self._get_sdkconfig_paths(): - if os.path.exists(i): - sdkconfig_file = i - break - assert sdkconfig_file is not None - with open(sdkconfig_file) as f: - for line in f: - configs = line.split('=') - if len(configs) == 2: - d[configs[0]] = configs[1].rstrip() - return d - - def get_binary_path(self, app_path): - """ - get binary path according to input app_path. - - subclass must overwrite this method. - - :param app_path: path of application - :return: abs app binary path - """ - pass - - @staticmethod - def _get_elf_file_path(binary_path): - ret = "" - file_names = os.listdir(binary_path) - for fn in file_names: - if os.path.splitext(fn)[1] == ".elf": - ret = os.path.join(binary_path, fn) - return ret - - def _parse_flash_download_config(self): - """ - Parse flash download config from build metadata files - - Sets self.flash_files, self.flash_settings - - (Called from constructor) - - Returns (flash_files, flash_settings) - """ - - if self.IDF_FLASH_ARGS_FILE in os.listdir(self.binary_path): - # CMake version using build metadata file - with open(os.path.join(self.binary_path, self.IDF_FLASH_ARGS_FILE), "r") as f: - args = json.load(f) - flash_files = [(offs,file) for (offs,file) in args["flash_files"].items() if offs != ""] - flash_settings = args["flash_settings"] - else: - # GNU Make version uses download.config arguments file - with open(os.path.join(self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE), "r") as f: - args = f.readlines()[-1].split(" ") - flash_files = [] - flash_settings = {} - for idx in range(0, len(args), 2): # process arguments in pairs - if args[idx].startswith("--"): - # strip the -- from the command line argument - flash_settings[args[idx][2:]] = args[idx + 1] - else: - # offs, filename - flash_files.append((args[idx], args[idx + 1])) - - # The build metadata file does not currently have details, which files should be encrypted and which not. - # Assume that all files should be encrypted if flash encryption is enabled in development mode. 
- sdkconfig_dict = self.get_sdkconfig() - flash_settings["encrypt"] = "CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT" in sdkconfig_dict - - # make file offsets into integers, make paths absolute - flash_files = [(int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files] - - return (flash_files, flash_settings) - - def _parse_partition_table(self): - """ - Parse partition table contents based on app binaries - - Returns partition_table data - - (Called from constructor) - """ - partition_tool = os.path.join(self.idf_path, - "components", - "partition_table", - "gen_esp32part.py") - assert os.path.exists(partition_tool) - - for (_, path) in self.flash_files: - if "partition" in path: - partition_file = os.path.join(self.binary_path, path) - break - else: - raise ValueError("No partition table found for IDF binary path: {}".format(self.binary_path)) - - process = subprocess.Popen(["python", partition_tool, partition_file], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - raw_data = process.stdout.read() - if isinstance(raw_data, bytes): - raw_data = raw_data.decode() - partition_table = dict() - - for line in raw_data.splitlines(): - if line[0] != "#": - try: - _name, _type, _subtype, _offset, _size, _flags = line.split(",") - if _size[-1] == "K": - _size = int(_size[:-1]) * 1024 - elif _size[-1] == "M": - _size = int(_size[:-1]) * 1024 * 1024 - else: - _size = int(_size) - except ValueError: - continue - partition_table[_name] = { - "type": _type, - "subtype": _subtype, - "offset": _offset, - "size": _size, - "flags": _flags - } - - return partition_table - - -class Example(IDFApp): - def _get_sdkconfig_paths(self): - """ - overrides the parent method to provide exact path of sdkconfig for example tests - """ - return [os.path.join(self.binary_path, "..", "sdkconfig")] - - def get_binary_path(self, app_path): - # build folder of example path - path = os.path.join(self.idf_path, app_path, "build") - if not os.path.exists(path): - # search for CI build folders - app = os.path.basename(app_path) - example_path = os.path.join(self.idf_path, "build_examples", "example_builds") - for dirpath, dirnames, files in os.walk(example_path): - if dirnames: - if dirnames[0] == app: - path = os.path.join(example_path, dirpath, dirnames[0], "build") - break - else: - raise OSError("Failed to find example binary") - return path - - -class UT(IDFApp): - def get_binary_path(self, app_path): - """ - :param app_path: app path or app config - :return: binary path - """ - if not app_path: - app_path = "default" - - path = os.path.join(self.idf_path, app_path) - if not os.path.exists(path): - while True: - # try to get by config - if app_path == "default": - # it's default config, we first try to get form build folder of unit-test-app - path = os.path.join(self.idf_path, "tools", "unit-test-app", "build") - if os.path.exists(path): - # found, use bin in build path - break - # ``make ut-build-all-configs`` or ``make ut-build-CONFIG`` will copy binary to output folder - path = os.path.join(self.idf_path, "tools", "unit-test-app", "output", app_path) - if os.path.exists(path): - break - raise OSError("Failed to get unit-test-app binary path") - return path - - -class SSC(IDFApp): - def get_binary_path(self, app_path): - # TODO: to implement SSC get binary path - return app_path - - -class AT(IDFApp): - def get_binary_path(self, app_path): - # TODO: to implement AT get binary path - return app_path diff --git a/tools/unit-test-app/unit_test.py b/tools/unit-test-app/unit_test.py 
index 2f80db6f5e..6028f6f809 100755 --- a/tools/unit-test-app/unit_test.py +++ b/tools/unit-test-app/unit_test.py @@ -19,33 +19,12 @@ Test script for unit test case. """ import re -import os -import sys import time import argparse import threading -try: - import TinyFW -except ImportError: - # if we want to run test case outside `tiny-test-fw` folder, - # we need to insert tiny-test-fw path into sys path - test_fw_path = os.getenv("TEST_FW_PATH") - if test_fw_path and test_fw_path not in sys.path: - sys.path.insert(0, test_fw_path) - else: - # or try the copy in IDF - idf_path = os.getenv("IDF_PATH") - tiny_test_path = idf_path + "/tools/tiny-test-fw" - if os.path.exists(tiny_test_path): - sys.path.insert(0, tiny_test_path) - import TinyFW - -import IDF -import Utility -import Env -from DUT import ExpectTimeout -from IDF.IDFApp import UT +from tiny_test_fw import TinyFW, Utility, Env, DUT +import ttfw_idf UT_APP_BOOT_UP_DONE = "Press ENTER to see the list of tests." @@ -56,6 +35,7 @@ FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored") END_LIST_STR = r'\r?\nEnter test for running' TEST_PATTERN = re.compile(r'\((\d+)\)\s+"([^"]+)" ([^\r\n]+)\r?\n(' + END_LIST_STR + r')?') TEST_SUBMENU_PATTERN = re.compile(r'\s+\((\d+)\)\s+"[^"]+"\r?\n(?=(?=\()|(' + END_LIST_STR + r'))') +UT_APP_PATH = "tools/unit-test-app" SIMPLE_TEST_ID = 0 MULTI_STAGE_ID = 1 @@ -157,6 +137,10 @@ def replace_app_bin(dut, name, new_app_bin): break +def format_case_name(case): + return "[{}] {}".format(case["config"], case["name"]) + + def reset_dut(dut): dut.reset() # esptool ``run`` cmd takes quite long time. @@ -175,7 +159,7 @@ def reset_dut(dut): try: dut.expect("0 Tests 0 Failures 0 Ignored", timeout=TEST_HISTORY_CHECK_TIMEOUT) break - except ExpectTimeout: + except DUT.ExpectTimeout: pass else: raise AssertionError("Reset {} ({}) failed!".format(dut.name, dut.port)) @@ -202,9 +186,9 @@ def run_one_normal_case(dut, one_case, junit_test_case): test_finish.append(True) output = dut.stop_capture_raw_data() if result: - Utility.console_log("Success: " + one_case["name"], color="green") + Utility.console_log("Success: " + format_case_name(one_case), color="green") else: - Utility.console_log("Failed: " + one_case["name"], color="red") + Utility.console_log("Failed: " + format_case_name(one_case), color="red") junit_test_case.add_failure_info(output) raise TestCaseFailed() @@ -221,7 +205,7 @@ def run_one_normal_case(dut, one_case, junit_test_case): assert not exception_reset_list if int(data[1]): # case ignored - Utility.console_log("Ignored: " + one_case["name"], color="orange") + Utility.console_log("Ignored: " + format_case_name(one_case), color="orange") junit_test_case.add_skipped_info("ignored") one_case_finish(not int(data[0])) @@ -239,7 +223,7 @@ def run_one_normal_case(dut, one_case, junit_test_case): err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"], exception_reset_list) Utility.console_log(err_msg, color="orange") - junit_test_case.add_error_info(err_msg) + junit_test_case.add_failure_info(err_msg) one_case_finish(result) while not test_finish: @@ -250,14 +234,14 @@ def run_one_normal_case(dut, one_case, junit_test_case): (FINISH_PATTERN, handle_test_finish), (UT_APP_BOOT_UP_DONE, handle_reset_finish), timeout=one_case["timeout"]) - except ExpectTimeout: + except DUT.ExpectTimeout: Utility.console_log("Timeout in expect", color="orange") junit_test_case.add_failure_info("timeout") one_case_finish(False) break -@IDF.idf_unit_test(env_tag="UT_T1_1", 
junit_report_by_case=True) +@ttfw_idf.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True) def run_unit_test_cases(env, extra_data): """ extra_data can be three types of value @@ -284,7 +268,7 @@ def run_unit_test_cases(env, extra_data): for ut_config in case_config: Utility.console_log("Running unit test for config: " + ut_config, "O") - dut = env.get_dut("unit-test-app", app_path=ut_config, allow_dut_exception=True) + dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True) if len(case_config[ut_config]) > 0: replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin')) dut.start_app() @@ -298,13 +282,15 @@ def run_unit_test_cases(env, extra_data): run_one_normal_case(dut, one_case, junit_test_case) performance_items = dut.get_performance_items() except TestCaseFailed: - failed_cases.append(one_case["name"]) + failed_cases.append(format_case_name(one_case)) except Exception as e: junit_test_case.add_failure_info("Unexpected exception: " + str(e)) - failed_cases.append(one_case["name"]) + failed_cases.append(format_case_name(one_case)) finally: TinyFW.JunitReport.update_performance(performance_items) TinyFW.JunitReport.test_case_finish(junit_test_case) + # close DUT when finish running all cases for one config + env.close_dut(dut.name) # raise exception if any case fails if failed_cases: @@ -394,7 +380,7 @@ class Handler(threading.Thread): time.sleep(1) self.dut.write("\"{}\"".format(self.parent_case_name)) self.dut.expect("Running " + self.parent_case_name + "...") - except ExpectTimeout: + except DUT.ExpectTimeout: Utility.console_log("No case detected!", color="orange") while not self.finish and not self.force_stop.isSet(): try: @@ -404,7 +390,7 @@ class Handler(threading.Thread): (self.SEND_SIGNAL_PATTERN, device_send_action), # send signal pattern (self.FINISH_PATTERN, handle_device_test_finish), # test finish pattern timeout=self.timeout) - except ExpectTimeout: + except DUT.ExpectTimeout: Utility.console_log("Timeout in expect", color="orange") one_device_case_finish(False) break @@ -423,7 +409,7 @@ def get_dut(duts, env, name, ut_config, app_bin=None): if name in duts: dut = duts[name] else: - dut = env.get_dut(name, app_path=ut_config, allow_dut_exception=True) + dut = env.get_dut(name, app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True) duts[name] = dut replace_app_bin(dut, "unit-test-app", app_bin) dut.start_app() # download bin to board @@ -464,7 +450,7 @@ def run_one_multiple_devices_case(duts, ut_config, env, one_case, app_bin, junit return result -@IDF.idf_unit_test(env_tag="UT_T2_1", junit_report_by_case=True) +@ttfw_idf.idf_unit_test(env_tag="UT_T2_1", junit_report_by_case=True) def run_multiple_devices_cases(env, extra_data): """ extra_data can be two types of value @@ -501,11 +487,15 @@ def run_multiple_devices_cases(env, extra_data): junit_test_case.add_failure_info("Unexpected exception: " + str(e)) finally: if result: - Utility.console_log("Success: " + one_case["name"], color="green") + Utility.console_log("Success: " + format_case_name(one_case), color="green") else: - failed_cases.append(one_case["name"]) - Utility.console_log("Failed: " + one_case["name"], color="red") + failed_cases.append(format_case_name(one_case)) + Utility.console_log("Failed: " + format_case_name(one_case), color="red") TinyFW.JunitReport.test_case_finish(junit_test_case) + # close all DUTs when finish running all cases for one config + for dut in duts: + env.close_dut(dut) + duts = {} if 
failed_cases: Utility.console_log("Failed Cases:", color="red") @@ -562,9 +552,9 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): result = result and check_reset() output = dut.stop_capture_raw_data() if result: - Utility.console_log("Success: " + one_case["name"], color="green") + Utility.console_log("Success: " + format_case_name(one_case), color="green") else: - Utility.console_log("Failed: " + one_case["name"], color="red") + Utility.console_log("Failed: " + format_case_name(one_case), color="red") junit_test_case.add_failure_info(output) raise TestCaseFailed() stage_finish.append("break") @@ -581,7 +571,7 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): # in this scenario reset should not happen if int(data[1]): # case ignored - Utility.console_log("Ignored: " + one_case["name"], color="orange") + Utility.console_log("Ignored: " + format_case_name(one_case), color="orange") junit_test_case.add_skipped_info("ignored") # only passed in last stage will be regarded as real pass if last_stage(): @@ -607,7 +597,7 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): (FINISH_PATTERN, handle_test_finish), (UT_APP_BOOT_UP_DONE, handle_next_stage), timeout=one_case["timeout"]) - except ExpectTimeout: + except DUT.ExpectTimeout: Utility.console_log("Timeout in expect", color="orange") one_case_finish(False) break @@ -616,7 +606,7 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): break -@IDF.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True) +@ttfw_idf.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True) def run_multiple_stage_cases(env, extra_data): """ extra_data can be 2 types of value @@ -638,7 +628,7 @@ def run_multiple_stage_cases(env, extra_data): for ut_config in case_config: Utility.console_log("Running unit test for config: " + ut_config, "O") - dut = env.get_dut("unit-test-app", app_path=ut_config, allow_dut_exception=True) + dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True) if len(case_config[ut_config]) > 0: replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin')) dut.start_app() @@ -650,13 +640,15 @@ def run_multiple_stage_cases(env, extra_data): run_one_multiple_stage_case(dut, one_case, junit_test_case) performance_items = dut.get_performance_items() except TestCaseFailed: - failed_cases.append(one_case["name"]) + failed_cases.append(format_case_name(one_case)) except Exception as e: junit_test_case.add_failure_info("Unexpected exception: " + str(e)) - failed_cases.append(one_case["name"]) + failed_cases.append(format_case_name(one_case)) finally: TinyFW.JunitReport.update_performance(performance_items) TinyFW.JunitReport.test_case_finish(junit_test_case) + # close DUT when finish running all cases for one config + env.close_dut(dut.name) # raise exception if any case fails if failed_cases: @@ -671,7 +663,7 @@ def detect_update_unit_test_info(env, extra_data, app_bin): case_config = format_test_case_config(extra_data) for ut_config in case_config: - dut = env.get_dut("unit-test-app", app_path=ut_config) + dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config) replace_app_bin(dut, "unit-test-app", app_bin) dut.start_app() @@ -721,7 +713,7 @@ def detect_update_unit_test_info(env, extra_data, app_bin): for _dic in extra_data: if 'type' not in _dic: raise ValueError("Unit test \"{}\" doesn't exist in the flashed device!".format(_dic.get('name'))) - except ExpectTimeout: + except 
DUT.ExpectTimeout: Utility.console_log("Timeout during getting the test list", color="red") finally: dut.close() @@ -776,8 +768,8 @@ if __name__ == '__main__': TinyFW.set_default_config(env_config_file=args.env_config_file) env_config = TinyFW.get_default_config() - env_config['app'] = UT - env_config['dut'] = IDF.IDFDUT + env_config['app'] = ttfw_idf.UT + env_config['dut'] = ttfw_idf.IDFDUT env_config['test_suite_name'] = 'unit_test_parsing' test_env = Env.Env(**env_config) detect_update_unit_test_info(test_env, extra_data=list_of_dicts, app_bin=args.app_bin)
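
Taken together, the new find_build_apps package can be exercised as in the sketch below. The application path, work-directory pattern and configuration name are illustrative only, and the snippet assumes ESP-IDF's tools directory is on sys.path:

from find_build_apps import BuildItem

item = BuildItem(
    app_path="examples/get-started/hello_world",  # hypothetical app
    work_dir="/tmp/build_@f_@w",                  # @f/@w placeholders from common.py
    build_path="build",
    build_log_path=None,
    target="esp32",
    sdkconfig_path=None,
    config_name="default",
    build_system="cmake",
)

# Placeholders are expanded lazily by the path properties (result shown for a POSIX host):
print(item.work_dir)  # /tmp/build_examples_get-started_hello_world_default

# find_apps.py emits one JSON line per build; a consumer of that list can
# reconstruct the BuildItem from it:
restored = BuildItem.from_json(item.to_json())
assert restored.target == "esp32" and restored.config_name == "default"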