Merge branch 'bugfix/bootloader_utility_warnings_v4.4' into 'release/v4.4'

bootloader_support: Fix compiler warnings (v4.4)
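
The warning being silenced here is presumably GCC's -Wunused-but-set-variable: when verbose/debug
logging is compiled out, rc is assigned by the cache MMU setup calls but never read, so declaring it
__attribute__((unused)) keeps the value available for the log statements without tripping the warning.
The other change hoists the repeated address-masking expressions (drom_addr & MMU_FLASH_MASK,
irom_addr & MMU_FLASH_MASK, and the 0xffff0000 variants) into drom_addr_aligned / irom_addr_aligned
locals, so each aligned address is computed once and used consistently in the log lines and MMU calls.

A minimal standalone sketch of the unused-variable pattern (LOGV, LOG_VERBOSE, mmu_set_stub and demo
are illustrative names, not taken from the commit):

    #include <stdio.h>

    /* Assume verbose logging is disabled, as in a typical release bootloader build. */
    #define LOG_VERBOSE 0
    #if LOG_VERBOSE
    #define LOGV(fmt, ...) printf(fmt "\n", __VA_ARGS__)
    #else
    #define LOGV(fmt, ...)                     /* expands to nothing */
    #endif

    static int mmu_set_stub(void) { return 0; }   /* stand-in for a ROM cache MMU call */

    void demo(void)
    {
        int rc __attribute__((unused));        /* without the attribute, GCC warns: set but not used */
        rc = mmu_set_stub();
        LOGV("rc=%d", rc);                     /* no-op when verbose logging is compiled out */
    }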

See merge request espressif/esp-idf!16139
Author: Mahavir Jain
Date:   2021-11-26 13:32:28 +00:00

@@ -700,7 +700,8 @@ static void set_cache_and_start_app(
     uint32_t irom_size,
     uint32_t entry_addr)
 {
-    int rc;
+    int rc __attribute__((unused));
     ESP_LOGD(TAG, "configure drom and irom and start");
 #if CONFIG_IDF_TARGET_ESP32
     Cache_Read_Disable(0);
@@ -720,7 +721,7 @@ static void set_cache_and_start_app(
 #endif
     /* Clear the MMU entries that are already set up,
-       so the new app only has the mappings it creates.
+     * so the new app only has the mappings it creates.
     */
 #if CONFIG_IDF_TARGET_ESP32
     for (int i = 0; i < DPORT_FLASH_MMU_TABLE_SIZE; i++) {
@@ -732,31 +733,33 @@ static void set_cache_and_start_app(
     }
 #endif
     uint32_t drom_load_addr_aligned = drom_load_addr & MMU_FLASH_MASK;
+    uint32_t drom_addr_aligned = drom_addr & MMU_FLASH_MASK;
     uint32_t drom_page_count = bootloader_cache_pages_to_map(drom_size, drom_load_addr);
     ESP_LOGV(TAG, "d mmu set paddr=%08x vaddr=%08x size=%d n=%d",
-             drom_addr & MMU_FLASH_MASK, drom_load_addr_aligned, drom_size, drom_page_count);
+             drom_addr_aligned, drom_load_addr_aligned, drom_size, drom_page_count);
 #if CONFIG_IDF_TARGET_ESP32
-    rc = cache_flash_mmu_set(0, 0, drom_load_addr_aligned, drom_addr & MMU_FLASH_MASK, 64, drom_page_count);
+    rc = cache_flash_mmu_set(0, 0, drom_load_addr_aligned, drom_addr_aligned, 64, drom_page_count);
 #elif CONFIG_IDF_TARGET_ESP32S2
-    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr & 0xffff0000, drom_addr & 0xffff0000, 64, drom_page_count, 0);
+    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr_aligned, drom_addr_aligned, 64, drom_page_count, 0);
 #elif CONFIG_IDF_TARGET_ESP32S3
-    rc = Cache_Dbus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr & 0xffff0000, drom_addr & 0xffff0000, 64, drom_page_count, 0);
+    rc = Cache_Dbus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr_aligned, drom_addr_aligned, 64, drom_page_count, 0);
 #elif CONFIG_IDF_TARGET_ESP32C3
-    rc = Cache_Dbus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr & 0xffff0000, drom_addr & 0xffff0000, 64, drom_page_count, 0);
+    rc = Cache_Dbus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr_aligned, drom_addr_aligned, 64, drom_page_count, 0);
 #elif CONFIG_IDF_TARGET_ESP32H2
-    rc = Cache_Dbus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr & 0xffff0000, drom_addr & 0xffff0000, 64, drom_page_count, 0);
+    rc = Cache_Dbus_MMU_Set(MMU_ACCESS_FLASH, drom_load_addr_aligned, drom_addr_aligned, 64, drom_page_count, 0);
 #endif
     ESP_LOGV(TAG, "rc=%d", rc);
 #if CONFIG_IDF_TARGET_ESP32
-    rc = cache_flash_mmu_set(1, 0, drom_load_addr_aligned, drom_addr & MMU_FLASH_MASK, 64, drom_page_count);
+    rc = cache_flash_mmu_set(1, 0, drom_load_addr_aligned, drom_addr_aligned, 64, drom_page_count);
     ESP_LOGV(TAG, "rc=%d", rc);
 #endif
     uint32_t irom_load_addr_aligned = irom_load_addr & MMU_FLASH_MASK;
+    uint32_t irom_addr_aligned = irom_addr & MMU_FLASH_MASK;
     uint32_t irom_page_count = bootloader_cache_pages_to_map(irom_size, irom_load_addr);
     ESP_LOGV(TAG, "i mmu set paddr=%08x vaddr=%08x size=%d n=%d",
-             irom_addr & MMU_FLASH_MASK, irom_load_addr_aligned, irom_size, irom_page_count);
+             irom_addr_aligned, irom_load_addr_aligned, irom_size, irom_page_count);
 #if CONFIG_IDF_TARGET_ESP32
-    rc = cache_flash_mmu_set(0, 0, irom_load_addr_aligned, irom_addr & MMU_FLASH_MASK, 64, irom_page_count);
+    rc = cache_flash_mmu_set(0, 0, irom_load_addr_aligned, irom_addr_aligned, 64, irom_page_count);
 #elif CONFIG_IDF_TARGET_ESP32S2
     uint32_t iram1_used = 0;
     if (irom_load_addr + irom_size > IRAM1_ADDRESS_LOW) {
@@ -767,17 +770,17 @@ static void set_cache_and_start_app(
         rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, IRAM1_ADDRESS_LOW, 0, 64, 64, 1);
         REG_CLR_BIT(EXTMEM_PRO_ICACHE_CTRL1_REG, EXTMEM_PRO_ICACHE_MASK_IRAM1);
     }
-    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr & 0xffff0000, irom_addr & 0xffff0000, 64, irom_page_count, 0);
+    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr_aligned, irom_addr_aligned, 64, irom_page_count, 0);
 #elif CONFIG_IDF_TARGET_ESP32S3
-    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr & 0xffff0000, irom_addr & 0xffff0000, 64, irom_page_count, 0);
+    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr_aligned, irom_addr_aligned, 64, irom_page_count, 0);
 #elif CONFIG_IDF_TARGET_ESP32C3
-    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr & 0xffff0000, irom_addr & 0xffff0000, 64, irom_page_count, 0);
+    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr_aligned, irom_addr_aligned, 64, irom_page_count, 0);
 #elif CONFIG_IDF_TARGET_ESP32H2
-    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr & 0xffff0000, irom_addr & 0xffff0000, 64, irom_page_count, 0);
+    rc = Cache_Ibus_MMU_Set(MMU_ACCESS_FLASH, irom_load_addr_aligned, irom_addr_aligned, 64, irom_page_count, 0);
 #endif
     ESP_LOGV(TAG, "rc=%d", rc);
 #if CONFIG_IDF_TARGET_ESP32
-    rc = cache_flash_mmu_set(1, 0, irom_load_addr_aligned, irom_addr & MMU_FLASH_MASK, 64, irom_page_count);
+    rc = cache_flash_mmu_set(1, 0, irom_load_addr_aligned, irom_addr_aligned, 64, irom_page_count);
     ESP_LOGV(TAG, "rc=%d", rc);
     DPORT_REG_CLR_BIT( DPORT_PRO_CACHE_CTRL1_REG,
                        (DPORT_PRO_CACHE_MASK_IRAM0) | (DPORT_PRO_CACHE_MASK_IRAM1 & 0) |