feat(cache): msync driver support on h4

armando
2025-07-10 14:16:09 +08:00
committed by Armando (Dou Yiwen)
parent fb3e616750
commit 84ae601fef
7 changed files with 628 additions and 52 deletions

View File

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2023-2025 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -40,12 +40,12 @@ const static char *TAG = "CACHE_TEST";
#elif CONFIG_IDF_TARGET_ESP32P4
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#define TEST_SYNC_SIZE CONFIG_CACHE_L2_CACHE_SIZE
#elif CONFIG_IDF_TARGET_ESP32C5
#elif CONFIG_IDF_TARGET_ESP32C5 || CONFIG_IDF_TARGET_ESP32C61
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#define TEST_SYNC_SIZE CONFIG_CACHE_L1_CACHE_SIZE
#elif CONFIG_IDF_TARGET_ESP32C61
#elif CONFIG_IDF_TARGET_ESP32H4
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#define TEST_SYNC_SIZE CONFIG_CACHE_L1_CACHE_SIZE
#define TEST_SYNC_SIZE CONFIG_CACHE_L1_DCACHE_SIZE
#endif
#define RECORD_TIME_PREPARE() uint32_t __t1, __t2
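For context, a minimal sketch of how this test window would be synced through the msync path, assuming the public esp_cache.h API (esp_cache_msync and its flags); the helper name is illustrative and not part of this diff:
#include "esp_cache.h"
#include "esp_err.h"
// Write back the PSRAM test window (cache-to-memory), then drop the cached copies (memory-to-cache + invalidate)
static void sync_test_window(void)
{
    ESP_ERROR_CHECK(esp_cache_msync((void *)TEST_SYNC_START, TEST_SYNC_SIZE, ESP_CACHE_MSYNC_FLAG_DIR_C2M));
    ESP_ERROR_CHECK(esp_cache_msync((void *)TEST_SYNC_START, TEST_SYNC_SIZE, ESP_CACHE_MSYNC_FLAG_DIR_M2C | ESP_CACHE_MSYNC_FLAG_INVALIDATE));
}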

View File

@@ -0,0 +1,19 @@
menu "Cache config"
config CACHE_L1_ICACHE_SIZE
hex
default 0x4000
config CACHE_L1_DCACHE_SIZE
hex
default 0x8000
config CACHE_L1_ICACHE_LINE_SIZE
int
default 32
config CACHE_L1_DCACHE_LINE_SIZE
int
default 32
endmenu # Cache config
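These symbols describe the H4 cache geometry (0x4000 = 16 KB ICache, 0x8000 = 32 KB DCache, 32-byte lines). A hedged sketch of the usual consumer pattern; the helper and the DMA capability choice are illustrative only:
#include "sdkconfig.h"
#include "esp_heap_caps.h"
// Allocate a DMA-capable buffer aligned and padded to whole L1 DCache lines,
// so a later msync on it cannot clip a neighbouring allocation
static uint8_t *alloc_line_aligned(size_t len)
{
    size_t line = CONFIG_CACHE_L1_DCACHE_LINE_SIZE;
    size_t padded = (len + line - 1) & ~(line - 1);
    return heap_caps_aligned_alloc(line, padded, MALLOC_CAP_DMA);
}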

View File

@@ -15,8 +15,6 @@
#include "hal/assert.h"
#include "rom/cache.h"
//TODO: [ESP32H4] IDF-12289 inherited from verification branch, need check
#ifdef __cplusplus
extern "C" {
#endif
@@ -28,12 +26,52 @@ extern "C" {
#define CACHE_LL_L1_ACCESS_EVENT_MASK (1<<4)
#define CACHE_LL_L1_ACCESS_EVENT_CACHE_FAIL (1<<4)
#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_ID_ALL 2 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<0)
#define CACHE_LL_CACHE_AUTOLOAD (1<<0)
/**
* @brief Check if L1 ICache autoload is enabled or not
*
* @param cache_id id of the cache in this type and level
*
* @return true: enabled; false: disabled
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_icache_autoload_enabled(uint32_t cache_id)
{
bool enabled = false;
if (cache_id == 0) {
enabled = REG_GET_BIT(CACHE_L1_ICACHE0_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE0_AUTOLOAD_ENA);
} else if (cache_id == 1) {
enabled = REG_GET_BIT(CACHE_L1_ICACHE1_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE1_AUTOLOAD_ENA);
} else if (cache_id == CACHE_LL_ID_ALL) {
enabled = REG_GET_BIT(CACHE_L1_ICACHE0_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE0_AUTOLOAD_ENA) && REG_GET_BIT(CACHE_L1_ICACHE1_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE1_AUTOLOAD_ENA);
}
return enabled;
}
/**
* @brief Check if L1 DCache autoload is enabled or not
*
* @param cache_id id of the cache in this type and level
*
* @return true: enabled; false: disabled
*/
__attribute__((always_inline))
static inline bool cache_ll_l1_is_dcache_autoload_enabled(uint32_t cache_id)
{
bool enabled = false;
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
enabled = REG_GET_BIT(CACHE_L1_DCACHE_AUTOLOAD_CTRL_REG, CACHE_L1_DCACHE_AUTOLOAD_ENA);
}
return enabled;
}
/**
* @brief Check if Cache auto preload is enabled or not.
@@ -47,15 +85,60 @@ extern "C" {
__attribute__((always_inline))
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
bool enabled = false;
// if (REG_GET_BIT(CACHE_L1_CACHE_AUTOLOAD_CTRL_REG, CACHE_L1_CACHE_AUTOLOAD_ENA)) {
// enabled = true;
// }
//TODO
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
enabled = cache_ll_l1_is_icache_autoload_enabled(cache_id);
break;
case CACHE_TYPE_DATA:
enabled = cache_ll_l1_is_dcache_autoload_enabled(cache_id);
break;
case CACHE_TYPE_ALL:
default:
enabled = cache_ll_l1_is_icache_autoload_enabled(CACHE_LL_ID_ALL) && cache_ll_l1_is_dcache_autoload_enabled(cache_id);
break;
}
}
return enabled;
}
/*------------------------------------------------------------------------------
* Disable
*----------------------------------------------------------------------------*/
/**
* @brief Disable L1 ICache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_disable_icache(uint32_t cache_id)
{
if (cache_id == 0) {
Cache_Disable_Cache(CACHE_MAP_ICACHE0);
} else if (cache_id == 1) {
Cache_Disable_Cache(CACHE_MAP_ICACHE1);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Disable_Cache(CACHE_MAP_ICACHE0);
Cache_Disable_Cache(CACHE_MAP_ICACHE1);
}
}
/**
* @brief Disable L1 DCache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_disable_dcache(uint32_t cache_id)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Disable_Cache(CACHE_MAP_DCACHE);
}
}
/**
* @brief Disable Cache
*
@@ -66,8 +149,56 @@ static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cach
__attribute__((always_inline))
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
(void) type;
Cache_Disable_Cache(CACHE_MAP_ALL);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_disable_icache(cache_id);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_disable_dcache(cache_id);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_disable_icache(CACHE_LL_ID_ALL);
cache_ll_l1_disable_dcache(cache_id);
break;
}
}
}
/*------------------------------------------------------------------------------
* Enable
*----------------------------------------------------------------------------*/
/**
* @brief Enable L1 ICache
*
* @param cache_id id of the cache in this type and level
* @param inst_autoload_en icache autoload enabled or not
*/
__attribute__((always_inline))
static inline void cache_ll_l1_enable_icache(uint32_t cache_id, bool inst_autoload_en)
{
if (cache_id == 0) {
Cache_Enable_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
} else if (cache_id == 1) {
Cache_Enable_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Enable_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
Cache_Enable_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
}
}
/**
* @brief Enable L1 DCache
*
* @param cache_id id of the cache in this type and level
* @param data_autoload_en dcache autoload enabled or not
*/
__attribute__((always_inline))
static inline void cache_ll_l1_enable_dcache(uint32_t cache_id, bool data_autoload_en)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Enable_Cache(CACHE_MAP_DCACHE, data_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
}
}
/**
@@ -82,8 +213,57 @@ static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t typ
__attribute__((always_inline))
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
{
Cache_Enable_Cache(CACHE_MAP_ALL, inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
//TODO
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_enable_icache(cache_id, inst_autoload_en);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_enable_dcache(cache_id, data_autoload_en);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_enable_dcache(cache_id, data_autoload_en);
cache_ll_l1_enable_icache(cache_id, inst_autoload_en);
break;
}
}
}
/*------------------------------------------------------------------------------
* Suspend
*----------------------------------------------------------------------------*/
/**
* @brief Suspend L1 ICache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_suspend_icache(uint32_t cache_id)
{
if (cache_id == 0) {
Cache_Suspend_Cache(CACHE_MAP_ICACHE0);
} else if (cache_id == 1) {
Cache_Suspend_Cache(CACHE_MAP_ICACHE1);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Suspend_Cache(CACHE_MAP_ICACHE0);
Cache_Suspend_Cache(CACHE_MAP_ICACHE1);
}
}
/**
* @brief Suspend L1 DCache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_suspend_dcache(uint32_t cache_id)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Suspend_Cache(CACHE_MAP_DCACHE);
}
}
/**
@@ -96,7 +276,59 @@ static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type
__attribute__((always_inline))
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
Cache_Suspend_Cache(CACHE_MAP_ALL);
HAL_ASSERT(cache_level == 1 || cache_level == 2);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_suspend_icache(cache_id);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_suspend_dcache(cache_id);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_suspend_icache(CACHE_LL_ID_ALL);
cache_ll_l1_suspend_dcache(cache_id);
break;
}
}
}
/*------------------------------------------------------------------------------
* Resume
*----------------------------------------------------------------------------*/
/**
* @brief Resume L1 ICache
*
* @param cache_id id of the cache in this type and level
* @param inst_autoload_en icache autoload enabled or not
*/
__attribute__((always_inline))
static inline void cache_ll_l1_resume_icache(uint32_t cache_id, bool inst_autoload_en)
{
if (cache_id == 0) {
Cache_Resume_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
} else if (cache_id == 1) {
Cache_Resume_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Resume_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
Cache_Resume_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
}
}
/**
* @brief Resume L1 DCache
*
* @param cache_id id of the cache in this type and level
* @param data_autoload_en dcache autoload enabled or not
*/
__attribute__((always_inline))
static inline void cache_ll_l1_resume_dcache(uint32_t cache_id, bool data_autoload_en)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Resume_Cache(CACHE_MAP_DCACHE, data_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
}
}
/**
@@ -111,8 +343,58 @@ static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t typ
__attribute__((always_inline))
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
{
Cache_Resume_Cache(CACHE_MAP_ALL, inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
//TODO
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_resume_icache(cache_id, inst_autoload_en);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_resume_dcache(cache_id, data_autoload_en);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_resume_dcache(cache_id, data_autoload_en);
cache_ll_l1_resume_icache(CACHE_LL_ID_ALL, inst_autoload_en);
break;
}
}
}
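Taken together, a hypothetical caller would pair the autoload query with suspend/resume as below; the real call site is the cache_hal layer, which is not part of this diff:
#include "hal/cache_ll.h"
// Capture autoload state, suspend both caches, run code that must not miss into external memory, then resume
static void with_caches_suspended(void (*critical_fn)(void))
{
    bool i_autoload = cache_ll_is_cache_autoload_enabled(1, CACHE_TYPE_INSTRUCTION, CACHE_LL_ID_ALL);
    bool d_autoload = cache_ll_is_cache_autoload_enabled(1, CACHE_TYPE_DATA, CACHE_LL_ID_ALL);
    cache_ll_suspend_cache(1, CACHE_TYPE_ALL, CACHE_LL_ID_ALL);
    critical_fn();   // e.g. reprogram the MMU or flash/PSRAM timing; must be IRAM-safe
    cache_ll_resume_cache(1, CACHE_TYPE_ALL, CACHE_LL_ID_ALL, i_autoload, d_autoload);
}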
/*------------------------------------------------------------------------------
* Invalidate
*----------------------------------------------------------------------------*/
/**
* @brief Invalidate L1 ICache addr
*
* @param cache_id id of the cache in this type and level
* @param vaddr start address of the region to be invalidated
* @param size size of the region to be invalidated
*/
__attribute__((always_inline))
static inline void cache_ll_l1_invalidate_icache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
if (cache_id == 0) {
Cache_Invalidate_Addr(CACHE_MAP_ICACHE0, vaddr, size);
} else if (cache_id == 1) {
Cache_Invalidate_Addr(CACHE_MAP_ICACHE1, vaddr, size);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Invalidate_Addr(CACHE_MAP_ICACHE0 | CACHE_MAP_ICACHE1, vaddr, size);
}
}
/**
* @brief Invalidate L1 DCache addr
*
* @param cache_id id of the cache in this type and level
* @param vaddr start address of the region to be invalidated
* @param size size of the region to be invalidated
*/
__attribute__((always_inline))
static inline void cache_ll_l1_invalidate_dcache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Invalidate_Addr(CACHE_MAP_DCACHE, vaddr, size);
}
}
/**
@@ -129,7 +411,190 @@ static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type
__attribute__((always_inline))
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
Cache_Invalidate_Addr(CACHE_MAP_ALL, vaddr, size);
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_invalidate_icache_addr(cache_id, vaddr, size);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_invalidate_dcache_addr(cache_id, vaddr, size);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_invalidate_icache_addr(cache_id, vaddr, size);
cache_ll_l1_invalidate_dcache_addr(cache_id, vaddr, size);
break;
}
}
}
/**
* @brief Invalidate L1 ICache all
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_invalidate_icache_all(uint32_t cache_id)
{
if (cache_id == 0) {
Cache_Invalidate_All(CACHE_MAP_ICACHE0);
} else if (cache_id == 1) {
Cache_Invalidate_All(CACHE_MAP_ICACHE1);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Invalidate_All(CACHE_MAP_ICACHE0 | CACHE_MAP_ICACHE1);
}
}
/**
* @brief Invalidate L1 DCache all
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_invalidate_dcache_all(uint32_t cache_id)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Invalidate_All(CACHE_MAP_DCACHE);
}
}
/**
* @brief Invalidate all
*
* @param cache_level level of the cache
* @param type see `cache_type_t`
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_invalidate_all(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_invalidate_icache_all(cache_id);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_invalidate_dcache_all(cache_id);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_invalidate_icache_all(cache_id);
cache_ll_l1_invalidate_dcache_all(cache_id);
break;
}
}
}
/*------------------------------------------------------------------------------
* Writeback
*----------------------------------------------------------------------------*/
/**
* @brief Writeback L1 DCache addr
*
* @param cache_id id of the cache in this type and level
* @param vaddr start address of the region to be written back
* @param size size of the region to be written back
*/
__attribute__((always_inline))
static inline void cache_ll_l1_writeback_dcache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_WriteBack_Addr(vaddr, size);
}
}
/**
* @brief Writeback cache supported addr
*
* Writeback a cache item
*
* @param cache_level level of the cache
* @param type see `cache_type_t`
* @param cache_id id of the cache in this type and level
* @param vaddr start address of the region to be written back
* @param size size of the region to be written back
*/
__attribute__((always_inline))
static inline void cache_ll_writeback_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
{
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
switch (type) {
case CACHE_TYPE_DATA:
cache_ll_l1_writeback_dcache_addr(cache_id, vaddr, size);
break;
default:
HAL_ASSERT(false);
}
}
}
/**
* @brief Writeback L1 DCache all
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_writeback_dcache_all(uint32_t cache_id)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_WriteBack_All();
}
}
/**
* @brief Writeback all
*
* @param cache_level level of the cache
* @param type see `cache_type_t`
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_writeback_all(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
switch (type) {
case CACHE_TYPE_DATA:
cache_ll_l1_writeback_dcache_all(cache_id);
break;
default:
HAL_ASSERT(false);
}
}
}
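The invalidate and writeback families above are what the msync driver is expected to land on for a data region; an illustrative mapping (assumed, not shown in this diff):
#include "hal/cache_ll.h"
static void sync_data_region(uint32_t vaddr, uint32_t size)
{
    cache_ll_writeback_addr(1, CACHE_TYPE_DATA, CACHE_LL_ID_ALL, vaddr, size);    // C2M: flush dirty lines to PSRAM
    cache_ll_invalidate_addr(1, CACHE_TYPE_DATA, CACHE_LL_ID_ALL, vaddr, size);   // M2C: drop stale lines before reading DMA output
}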
/*------------------------------------------------------------------------------
* Freeze
*----------------------------------------------------------------------------*/
/**
* @brief Freeze L1 ICache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_freeze_icache(uint32_t cache_id)
{
if (cache_id == 0) {
Cache_Freeze_Enable(CACHE_MAP_ICACHE0, CACHE_FREEZE_ACK_BUSY);
} else if (cache_id == 1) {
Cache_Freeze_Enable(CACHE_MAP_ICACHE1, CACHE_FREEZE_ACK_BUSY);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Freeze_Enable(CACHE_MAP_ICACHE0, CACHE_FREEZE_ACK_BUSY);
Cache_Freeze_Enable(CACHE_MAP_ICACHE1, CACHE_FREEZE_ACK_BUSY);
}
}
/**
* @brief Freeze L1 DCache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_freeze_dcache(uint32_t cache_id)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Freeze_Enable(CACHE_MAP_DCACHE, CACHE_FREEZE_ACK_BUSY);
}
}
/**
@@ -142,7 +607,54 @@ static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t t
__attribute__((always_inline))
static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
Cache_Freeze_Enable(CACHE_MAP_ALL, CACHE_FREEZE_ACK_BUSY);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_freeze_icache(cache_id);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_freeze_dcache(cache_id);
break;
default:
cache_ll_l1_freeze_icache(CACHE_LL_ID_ALL);
cache_ll_l1_freeze_dcache(cache_id);
break;
}
}
}
/*------------------------------------------------------------------------------
* Unfreeze
*----------------------------------------------------------------------------*/
/**
* @brief Unfreeze L1 ICache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_unfreeze_icache(uint32_t cache_id)
{
if (cache_id == 0) {
Cache_Freeze_Disable(CACHE_MAP_ICACHE0);
} else if (cache_id == 1) {
Cache_Freeze_Disable(CACHE_MAP_ICACHE1);
} else if (cache_id == CACHE_LL_ID_ALL) {
Cache_Freeze_Disable(CACHE_MAP_ICACHE0);
Cache_Freeze_Disable(CACHE_MAP_ICACHE1);
}
}
/**
* @brief Unfreeze L1 DCache
*
* @param cache_id id of the cache in this type and level
*/
__attribute__((always_inline))
static inline void cache_ll_l1_unfreeze_dcache(uint32_t cache_id)
{
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
Cache_Freeze_Disable(CACHE_MAP_DCACHE);
}
}
/**
@@ -155,9 +667,26 @@ static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type
__attribute__((always_inline))
static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
Cache_Freeze_Disable(CACHE_MAP_ALL);
if (cache_level == 1) {
switch (type) {
case CACHE_TYPE_INSTRUCTION:
cache_ll_l1_unfreeze_icache(cache_id);
break;
case CACHE_TYPE_DATA:
cache_ll_l1_unfreeze_dcache(cache_id);
break;
case CACHE_TYPE_ALL:
default:
cache_ll_l1_unfreeze_dcache(cache_id);
cache_ll_l1_unfreeze_icache(CACHE_LL_ID_ALL);
break;
}
}
}
/*------------------------------------------------------------------------------
* Cache Line Size
*----------------------------------------------------------------------------*/
/**
* @brief Get Cache line size, in bytes
*
@@ -170,9 +699,7 @@ static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t ty
__attribute__((always_inline))
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
{
uint32_t size = 0;
size = Cache_Get_Line_Size(CACHE_MAP_DCACHE);
return size;
return 32;
}
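The hard-coded 32 matches the CONFIG_CACHE_L1_ICACHE_LINE_SIZE / CONFIG_CACHE_L1_DCACHE_LINE_SIZE values added above; callers typically use it for alignment checks, roughly as in this sketch:
static bool is_line_aligned(uint32_t vaddr, uint32_t size)
{
    // 32 bytes on H4, per the function above
    uint32_t line = cache_ll_get_line_size(1, CACHE_TYPE_DATA, CACHE_LL_ID_ALL);
    return ((vaddr & (line - 1)) == 0) && ((size & (line - 1)) == 0);
}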
/**
@@ -181,21 +708,21 @@ static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t
* External virtual address can only be accessed when the involved cache buses are enabled.
* This API is to get the cache buses where the memory region (from `vaddr_start` to `vaddr_start + len`) reside.
*
* @param cache_id cache ID (when l1 cache is per core)
* @param bus_id bus ID
* @param vaddr_start virtual address start
* @param len vaddr length
*/
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t bus_id, uint32_t vaddr_start, uint32_t len)
{
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
HAL_ASSERT(bus_id <= CACHE_LL_ID_ALL);
cache_bus_mask_t mask = (cache_bus_mask_t)0;
uint32_t vaddr_end = vaddr_start + len - 1;
if (vaddr_start >= SOC_IRAM0_CACHE_ADDRESS_LOW && vaddr_end < SOC_IRAM0_CACHE_ADDRESS_HIGH) {
//c6 the I/D bus memory are shared, so we always return `CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0`
//h4: the I/D bus memories are shared, so we always return `CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0`
mask = (cache_bus_mask_t)(mask | (CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0));
} else {
HAL_ASSERT(0); //Out of region
@@ -215,18 +742,25 @@ __attribute__((always_inline))
#endif
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
// HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
// //On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
// HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
//On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
// uint32_t ibus_mask = 0;
// ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_CACHE_SHUT_BUS0 : 0);
// REG_CLR_BIT(CACHE_L1_CACHE_CTRL_REG, ibus_mask);
uint32_t ibus_mask = 0;
if (cache_id == 0) {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS0 : 0);
} else {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS1 : 0);
}
REG_CLR_BIT(CACHE_L1_ICACHE_CTRL_REG, ibus_mask);
// uint32_t dbus_mask = 0;
// dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_CACHE_SHUT_BUS1 : 0);
// REG_CLR_BIT(CACHE_L1_CACHE_CTRL_REG, dbus_mask);
//TODO
uint32_t dbus_mask = 0;
if (cache_id == 1) {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS0 : 0);
} else {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS1 : 0);
}
REG_CLR_BIT(CACHE_L1_DCACHE_CTRL_REG, dbus_mask);
}
/**
@@ -238,18 +772,25 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
__attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
{
// HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
// //On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
// HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
//On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
// uint32_t ibus_mask = 0;
// ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_CACHE_SHUT_BUS0 : 0);
// REG_SET_BIT(CACHE_L1_CACHE_CTRL_REG, ibus_mask);
uint32_t ibus_mask = 0;
if (cache_id == 0) {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS0 : 0);
} else {
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS1 : 0);
}
REG_SET_BIT(CACHE_L1_ICACHE_CTRL_REG, ibus_mask);
// uint32_t dbus_mask = 0;
// dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_CACHE_SHUT_BUS1 : 0);
// REG_SET_BIT(CACHE_L1_CACHE_CTRL_REG, dbus_mask);
//TODO
uint32_t dbus_mask = 0;
if (cache_id == 1) {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS0 : 0);
} else {
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS1 : 0);
}
REG_SET_BIT(CACHE_L1_DCACHE_CTRL_REG, dbus_mask);
}
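For reference, the expected call order when opening a virtual range (a sketch; the MMU/cache bring-up code itself is outside this diff):
// Resolve which buses cover the range, then un-shut them on cache 0
static void open_bus_for_range(uint32_t vaddr_start, uint32_t len)
{
    cache_bus_mask_t bus_mask = cache_ll_l1_get_bus(0, vaddr_start, len);
    cache_ll_l1_enable_bus(0, bus_mask);
}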
/**

View File

@@ -20,6 +20,8 @@
extern "C" {
#endif
#define MMU_LL_FLASH_MMU_ID 0
#define MMU_LL_PSRAM_MMU_ID 0
#define MMU_LL_END_DROM_ENTRY_VADDR (SOC_DRAM_FLASH_ADDRESS_HIGH - SOC_MMU_PAGE_SIZE)
#define MMU_LL_END_DROM_ENTRY_ID (SOC_MMU_ENTRY_NUM - 1)

View File

@@ -115,6 +115,14 @@ config SOC_APB_BACKUP_DMA
bool
default n
config SOC_CACHE_WRITEBACK_SUPPORTED
bool
default y
config SOC_CACHE_FREEZE_SUPPORTED
bool
default y
config SOC_CPU_CORES_NUM
int
default 2

View File

@@ -33,6 +33,12 @@ extern "C" {
#define SOC_DRAM_FLASH_ADDRESS_LOW SOC_DRAM0_CACHE_ADDRESS_LOW
#define SOC_DRAM_FLASH_ADDRESS_HIGH SOC_DRAM0_CACHE_ADDRESS_HIGH
#define SOC_IRAM_PSRAM_ADDRESS_LOW SOC_IRAM0_CACHE_ADDRESS_LOW
#define SOC_IRAM_PSRAM_ADDRESS_HIGH SOC_IRAM0_CACHE_ADDRESS_HIGH
#define SOC_DRAM_PSRAM_ADDRESS_LOW SOC_DRAM0_CACHE_ADDRESS_LOW
#define SOC_DRAM_PSRAM_ADDRESS_HIGH SOC_DRAM0_CACHE_ADDRESS_HIGH
#define SOC_BUS_SIZE(bus_name) (bus_name##_ADDRESS_HIGH - bus_name##_ADDRESS_LOW)
#define SOC_ADDRESS_IN_BUS(bus_name, vaddr) ((vaddr) >= bus_name##_ADDRESS_LOW && (vaddr) < bus_name##_ADDRESS_HIGH)
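The new PSRAM aliases let generic code test membership with the existing helper macros, for example (illustrative wrapper):
// True when vaddr falls inside the D-bus PSRAM window defined above
static inline bool addr_in_psram(uint32_t vaddr)
{
    return SOC_ADDRESS_IN_BUS(SOC_DRAM_PSRAM, vaddr);
}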

View File

@@ -148,8 +148,8 @@
// #define SOC_BROWNOUT_RESET_SUPPORTED 1
/*-------------------------- CACHE CAPS --------------------------------------*/
// #define SOC_SHARED_IDCACHE_SUPPORTED 1 //Shared Cache for both instructions and data
// #define SOC_CACHE_FREEZE_SUPPORTED 1
#define SOC_CACHE_WRITEBACK_SUPPORTED 1
#define SOC_CACHE_FREEZE_SUPPORTED 1
/*-------------------------- CPU CAPS ----------------------------------------*/
#define SOC_CPU_CORES_NUM (2U)
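With the writeback/freeze caps now enabled, higher layers gate these paths at compile time, roughly as below (using the existing cache_hal writeback wrapper as an example):
#include "soc/soc_caps.h"
#include "hal/cache_hal.h"
static void flush_if_needed(uint32_t vaddr, uint32_t size)
{
#if SOC_CACHE_WRITEBACK_SUPPORTED
    cache_hal_writeback_addr(vaddr, size);   // real writeback on H4 after this commit
#else
    (void)vaddr; (void)size;                 // write-through targets have nothing to flush
#endif
}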