Mirror of https://github.com/espressif/esp-idf.git (synced 2025-08-02 20:24:32 +02:00)

Merge branch 'feat/h4_cache_msync' into 'master'

cache: h4 cache msync support

Closes IDF-12289

See merge request espressif/esp-idf!40491
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2023-2025 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -40,12 +40,12 @@ const static char *TAG = "CACHE_TEST";
#elif CONFIG_IDF_TARGET_ESP32P4
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#define TEST_SYNC_SIZE CONFIG_CACHE_L2_CACHE_SIZE
#elif CONFIG_IDF_TARGET_ESP32C5
#elif CONFIG_IDF_TARGET_ESP32C5 || CONFIG_IDF_TARGET_ESP32C61
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#define TEST_SYNC_SIZE CONFIG_CACHE_L1_CACHE_SIZE
#elif CONFIG_IDF_TARGET_ESP32C61
#elif CONFIG_IDF_TARGET_ESP32H4
#define TEST_SYNC_START (SOC_DRAM_PSRAM_ADDRESS_LOW + TEST_OFFSET)
#define TEST_SYNC_SIZE CONFIG_CACHE_L1_CACHE_SIZE
#define TEST_SYNC_SIZE CONFIG_CACHE_L1_DCACHE_SIZE
#endif

#define RECORD_TIME_PREPARE() uint32_t __t1, __t2
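For context, a minimal sketch (not part of the diff) of the kind of sync the test exercises over the window defined above, assuming the public esp_cache_msync() API from the esp_mm component:

#include "esp_cache.h"
#include "esp_err.h"

// Write back, then invalidate, the whole PSRAM test window defined above.
static void sync_test_window(void)
{
    void *start = (void *)TEST_SYNC_START;
    // cache -> memory: push dirty lines out to PSRAM
    ESP_ERROR_CHECK(esp_cache_msync(start, TEST_SYNC_SIZE, ESP_CACHE_MSYNC_FLAG_DIR_C2M));
    // memory -> cache: drop stale lines so the next read fetches fresh data
    ESP_ERROR_CHECK(esp_cache_msync(start, TEST_SYNC_SIZE, ESP_CACHE_MSYNC_FLAG_DIR_M2C));
}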
components/esp_system/port/soc/esp32h4/Kconfig.cache (new file, 19 lines)
@@ -0,0 +1,19 @@
menu "Cache config"

    config CACHE_L1_ICACHE_SIZE
        hex
        default 0x4000

    config CACHE_L1_DCACHE_SIZE
        hex
        default 0x8000

    config CACHE_L1_ICACHE_LINE_SIZE
        int
        default 32

    config CACHE_L1_DCACHE_LINE_SIZE
        int
        default 32

endmenu # Cache config
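These options surface as CONFIG_CACHE_L1_* macros via sdkconfig.h; a hypothetical compile-time check (not part of the commit) showing how code such as the test above can rely on them:

#include "sdkconfig.h"

// Keep the generated values in step with the ESP32-H4 hardware assumed by the
// msync test: 16 KB I-cache, 32 KB D-cache, 32-byte cache lines.
_Static_assert(CONFIG_CACHE_L1_ICACHE_SIZE == 0x4000, "L1 I-cache size mismatch");
_Static_assert(CONFIG_CACHE_L1_DCACHE_SIZE == 0x8000, "L1 D-cache size mismatch");
_Static_assert(CONFIG_CACHE_L1_DCACHE_LINE_SIZE == 32, "L1 D-cache line size mismatch");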
@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -100,7 +100,6 @@ __attribute__((always_inline))
#endif
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
{
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
cache_bus_mask_t mask = (cache_bus_mask_t)0;

uint32_t vaddr_end = vaddr_start + len - 1;
@@ -131,20 +130,18 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
/**
* Enable the Cache Buses
*
* @param cache_id cache ID (when l1 cache is per core)
* @param bus_id bus ID
* @param mask To know which buses should be enabled
* @param enable 1: enable; 0: disable
*/
#if !BOOTLOADER_BUILD
__attribute__((always_inline))
#endif
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
{
(void) mask;
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);

uint32_t bus_mask = 0;
if (cache_id == 0) {
if (bus_id == 0) {
bus_mask = bus_mask | ((mask & CACHE_BUS_IBUS0) ? DPORT_PRO_CACHE_MASK_IRAM0 : 0);
bus_mask = bus_mask | ((mask & CACHE_BUS_IBUS1) ? DPORT_PRO_CACHE_MASK_IRAM1 : 0);
bus_mask = bus_mask | ((mask & CACHE_BUS_IBUS2) ? DPORT_PRO_CACHE_MASK_IROM0 : 0);
@@ -200,18 +197,16 @@ static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
/**
* Disable the Cache Buses
*
* @param cache_id cache ID (when l1 cache is per core)
* @param bus_id bus ID
* @param mask To know which buses should be enabled
* @param enable 1: enable; 0: disable
*/
__attribute__((always_inline))
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
{
(void) mask;
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);

uint32_t bus_mask = 0;
if (cache_id == 0) {
if (bus_id == 0) {
bus_mask = bus_mask | ((mask & CACHE_BUS_IBUS0) ? DPORT_PRO_CACHE_MASK_IRAM0 : 0);
bus_mask = bus_mask | ((mask & CACHE_BUS_IBUS1) ? DPORT_PRO_CACHE_MASK_IRAM1 : 0);
bus_mask = bus_mask | ((mask & CACHE_BUS_IBUS2) ? DPORT_PRO_CACHE_MASK_IROM0 : 0);
@@ -220,7 +220,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -238,15 +237,14 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -262,13 +260,12 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -223,7 +223,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -247,9 +246,8 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -269,9 +267,8 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -217,7 +217,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -234,15 +233,14 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c5, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -258,13 +256,12 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c5, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -198,7 +198,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -215,15 +214,14 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c6, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -239,13 +237,12 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c6, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2024-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -216,7 +216,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -233,15 +232,14 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c61, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -257,13 +255,12 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32c61, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -198,7 +198,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -215,15 +214,14 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32h2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -239,13 +237,12 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32h2, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -186,7 +186,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
@@ -203,15 +202,14 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32h21, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
@@ -227,13 +225,12 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32h21, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
|
@@ -15,8 +15,6 @@
#include "hal/assert.h"
#include "rom/cache.h"

//TODO: [ESP32H4] IDF-12289 inherited from verification branch, need check

#ifdef __cplusplus
extern "C" {
#endif
@@ -28,12 +26,52 @@ extern "C" {
#define CACHE_LL_L1_ACCESS_EVENT_MASK (1<<4)
#define CACHE_LL_L1_ACCESS_EVENT_CACHE_FAIL (1<<4)

#define CACHE_LL_ID_ALL 1 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_ID_ALL 2 //All of the caches in a type and level, make this value greater than any ID
#define CACHE_LL_LEVEL_INT_MEM 0 //Cache level for accessing internal mem
#define CACHE_LL_LEVEL_EXT_MEM 1 //Cache level for accessing external mem
#define CACHE_LL_LEVEL_ALL 2 //All of the cache levels, make this value greater than any level
#define CACHE_LL_LEVEL_NUMS 1 //Number of cache levels
#define CACHE_LL_L1_ICACHE_AUTOLOAD (1<<0)
#define CACHE_LL_CACHE_AUTOLOAD (1<<0)

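For orientation, a hypothetical helper (not in the commit) showing the id convention these defines establish: ids 0 and 1 pick one of the two per-core L1 I-caches, while CACHE_LL_ID_ALL covers both, mirroring the branches in the autoload helpers below.

static inline bool example_all_l1_autoload_enabled(void)
{
    // True only when both I-caches and the shared D-cache have autoload on.
    return cache_ll_l1_is_icache_autoload_enabled(CACHE_LL_ID_ALL)
        && cache_ll_l1_is_dcache_autoload_enabled(CACHE_LL_ID_ALL);
}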
/**
|
||||
* @brief Check if L1 ICache autoload is enabled or not
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_l1_is_icache_autoload_enabled(uint32_t cache_id)
|
||||
{
|
||||
bool enabled = false;
|
||||
|
||||
if (cache_id == 0) {
|
||||
enabled = REG_GET_BIT(CACHE_L1_ICACHE0_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE0_AUTOLOAD_ENA);
|
||||
} else if (cache_id == 1) {
|
||||
enabled = REG_GET_BIT(CACHE_L1_ICACHE1_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE1_AUTOLOAD_ENA);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
enabled = REG_GET_BIT(CACHE_L1_ICACHE0_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE0_AUTOLOAD_ENA) && REG_GET_BIT(CACHE_L1_ICACHE1_AUTOLOAD_CTRL_REG, CACHE_L1_ICACHE1_AUTOLOAD_ENA);
|
||||
}
|
||||
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Check if L1 DCache autoload is enabled or not
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*
|
||||
* @return true: enabled; false: disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_l1_is_dcache_autoload_enabled(uint32_t cache_id)
|
||||
{
|
||||
bool enabled = false;
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
enabled = REG_GET_BIT(CACHE_L1_DCACHE_AUTOLOAD_CTRL_REG, CACHE_L1_DCACHE_AUTOLOAD_ENA);
|
||||
}
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Check if Cache auto preload is enabled or not.
|
||||
@@ -47,15 +85,60 @@ extern "C" {
|
||||
__attribute__((always_inline))
|
||||
static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
bool enabled = false;
|
||||
// if (REG_GET_BIT(CACHE_L1_CACHE_AUTOLOAD_CTRL_REG, CACHE_L1_CACHE_AUTOLOAD_ENA)) {
|
||||
// enabled = true;
|
||||
// }
|
||||
//TODO
|
||||
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
enabled = cache_ll_l1_is_icache_autoload_enabled(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
enabled = cache_ll_l1_is_dcache_autoload_enabled(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default: //CACHE_TYPE_ALL
|
||||
enabled = cache_ll_l1_is_icache_autoload_enabled(CACHE_LL_ID_ALL) && cache_ll_l1_is_dcache_autoload_enabled(cache_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Disable
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Disable L1 ICache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_icache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Disable_Cache(CACHE_MAP_ICACHE0);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Disable_Cache(CACHE_MAP_ICACHE1);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Disable_Cache(CACHE_MAP_ICACHE0);
|
||||
Cache_Disable_Cache(CACHE_MAP_ICACHE1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable L1 DCache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_dcache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Disable_Cache(CACHE_MAP_DCACHE);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Disable Cache
|
||||
*
|
||||
@@ -66,8 +149,56 @@ static inline bool cache_ll_is_cache_autoload_enabled(uint32_t cache_level, cach
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
(void) type;
|
||||
Cache_Disable_Cache(CACHE_MAP_ALL);
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_disable_icache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_disable_dcache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_disable_icache(CACHE_LL_ID_ALL);
|
||||
cache_ll_l1_disable_dcache(cache_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
/*------------------------------------------------------------------------------
|
||||
* Enable
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Enable L1 ICache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param inst_autoload_en icache autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_enable_icache(uint32_t cache_id, bool inst_autoload_en)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Enable_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Enable_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Enable_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
Cache_Enable_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Enable L1 DCache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en dcache autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_enable_dcache(uint32_t cache_id, bool data_autoload_en)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Enable_Cache(CACHE_MAP_DCACHE, data_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -82,8 +213,55 @@ static inline void cache_ll_disable_cache(uint32_t cache_level, cache_type_t typ
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Enable_Cache(CACHE_MAP_ALL, inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
//TODO
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_enable_icache(cache_id, inst_autoload_en);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_enable_dcache(cache_id, data_autoload_en);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_enable_dcache(cache_id, data_autoload_en);
|
||||
cache_ll_l1_enable_icache(cache_id, inst_autoload_en);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Suspend
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Suspend L1 ICache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_suspend_icache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Suspend_Cache(CACHE_MAP_ICACHE0);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Suspend_Cache(CACHE_MAP_ICACHE1);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Suspend_Cache(CACHE_MAP_ICACHE0);
|
||||
Cache_Suspend_Cache(CACHE_MAP_ICACHE1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Suspend L1 DCache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_suspend_dcache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Suspend_Cache(CACHE_MAP_DCACHE);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -96,7 +274,57 @@ static inline void cache_ll_enable_cache(uint32_t cache_level, cache_type_t type
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Suspend_Cache(CACHE_MAP_ALL);
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_suspend_icache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_suspend_dcache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_suspend_icache(CACHE_LL_ID_ALL);
|
||||
cache_ll_l1_suspend_dcache(cache_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Resume
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Resume L1 ICache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param inst_autoload_en icache autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_resume_icache(uint32_t cache_id, bool inst_autoload_en)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Resume_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Resume_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Resume_Cache(CACHE_MAP_ICACHE0, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
Cache_Resume_Cache(CACHE_MAP_ICACHE1, inst_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Resume L1 DCache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param data_autoload_en dcache autoload enabled or not
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_resume_dcache(uint32_t cache_id, bool data_autoload_en)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Resume_Cache(CACHE_MAP_DCACHE, data_autoload_en ? CACHE_LL_CACHE_AUTOLOAD : 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -111,8 +339,58 @@ static inline void cache_ll_suspend_cache(uint32_t cache_level, cache_type_t typ
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id, bool inst_autoload_en, bool data_autoload_en)
|
||||
{
|
||||
Cache_Resume_Cache(CACHE_MAP_ALL, inst_autoload_en ? CACHE_LL_L1_ICACHE_AUTOLOAD : 0);
|
||||
//TODO
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_resume_icache(cache_id, inst_autoload_en);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_resume_dcache(cache_id, data_autoload_en);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_resume_dcache(cache_id, data_autoload_en);
|
||||
cache_ll_l1_resume_icache(CACHE_LL_ID_ALL, inst_autoload_en);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Invalidate
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Invalidate L1 ICache addr
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_invalidate_icache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Invalidate_Addr(CACHE_MAP_ICACHE0, vaddr, size);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Invalidate_Addr(CACHE_MAP_ICACHE1, vaddr, size);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Invalidate_Addr(CACHE_MAP_ICACHE0 | CACHE_MAP_ICACHE1, vaddr, size);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Invalidate L1 DCache addr
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be invalidated
|
||||
* @param size size of the region to be invalidated
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_invalidate_dcache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Invalidate_Addr(CACHE_MAP_DCACHE, vaddr, size);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -129,7 +407,190 @@ static inline void cache_ll_resume_cache(uint32_t cache_level, cache_type_t type
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
Cache_Invalidate_Addr(CACHE_MAP_ALL, vaddr, size);
|
||||
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_invalidate_icache_addr(cache_id, vaddr, size);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_invalidate_dcache_addr(cache_id, vaddr, size);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_invalidate_icache_addr(cache_id, vaddr, size);
|
||||
cache_ll_l1_invalidate_dcache_addr(cache_id, vaddr, size);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Invalidate L1 ICache all
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_invalidate_icache_all(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Invalidate_All(CACHE_MAP_ICACHE0);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Invalidate_All(CACHE_MAP_ICACHE1);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Invalidate_All(CACHE_MAP_ICACHE0 | CACHE_MAP_ICACHE1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Invalidate L1 DCache all
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_invalidate_dcache_all(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Invalidate_All(CACHE_MAP_DCACHE);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Invalidate all
|
||||
*
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_invalidate_all(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_invalidate_icache_all(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_invalidate_dcache_all(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_invalidate_icache_all(cache_id);
|
||||
cache_ll_l1_invalidate_dcache_all(cache_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Writeback
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Writeback L1 DCache addr
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be written back
|
||||
* @param size size of the region to be written back
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_writeback_dcache_addr(uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_WriteBack_Addr(vaddr, size);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Writeback cache supported addr
|
||||
*
|
||||
* Writeback a cache item
|
||||
*
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
* @param vaddr start address of the region to be written back
|
||||
* @param size size of the region to be written back
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_writeback_addr(uint32_t cache_level, cache_type_t type, uint32_t cache_id, uint32_t vaddr, uint32_t size)
|
||||
{
|
||||
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_writeback_dcache_addr(cache_id, vaddr, size);
|
||||
break;
|
||||
default:
|
||||
HAL_ASSERT(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Writeback L1 DCache all
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_writeback_dcache_all(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_WriteBack_All();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Writeback all
|
||||
*
|
||||
* @param cache_level level of the cache
|
||||
* @param type see `cache_type_t`
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_writeback_all(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
if (cache_level == 1 || cache_level == 2 || cache_level == CACHE_LL_LEVEL_ALL) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_writeback_dcache_all(cache_id);
|
||||
break;
|
||||
default:
|
||||
HAL_ASSERT(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Freeze
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Freeze L1 ICache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_freeze_icache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Freeze_Enable(CACHE_MAP_ICACHE0, CACHE_FREEZE_ACK_BUSY);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Freeze_Enable(CACHE_MAP_ICACHE1, CACHE_FREEZE_ACK_BUSY);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Freeze_Enable(CACHE_MAP_ICACHE0, CACHE_FREEZE_ACK_BUSY);
|
||||
Cache_Freeze_Enable(CACHE_MAP_ICACHE1, CACHE_FREEZE_ACK_BUSY);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Freeze L1 DCache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_freeze_dcache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Freeze_Enable(CACHE_MAP_DCACHE, CACHE_FREEZE_ACK_BUSY);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -142,7 +603,54 @@ static inline void cache_ll_invalidate_addr(uint32_t cache_level, cache_type_t t
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Freeze_Enable(CACHE_MAP_ALL, CACHE_FREEZE_ACK_BUSY);
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_freeze_icache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_freeze_dcache(cache_id);
|
||||
break;
|
||||
default:
|
||||
cache_ll_l1_freeze_icache(CACHE_LL_ID_ALL);
|
||||
cache_ll_l1_freeze_dcache(cache_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Unfreeze
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Unfreeze L1 ICache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_unfreeze_icache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0) {
|
||||
Cache_Freeze_Disable(CACHE_MAP_ICACHE0);
|
||||
} else if (cache_id == 1) {
|
||||
Cache_Freeze_Disable(CACHE_MAP_ICACHE1);
|
||||
} else if (cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Freeze_Disable(CACHE_MAP_ICACHE0);
|
||||
Cache_Freeze_Disable(CACHE_MAP_ICACHE1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Unfreeze L1 DCache
|
||||
*
|
||||
* @param cache_id id of the cache in this type and level
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_unfreeze_dcache(uint32_t cache_id)
|
||||
{
|
||||
if (cache_id == 0 || cache_id == CACHE_LL_ID_ALL) {
|
||||
Cache_Freeze_Disable(CACHE_MAP_DCACHE);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -155,9 +663,26 @@ static inline void cache_ll_freeze_cache(uint32_t cache_level, cache_type_t type
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
Cache_Freeze_Disable(CACHE_MAP_ALL);
|
||||
if (cache_level == 1) {
|
||||
switch (type) {
|
||||
case CACHE_TYPE_INSTRUCTION:
|
||||
cache_ll_l1_unfreeze_icache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_DATA:
|
||||
cache_ll_l1_unfreeze_dcache(cache_id);
|
||||
break;
|
||||
case CACHE_TYPE_ALL:
|
||||
default:
|
||||
cache_ll_l1_unfreeze_dcache(cache_id);
|
||||
cache_ll_l1_unfreeze_icache(CACHE_LL_ID_ALL);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* Cache Line Size
|
||||
*----------------------------------------------------------------------------*/
|
||||
/**
|
||||
* @brief Get Cache line size, in bytes
|
||||
*
|
||||
@@ -170,9 +695,7 @@ static inline void cache_ll_unfreeze_cache(uint32_t cache_level, cache_type_t ty
|
||||
__attribute__((always_inline))
|
||||
static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t type, uint32_t cache_id)
|
||||
{
|
||||
uint32_t size = 0;
|
||||
size = Cache_Get_Line_Size(CACHE_MAP_DCACHE);
|
||||
return size;
|
||||
return 32;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -190,12 +713,11 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
if (vaddr_start >= SOC_IRAM0_CACHE_ADDRESS_LOW && vaddr_end < SOC_IRAM0_CACHE_ADDRESS_HIGH) {
|
||||
//c6 the I/D bus memory are shared, so we always return `CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0`
|
||||
//h4 the I/D bus memory are shared, so we always return `CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0`
|
||||
mask = (cache_bus_mask_t)(mask | (CACHE_BUS_IBUS0 | CACHE_BUS_DBUS0));
|
||||
} else {
|
||||
HAL_ASSERT(0); //Out of region
|
||||
@@ -207,49 +729,61 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
// HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
// //On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
// HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
//On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
// uint32_t ibus_mask = 0;
|
||||
// ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_CACHE_SHUT_BUS0 : 0);
|
||||
// REG_CLR_BIT(CACHE_L1_CACHE_CTRL_REG, ibus_mask);
|
||||
uint32_t ibus_mask = 0;
|
||||
if (bus_id == 0) {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS0 : 0);
|
||||
} else {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS1 : 0);
|
||||
}
|
||||
REG_CLR_BIT(CACHE_L1_ICACHE_CTRL_REG, ibus_mask);
|
||||
|
||||
// uint32_t dbus_mask = 0;
|
||||
// dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_CACHE_SHUT_BUS1 : 0);
|
||||
// REG_CLR_BIT(CACHE_L1_CACHE_CTRL_REG, dbus_mask);
|
||||
//TODO
|
||||
uint32_t dbus_mask = 0;
|
||||
if (bus_id == 1) {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS0 : 0);
|
||||
} else {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS1 : 0);
|
||||
}
|
||||
REG_CLR_BIT(CACHE_L1_DCACHE_CTRL_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
// HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
// //On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
// HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2 | CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
//On esp32h4, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
// uint32_t ibus_mask = 0;
|
||||
// ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_CACHE_SHUT_BUS0 : 0);
|
||||
// REG_SET_BIT(CACHE_L1_CACHE_CTRL_REG, ibus_mask);
|
||||
uint32_t ibus_mask = 0;
|
||||
if (bus_id == 0) {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS0 : 0);
|
||||
} else {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? CACHE_L1_ICACHE_SHUT_IBUS1 : 0);
|
||||
}
|
||||
REG_SET_BIT(CACHE_L1_ICACHE_CTRL_REG, ibus_mask);
|
||||
|
||||
// uint32_t dbus_mask = 0;
|
||||
// dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_CACHE_SHUT_BUS1 : 0);
|
||||
// REG_SET_BIT(CACHE_L1_CACHE_CTRL_REG, dbus_mask);
|
||||
//TODO
|
||||
uint32_t dbus_mask = 0;
|
||||
if (bus_id == 1) {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS0 : 0);
|
||||
} else {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? CACHE_L1_DCACHE_SHUT_DBUS1 : 0);
|
||||
}
|
||||
REG_SET_BIT(CACHE_L1_DCACHE_CTRL_REG, dbus_mask);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -20,6 +20,8 @@
extern "C" {
#endif

#define MMU_LL_FLASH_MMU_ID 0
#define MMU_LL_PSRAM_MMU_ID 0
#define MMU_LL_END_DROM_ENTRY_VADDR (SOC_DRAM_FLASH_ADDRESS_HIGH - SOC_MMU_PAGE_SIZE)
#define MMU_LL_END_DROM_ENTRY_ID (SOC_MMU_ENTRY_NUM - 1)

@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -1006,7 +1006,7 @@ static inline uint32_t cache_ll_get_line_size(uint32_t cache_level, cache_type_t
|
||||
* @param len vaddr length
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t bus_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
return (cache_bus_mask_t)(CACHE_LL_DEFAULT_IBUS_MASK | CACHE_LL_DEFAULT_DBUS_MASK);
|
||||
}
|
||||
@@ -1018,7 +1018,7 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
//not used, for compatibility
|
||||
}
|
||||
@@ -1030,7 +1030,7 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
//not used, for compatibility
|
||||
}
|
||||
@@ -1038,14 +1038,13 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
/**
|
||||
* @brief Get the buses of a particular cache that are mapped to a virtual address range
|
||||
*
|
||||
* @param cache_id cache ID
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param vaddr_start virtual address start
|
||||
* @param len vaddr length
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline cache_bus_mask_t cache_ll_l2_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
(void)cache_id;
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -461,8 +461,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
(void)cache_id;
|
||||
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
if (vaddr_start >= SOC_IRAM1_ADDRESS_LOW) {
|
||||
@@ -502,15 +500,15 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
/**
|
||||
* Enable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be enabled
|
||||
*/
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
(void)cache_id;
|
||||
(void)bus_id;
|
||||
|
||||
uint32_t ibus_mask = 0;
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? EXTMEM_PRO_ICACHE_MASK_IRAM0 : 0);
|
||||
@@ -528,13 +526,13 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
/**
|
||||
* Disable the Cache Buses
|
||||
*
|
||||
* @param cache_id cache ID (when l1 cache is per core)
|
||||
* @param bus_id bus ID
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
(void)cache_id;
|
||||
(void)bus_id;
|
||||
|
||||
uint32_t ibus_mask = 0;
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? EXTMEM_PRO_ICACHE_MASK_IRAM0 : 0);
|
||||
|
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2022-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -558,8 +558,6 @@ __attribute__((always_inline))
|
||||
#endif
|
||||
static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t vaddr_start, uint32_t len)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
|
||||
cache_bus_mask_t mask = (cache_bus_mask_t)0;
|
||||
uint32_t vaddr_end = vaddr_start + len - 1;
|
||||
if (vaddr_start >= SOC_IRAM0_CACHE_ADDRESS_LOW && vaddr_end < SOC_IRAM0_CACHE_ADDRESS_HIGH) {
|
||||
@@ -582,14 +580,13 @@ static inline cache_bus_mask_t cache_ll_l1_get_bus(uint32_t cache_id, uint32_t v
|
||||
#if !BOOTLOADER_BUILD
|
||||
__attribute__((always_inline))
|
||||
#endif
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_enable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32s3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
uint32_t ibus_mask = 0;
|
||||
if (cache_id == 0) {
|
||||
if (bus_id == 0) {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? EXTMEM_ICACHE_SHUT_CORE0_BUS : 0);
|
||||
} else {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? EXTMEM_ICACHE_SHUT_CORE1_BUS : 0);
|
||||
@@ -597,7 +594,7 @@ static inline void cache_ll_l1_enable_bus(uint32_t cache_id, cache_bus_mask_t ma
|
||||
REG_CLR_BIT(EXTMEM_ICACHE_CTRL1_REG, ibus_mask);
|
||||
|
||||
uint32_t dbus_mask = 0;
|
||||
if (cache_id == 1) {
|
||||
if (bus_id == 1) {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? EXTMEM_DCACHE_SHUT_CORE0_BUS : 0);
|
||||
} else {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? EXTMEM_DCACHE_SHUT_CORE1_BUS : 0);
|
||||
@@ -643,14 +640,13 @@ static inline cache_bus_mask_t cache_ll_l1_get_enabled_bus(uint32_t cache_id)
|
||||
* @param mask To know which buses should be disabled
|
||||
*/
|
||||
__attribute__((always_inline))
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t mask)
|
||||
static inline void cache_ll_l1_disable_bus(uint32_t bus_id, cache_bus_mask_t mask)
|
||||
{
|
||||
HAL_ASSERT(cache_id <= CACHE_LL_ID_ALL);
|
||||
//On esp32s3, only `CACHE_BUS_IBUS0` and `CACHE_BUS_DBUS0` are supported. Use `cache_ll_l1_get_bus()` to get your bus first
|
||||
HAL_ASSERT((mask & (CACHE_BUS_IBUS1 | CACHE_BUS_IBUS2| CACHE_BUS_DBUS1 | CACHE_BUS_DBUS2)) == 0);
|
||||
|
||||
uint32_t ibus_mask = 0;
|
||||
if (cache_id == 0) {
|
||||
if (bus_id == 0) {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? EXTMEM_ICACHE_SHUT_CORE0_BUS : 0);
|
||||
} else {
|
||||
ibus_mask = ibus_mask | ((mask & CACHE_BUS_IBUS0) ? EXTMEM_ICACHE_SHUT_CORE1_BUS : 0);
|
||||
@@ -658,7 +654,7 @@ static inline void cache_ll_l1_disable_bus(uint32_t cache_id, cache_bus_mask_t m
|
||||
REG_SET_BIT(EXTMEM_ICACHE_CTRL1_REG, ibus_mask);
|
||||
|
||||
uint32_t dbus_mask = 0;
|
||||
if (cache_id == 1) {
|
||||
if (bus_id == 1) {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? EXTMEM_DCACHE_SHUT_CORE0_BUS : 0);
|
||||
} else {
|
||||
dbus_mask = dbus_mask | ((mask & CACHE_BUS_DBUS0) ? EXTMEM_DCACHE_SHUT_CORE1_BUS : 0);
|
||||
|
@@ -23,7 +23,7 @@ typedef enum {
*
* @note
* These enumurations are abstract concepts. Virtual address reside in one of these buses.
* Therefore, use `cache_ll_l1_get_bus(cache_id, vaddr_start, len)` to convert your vaddr into buses first
* Therefore, use `cache_ll_l1_get_bus(bus_id, vaddr_start, len)` to convert your vaddr into buses first
*/
typedef enum {
CACHE_BUS_IBUS0 = BIT(0),
@@ -115,6 +115,14 @@ config SOC_APB_BACKUP_DMA
    bool
    default n

config SOC_CACHE_WRITEBACK_SUPPORTED
    bool
    default y

config SOC_CACHE_FREEZE_SUPPORTED
    bool
    default y

config SOC_CPU_CORES_NUM
    int
    default 2
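Driver code gates on these generated caps; a hedged sketch (not from this commit) of the usual guard, assuming the public esp_cache_msync() API:

#include <stddef.h>
#include "soc/soc_caps.h"
#include "esp_cache.h"
#include "esp_err.h"

static void flush_for_dma(void *buf, size_t len)
{
#if SOC_CACHE_WRITEBACK_SUPPORTED
    // Write-back D-cache (now advertised for ESP32-H4): push dirty lines to
    // memory before a DMA peripheral reads the buffer.
    ESP_ERROR_CHECK(esp_cache_msync(buf, len, ESP_CACHE_MSYNC_FLAG_DIR_C2M));
#else
    (void)buf;
    (void)len; // write-through cache: nothing to write back
#endif
}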
@@ -33,6 +33,12 @@ extern "C" {
#define SOC_DRAM_FLASH_ADDRESS_LOW SOC_DRAM0_CACHE_ADDRESS_LOW
#define SOC_DRAM_FLASH_ADDRESS_HIGH SOC_DRAM0_CACHE_ADDRESS_HIGH

#define SOC_IRAM_PSRAM_ADDRESS_LOW SOC_IRAM0_CACHE_ADDRESS_LOW
#define SOC_IRAM_PSRAM_ADDRESS_HIGH SOC_IRAM0_CACHE_ADDRESS_HIGH

#define SOC_DRAM_PSRAM_ADDRESS_LOW SOC_DRAM0_CACHE_ADDRESS_LOW
#define SOC_DRAM_PSRAM_ADDRESS_HIGH SOC_DRAM0_CACHE_ADDRESS_HIGH

#define SOC_BUS_SIZE(bus_name) (bus_name##_ADDRESS_HIGH - bus_name##_ADDRESS_LOW)
#define SOC_ADDRESS_IN_BUS(bus_name, vaddr) ((vaddr) >= bus_name##_ADDRESS_LOW && (vaddr) < bus_name##_ADDRESS_HIGH)

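A small usage sketch (illustrative, not from the commit) of the new PSRAM aliases together with the helper macros above, assuming they come from the target's ext_mem_defs.h:

#include <stdint.h>
#include <stdbool.h>
#include "soc/ext_mem_defs.h"

static inline bool vaddr_is_psram_data(uint32_t vaddr)
{
    // Expands to: vaddr >= SOC_DRAM_PSRAM_ADDRESS_LOW && vaddr < SOC_DRAM_PSRAM_ADDRESS_HIGH
    return SOC_ADDRESS_IN_BUS(SOC_DRAM_PSRAM, vaddr);
}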
@@ -148,8 +148,8 @@
// #define SOC_BROWNOUT_RESET_SUPPORTED 1

/*-------------------------- CACHE CAPS --------------------------------------*/
// #define SOC_SHARED_IDCACHE_SUPPORTED 1 //Shared Cache for both instructions and data
// #define SOC_CACHE_FREEZE_SUPPORTED 1
#define SOC_CACHE_WRITEBACK_SUPPORTED 1
#define SOC_CACHE_FREEZE_SUPPORTED 1

/*-------------------------- CPU CAPS ----------------------------------------*/
#define SOC_CPU_CORES_NUM (2U)