// components/spi_flash/cache_utils.c
  1  // Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
  2  //
  3  // Licensed under the Apache License, Version 2.0 (the "License");
  4  // you may not use this file except in compliance with the License.
  5  // You may obtain a copy of the License at
  6  //
  7  //     http://www.apache.org/licenses/LICENSE-2.0
  8  //
  9  // Unless required by applicable law or agreed to in writing, software
 10  // distributed under the License is distributed on an "AS IS" BASIS,
 11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12  // See the License for the specific language governing permissions and
 13  // limitations under the License.
 14  
 15  #include <stdlib.h>
 16  #include <assert.h>
 17  #include <string.h>
 18  #include <stdio.h>
 19  
 20  #include <freertos/FreeRTOS.h>
 21  #include <freertos/task.h>
 22  #include <freertos/semphr.h>
 23  #if CONFIG_IDF_TARGET_ESP32
 24  #include <esp32/rom/spi_flash.h>
 25  #include <esp32/rom/cache.h>
 26  #elif CONFIG_IDF_TARGET_ESP32S2
 27  #include "esp32s2/rom/spi_flash.h"
 28  #include "esp32s2/rom/cache.h"
 29  #include "soc/extmem_reg.h"
 30  #include "soc/cache_memory.h"
 31  #elif CONFIG_IDF_TARGET_ESP32S3
 32  #include "esp32s3/rom/spi_flash.h"
 33  #include "esp32s3/rom/cache.h"
 34  #include "soc/extmem_reg.h"
 35  #include "soc/cache_memory.h"
 36  #endif
 37  #include <soc/soc.h>
 38  #include <soc/dport_reg.h>
 39  #include "sdkconfig.h"
 40  #ifndef CONFIG_FREERTOS_UNICORE
 41  #include "esp_ipc.h"
 42  #endif
 43  #include "esp_attr.h"
 44  #include "esp_intr_alloc.h"
 45  #include "esp_spi_flash.h"
 46  #include "esp_log.h"
 47  
// Log tag for this module; marked unused because it is only referenced when
// ESP_EARLY_LOG* calls are compiled in for the S2/S3 code paths below.
static __attribute__((unused)) const char *TAG = "cache";
 49  
 50  #define DPORT_CACHE_BIT(cpuid, regid) DPORT_ ## cpuid ## regid
 51  
 52  #define DPORT_CACHE_MASK(cpuid) (DPORT_CACHE_BIT(cpuid, _CACHE_MASK_OPSDRAM) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
 53                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IROM0) | \
 54                                  DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM1) | DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0) )
 55  
 56  #define DPORT_CACHE_VAL(cpuid) (~(DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DROM0) | \
 57                                          DPORT_CACHE_BIT(cpuid, _CACHE_MASK_DRAM1) | \
 58                                          DPORT_CACHE_BIT(cpuid, _CACHE_MASK_IRAM0)))
 59  
 60  #define DPORT_CACHE_GET_VAL(cpuid) (cpuid == 0) ? DPORT_CACHE_VAL(PRO) : DPORT_CACHE_VAL(APP)
 61  #define DPORT_CACHE_GET_MASK(cpuid) (cpuid == 0) ? DPORT_CACHE_MASK(PRO) : DPORT_CACHE_MASK(APP)
 62  
// Disable/restore the flash cache on the given CPU. *saved_state receives (and
// later supplies) the register/autoload state needed to undo the operation.
// Definitions are at the bottom of this file.
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state);
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state);

// Per-CPU saved cache state, indexed by CPU id (0 = PRO, 1 = APP)
static uint32_t s_flash_op_cache_state[2];
 67  
#ifndef CONFIG_FREERTOS_UNICORE
// Serializes flash operations between tasks; recursive so one task may nest calls
static SemaphoreHandle_t s_flash_op_mutex;
// Handshake flags between the CPU doing the flash op and the other CPU's IPC task
// (volatile: each is polled in a busy loop while the other CPU writes it)
static volatile bool s_flash_op_can_start = false;
static volatile bool s_flash_op_complete = false;
#ifndef NDEBUG
// CPU id that started the current flash op, -1 when idle; used for sanity checks
static volatile int s_flash_op_cpu = -1;
#endif
 75  
 76  void spi_flash_init_lock(void)
 77  {
 78      s_flash_op_mutex = xSemaphoreCreateRecursiveMutex();
 79      assert(s_flash_op_mutex != NULL);
 80  }
 81  
// Acquire the flash-operation lock; blocks indefinitely until it is available.
void spi_flash_op_lock(void)
{
    xSemaphoreTakeRecursive(s_flash_op_mutex, portMAX_DELAY);
}
 86  
// Release the flash-operation lock taken by spi_flash_op_lock().
void spi_flash_op_unlock(void)
{
    xSemaphoreGiveRecursive(s_flash_op_mutex);
}
/*
 If you're going to modify this, keep in mind that while the flash caches of the PRO and APP
 CPUs are separate, the PSRAM cache is *not*. If one of the CPUs returns from a flash routine
 with its cache enabled but the other CPU's cache is not enabled yet, you will have problems
 when accessing PSRAM from the former CPU.
*/
 97  
// Runs on the *other* CPU (dispatched via esp_ipc_call) while the initiating CPU
// performs a flash operation: parks this CPU in an IRAM busy loop with its
// scheduler and non-IRAM interrupts disabled until the operation completes.
// arg is this CPU's id, used to index s_flash_op_cache_state.
void IRAM_ATTR spi_flash_op_block_func(void *arg)
{
    // Disable scheduler on this CPU
    vTaskSuspendAll();
    // Disable interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    uint32_t cpuid = (uint32_t) arg;
    // s_flash_op_complete flag is cleared on *this* CPU, otherwise the other
    // CPU may reset the flag back to false before IPC task has a chance to check it
    // (if it is preempted by an ISR taking non-trivial amount of time)
    s_flash_op_complete = false;
    s_flash_op_can_start = true;
    while (!s_flash_op_complete) {
        // busy loop here and wait for the other CPU to finish flash operation
    }
    // Flash operation is complete, re-enable cache
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    // Restore interrupts that aren't located in IRAM
    esp_intr_noniram_enable();
    // Re-enable scheduler
    xTaskResumeAll();
}
120  
// Prepare for a flash operation: take the API lock, park the other CPU inside
// spi_flash_op_block_func() (once the scheduler runs), disable the scheduler and
// non-IRAM interrupts on this CPU, and finally disable the flash cache on both
// CPUs. Must be balanced by spi_flash_enable_interrupts_caches_and_other_cpu().
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    // The stack must be in DRAM: with caches off, a flash-mapped stack would fault
    assert(esp_ptr_in_dram((const void *)get_sp()));

    spi_flash_op_lock();

    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // For sanity check later: record the CPU which has started doing flash operation
    assert(s_flash_op_cpu == -1);
    s_flash_op_cpu = cpuid;
#endif

    if (xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED) {
        // Scheduler hasn't been started yet, it means that spi_flash API is being
        // called from the 2nd stage bootloader or from user_start_cpu0, i.e. from
        // PRO CPU. APP CPU is either in reset or spinning inside user_start_cpu1,
        // which is in IRAM. So it is safe to disable cache for the other_cpuid here.
        assert(other_cpuid == 1);
        spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
    } else {
        // Temporarily raise current task priority to prevent a deadlock while
        // waiting for IPC task to start on the other CPU
        int old_prio = uxTaskPriorityGet(NULL);
        vTaskPrioritySet(NULL, configMAX_PRIORITIES - 1);
        // Signal to the spi_flash_op_block_task on the other CPU that we need it to
        // disable cache there and block other tasks from executing.
        s_flash_op_can_start = false;
        esp_err_t ret = esp_ipc_call(other_cpuid, &spi_flash_op_block_func, (void *) other_cpuid);
        assert(ret == ESP_OK);
        while (!s_flash_op_can_start) {
            // Busy loop and wait for spi_flash_op_block_func to disable cache
            // on the other CPU
        }
        // Disable scheduler on the current CPU
        vTaskSuspendAll();
        // Can now set the priority back to the normal one
        vTaskPrioritySet(NULL, old_prio);
        // This is guaranteed to run on CPU <cpuid> because the other CPU is now
        // occupied by highest priority task
        assert(xPortGetCoreID() == cpuid);
    }
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // This CPU executes this routine, with non-IRAM interrupts and the scheduler
    // disabled. The other CPU is spinning in the spi_flash_op_block_func task, also
    // with non-iram interrupts and the scheduler disabled. None of these CPUs will
    // touch external RAM or flash this way, so we can safely disable caches.
    spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
    spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
}
173  
// Undo spi_flash_disable_interrupts_caches_and_other_cpu(): restore caches on
// both CPUs, release the other CPU from its busy-wait, re-enable non-IRAM
// interrupts and the scheduler on this CPU, then release the API lock.
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    const uint32_t cpuid = xPortGetCoreID();
    const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
#ifndef NDEBUG
    // Sanity check: flash operation ends on the same CPU as it has started
    assert(cpuid == s_flash_op_cpu);
    // More sanity check: if scheduler isn't started, only CPU0 can call this.
    assert(!(xTaskGetSchedulerState() == taskSCHEDULER_NOT_STARTED && cpuid != 0));
    s_flash_op_cpu = -1;
#endif

    // Re-enable cache on both CPUs. After this, cache (flash and external RAM) should work again.
    spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
    spi_flash_restore_cache(other_cpuid, s_flash_op_cache_state[other_cpuid]);

    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        // Signal to spi_flash_op_block_task that flash operation is complete
        s_flash_op_complete = true;
    }

    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();

    // Resume tasks on the current CPU, if the scheduler has started.
    // NOTE: enabling non-IRAM interrupts has to happen before this,
    // because once the scheduler has started, due to preemption the
    // current task can end up being moved to the other CPU.
    // But esp_intr_noniram_enable has to be called on the same CPU which
    // called esp_intr_noniram_disable
    if (xTaskGetSchedulerState() != taskSCHEDULER_NOT_STARTED) {
        xTaskResumeAll();
    }
    // Release API lock
    spi_flash_op_unlock();
}
210  
211  void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
212  {
213      const uint32_t cpuid = xPortGetCoreID();
214      const uint32_t other_cpuid = (cpuid == 0) ? 1 : 0;
215  
216      // do not care about other CPU, it was halted upon entering panic handler
217      spi_flash_disable_cache(other_cpuid, &s_flash_op_cache_state[other_cpuid]);
218      // Kill interrupts that aren't located in IRAM
219      esp_intr_noniram_disable();
220      // Disable cache on this CPU as well
221      spi_flash_disable_cache(cpuid, &s_flash_op_cache_state[cpuid]);
222  }
223  
224  void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
225  {
226      const uint32_t cpuid = xPortGetCoreID();
227  
228      // Re-enable cache on this CPU
229      spi_flash_restore_cache(cpuid, s_flash_op_cache_state[cpuid]);
230      // Re-enable non-iram interrupts
231      esp_intr_noniram_enable();
232  }
233  
234  #else // CONFIG_FREERTOS_UNICORE
235  
// Unicore build: no cross-CPU coordination, so there is no lock object to create.
void spi_flash_init_lock(void)
{
}
239  
// Unicore build: "locking" is simply suspending the scheduler on the only CPU.
void spi_flash_op_lock(void)
{
    vTaskSuspendAll();
}
244  
// Unicore build: resume the scheduler, undoing spi_flash_op_lock().
void spi_flash_op_unlock(void)
{
    xTaskResumeAll();
}
249  
250  
// Unicore build: suspend the scheduler, disable non-IRAM interrupts and the
// flash cache on CPU 0 before a flash operation.
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_op_lock();
    esp_intr_noniram_disable();
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
257  
// Unicore build: restore the cache, interrupts and scheduler in the reverse
// order of spi_flash_disable_interrupts_caches_and_other_cpu().
void IRAM_ATTR spi_flash_enable_interrupts_caches_and_other_cpu(void)
{
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    esp_intr_noniram_enable();
    spi_flash_op_unlock();
}
264  
// Unicore panic-handler variant: no lock, just interrupts and cache off.
void IRAM_ATTR spi_flash_disable_interrupts_caches_and_other_cpu_no_os(void)
{
    // Kill interrupts that aren't located in IRAM
    esp_intr_noniram_disable();
    // Disable cache on this CPU as well
    spi_flash_disable_cache(0, &s_flash_op_cache_state[0]);
}
272  
// Unicore panic-handler variant: bring the cache and non-IRAM interrupts back.
void IRAM_ATTR spi_flash_enable_interrupts_caches_no_os(void)
{
    // Re-enable cache on this CPU
    spi_flash_restore_cache(0, s_flash_op_cache_state[0]);
    // Re-enable non-iram interrupts
    esp_intr_noniram_enable();
}
280  
281  #endif // CONFIG_FREERTOS_UNICORE
282  
/**
 * The following two functions are replacements for the Cache_Read_Disable and Cache_Read_Enable
 * functions in ROM. They are used to work around a bug where Cache_Read_Disable requires a call to
 * Cache_Flush before Cache_Read_Enable, even if the cached data was not modified.
 */
// Disable the flash cache for the given CPU and store the previous cache
// configuration into *saved_state so spi_flash_restore_cache() can undo it.
// ESP32: manipulates DPORT cache control registers directly. ESP32-S2/S3: uses
// the ROM Cache_Suspend_* routines, which return the state to pass on resume.
static void IRAM_ATTR spi_flash_disable_cache(uint32_t cpuid, uint32_t *saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    uint32_t ret = 0;
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        // Save which cache regions were enabled for the PRO CPU
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, 0);
        // Wait for the cache state machine to go idle before disabling
        while (DPORT_GET_PERI_REG_BITS2(DPORT_PRO_DCACHE_DBUG0_REG, DPORT_PRO_CACHE_STATE, DPORT_PRO_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 0, DPORT_PRO_CACHE_ENABLE_S);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        // Same sequence for the APP CPU
        ret |= DPORT_GET_PERI_REG_BITS2(DPORT_APP_CACHE_CTRL1_REG, cache_mask, 0);
        while (DPORT_GET_PERI_REG_BITS2(DPORT_APP_DCACHE_DBUG0_REG, DPORT_APP_CACHE_STATE, DPORT_APP_CACHE_STATE_S) != 1) {
            ;
        }
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 0, DPORT_APP_CACHE_ENABLE_S);
    }
#endif
    *saved_state = ret;
#elif CONFIG_IDF_TARGET_ESP32S2
    *saved_state = Cache_Suspend_ICache();
#elif CONFIG_IDF_TARGET_ESP32S3
    // Pack both suspend results into one word: icache state in the high 16 bits,
    // dcache state in the low 16 bits (unpacked again in spi_flash_restore_cache)
    uint32_t icache_state, dcache_state;
    icache_state = Cache_Suspend_ICache() << 16;
    dcache_state = Cache_Suspend_DCache();
    *saved_state = icache_state | dcache_state;
#endif
}
319  
// Re-enable the flash cache for the given CPU using the state previously saved
// by spi_flash_disable_cache().
static void IRAM_ATTR spi_flash_restore_cache(uint32_t cpuid, uint32_t saved_state)
{
#if CONFIG_IDF_TARGET_ESP32
    const uint32_t cache_mask = DPORT_CACHE_GET_MASK(cpuid);
    if (cpuid == 0) {
        // Re-enable the cache, then restore the saved region-enable bits
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL_REG, 1, 1, DPORT_PRO_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_PRO_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#if !CONFIG_FREERTOS_UNICORE
    else {
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL_REG, 1, 1, DPORT_APP_CACHE_ENABLE_S);
        DPORT_SET_PERI_REG_BITS(DPORT_APP_CACHE_CTRL1_REG, cache_mask, saved_state, 0);
    }
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    Cache_Resume_ICache(saved_state);
#elif CONFIG_IDF_TARGET_ESP32S3
    // Low 16 bits hold the dcache state, high 16 bits the icache state
    Cache_Resume_DCache(saved_state & 0xffff);
    Cache_Resume_ICache(saved_state >> 16);
#endif
}
341  
// Returns true if the flash cache is currently enabled on every CPU this
// target has (both PRO and APP on dual-core ESP32).
IRAM_ATTR bool spi_flash_cache_enabled(void)
{
#if CONFIG_IDF_TARGET_ESP32
    bool result = (DPORT_REG_GET_BIT(DPORT_PRO_CACHE_CTRL_REG, DPORT_PRO_CACHE_ENABLE) != 0);
#if portNUM_PROCESSORS == 2
    result = result && (DPORT_REG_GET_BIT(DPORT_APP_CACHE_CTRL_REG, DPORT_APP_CACHE_ENABLE) != 0);
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
    bool result = (REG_GET_BIT(EXTMEM_PRO_ICACHE_CTRL_REG, EXTMEM_PRO_ICACHE_ENABLE) != 0);
#elif CONFIG_IDF_TARGET_ESP32S3
    bool result = (REG_GET_BIT(EXTMEM_ICACHE_CTRL_REG, EXTMEM_ICACHE_ENABLE) != 0);
#endif
    return result;
}
356  
357  #if CONFIG_IDF_TARGET_ESP32S2
// Configure the ESP32-S2 instruction cache (SRAM allocation, size, associativity,
// line size) from sdkconfig, then invalidate it and resume it.
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
    // One SRAM bank dedicated to icache -> 8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    // Two SRAM banks dedicated to icache -> 16KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache \t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    // Suspend while reconfiguring, then invalidate so no stale lines survive
    Cache_Suspend_ICache();
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    Cache_Resume_ICache(0);
}
383  
// Configure the ESP32-S2 data cache from sdkconfig. The SRAM bank layout depends
// on both the instruction-cache and data-cache size options, since they share
// the same SRAM banks.
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S2_INSTRUCTION_CACHE_8KB
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_16KB;
#endif
#else
#if CONFIG_ESP32S2_DATA_CACHE_8KB
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_8KB;
#else
    Cache_Allocate_SRAM(CACHE_MEMORY_ICACHE_LOW, CACHE_MEMORY_ICACHE_HIGH, CACHE_MEMORY_DCACHE_LOW, CACHE_MEMORY_DCACHE_HIGH);
    cache_size = CACHE_SIZE_16KB;
#endif
#endif

    cache_ways = CACHE_4WAYS_ASSOC;
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#else
    cache_line_size = CACHE_LINE_SIZE_32B;
#endif
    ESP_EARLY_LOGI(TAG, "Data cache \t\t: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_8KB ? 8 : 16, 4, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : 32);
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
418  
419  static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
420  {
421      uint32_t i_autoload, d_autoload;
422      if (icache) {
423          i_autoload = Cache_Suspend_ICache();
424      }
425      if (dcache) {
426          d_autoload = Cache_Suspend_DCache();
427      }
428      REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_FLASH_WRAP_AROUND);
429      if (icache) {
430          Cache_Resume_ICache(i_autoload);
431      }
432      if (dcache) {
433          Cache_Resume_DCache(d_autoload);
434      }
435  }
436  
437  #if CONFIG_ESP32S2_SPIRAM_SUPPORT
438  static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
439  {
440      uint32_t i_autoload, d_autoload;
441      if (icache) {
442          i_autoload = Cache_Suspend_ICache();
443      }
444      if (dcache) {
445          d_autoload = Cache_Suspend_DCache();
446      }
447      REG_SET_BIT(EXTMEM_PRO_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_PRO_CACHE_SRAM_RD_WRAP_AROUND);
448      if (icache) {
449          Cache_Resume_ICache(i_autoload);
450      }
451      if (dcache) {
452          Cache_Resume_DCache(d_autoload);
453      }
454  }
455  #endif
456  
// Derive per-memory (flash / SPIRAM) wrap sizes from the cache line configuration
// and the SPIRAM instruction/rodata placement, validate that they are mutually
// consistent, then program wrap mode into the flash chip, the PSRAM and the
// cache controller. Returns ESP_FAIL on an inconsistent or unsupported
// configuration, ESP_OK otherwise. (ESP32-S2 build of this function.)
esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
{
    int icache_wrap_size = 0, dcache_wrap_size = 0;
    // Index 0: wrap size needed by icache traffic; index 1: by dcache traffic.
    // -1 means "this memory is not accessed through that cache".
    int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
    int flash_wrap_size = 0, spiram_wrap_size = 0;
    int flash_count = 0, spiram_count = 0;
    int i;
    bool flash_spiram_wrap_together, flash_support_wrap = true, spiram_support_wrap = true;
    uint32_t drom0_in_icache = 1;//always 1 in esp32s2
#if CONFIG_IDF_TARGET_ESP32S3
    drom0_in_icache = 0;
#endif

    // Wrap size equals the configured cache line size
    if (icache_wrap_enable) {
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_LINE_16B || CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
        icache_wrap_size = 16;
#else
        icache_wrap_size = 32;
#endif
    }
    if (dcache_wrap_enable) {
#if CONFIG_ESP32S2_DATA_CACHE_LINE_16B || CONFIG_ESP32S3_DATA_CACHE_LINE_16B
        dcache_wrap_size = 16;
#else
        dcache_wrap_size = 32;
#endif
    }

    uint32_t instruction_use_spiram = 0;
    uint32_t rodata_use_spiram = 0;
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
    extern uint32_t esp_spiram_instruction_access_enabled(void);
    instruction_use_spiram = esp_spiram_instruction_access_enabled();
#endif
#if CONFIG_SPIRAM_RODATA
    extern uint32_t esp_spiram_rodata_access_enabled(void);
    rodata_use_spiram = esp_spiram_rodata_access_enabled();
#endif

    // Instructions go through the icache; record which memory backs them
    if (instruction_use_spiram) {
        spiram_wrap_sizes[0] = icache_wrap_size;
    } else {
        flash_wrap_sizes[0] = icache_wrap_size;
    }
    // Read-only data may go through either cache depending on the drom0 mapping
    if (rodata_use_spiram) {
        if (drom0_in_icache) {
            spiram_wrap_sizes[0] = icache_wrap_size;
        } else {
            spiram_wrap_sizes[1] = dcache_wrap_size;
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    } else {
        if (drom0_in_icache) {
            flash_wrap_sizes[0] = icache_wrap_size;
        } else {
            flash_wrap_sizes[1] = dcache_wrap_size;
        }
#ifdef CONFIG_EXT_RODATA_SUPPORT
        flash_wrap_sizes[1] = dcache_wrap_size;
#endif
    }
#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    spiram_wrap_sizes[1] = dcache_wrap_size;
#endif
    // Count active cache paths per memory and remember the last requested size
    for (i = 0; i < 2; i++) {
        if (flash_wrap_sizes[i] != -1) {
            flash_count++;
            flash_wrap_size = flash_wrap_sizes[i];
        }
    }
    for (i = 0; i < 2; i++) {
        if (spiram_wrap_sizes[i] != -1) {
            spiram_count++;
            spiram_wrap_size = spiram_wrap_sizes[i];
        }
    }
    // More than two active paths means flash and SPIRAM share a cache, so their
    // wrap settings have to agree with each other
    if (flash_count + spiram_count <= 2) {
        flash_spiram_wrap_together = false;
    } else {
        flash_spiram_wrap_together = true;
    }
    ESP_EARLY_LOGI(TAG, "flash_count=%d, size=%d, spiram_count=%d, size=%d,together=%d", flash_count, flash_wrap_size, spiram_count, spiram_wrap_size, flash_spiram_wrap_together);
    if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
        if (spiram_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
            return ESP_FAIL;
        }
    }
    if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
        ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
        if (flash_wrap_size == 0) {
            return ESP_FAIL;
        }
        if (flash_spiram_wrap_together) {
            ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
            return ESP_FAIL;
        }
    }

    if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
        ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
        return ESP_FAIL;
    }

#ifdef CONFIG_FLASHMODE_QIO
    // Wrap mode requires QIO; also check the chip supports the requested size
    flash_support_wrap = true;
    extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
    if (!spi_flash_support_wrap_size(flash_wrap_size)) {
        flash_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
    }
#else
    ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
#endif

#ifdef CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern bool psram_support_wrap_size(uint32_t wrap_size);
    if (!psram_support_wrap_size(spiram_wrap_size)) {
        spiram_support_wrap = false;
        ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
    }
#endif

    if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
        ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
        return ESP_FAIL;
    }

    // Program wrap mode into the devices first, then into the cache controller
    extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
    if (flash_support_wrap && flash_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
        spi_flash_enable_wrap(flash_wrap_size);
        esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
    }
#if CONFIG_ESP32S2_SPIRAM_SUPPORT
    extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
    if (spiram_support_wrap && spiram_wrap_size > 0) {
        ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
        psram_enable_wrap(spiram_wrap_size);
        esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
    }
#endif

    return ESP_OK;

}
610  #endif
611  #if CONFIG_IDF_TARGET_ESP32S3
// Configure the ESP32-S3 instruction cache (bank occupancy, size, associativity,
// line size) from sdkconfig, invalidate it, then enable it.
IRAM_ATTR void esp_config_instruction_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_INSTRUCTION_CACHE_16KB
    // One instruction bank -> half-size (16KB) icache
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    // Both instruction banks -> full-size (32KB) icache
    Cache_Occupy_ICache_MEMORY(CACHE_MEMORY_IBANK0, CACHE_MEMORY_IBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    ESP_EARLY_LOGI(TAG, "Instruction cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 16 : 32, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_ICache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_ICache_All();
    // Local prototype: this ROM routine is not exposed in the included headers
    extern void Cache_Enable_ICache(uint32_t autoload);
    Cache_Enable_ICache(0);
}
643  
// Configure the ESP32-S3 data cache (bank occupancy, size, associativity,
// line size) from sdkconfig, then invalidate it.
IRAM_ATTR void esp_config_data_cache_mode(void)
{
    cache_size_t cache_size;
    cache_ways_t cache_ways;
    cache_line_size_t cache_line_size;

#if CONFIG_ESP32S3_DATA_CACHE_32KB
    // One data bank -> half-size (32KB) dcache
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK1, CACHE_MEMORY_INVALID);
    cache_size = CACHE_SIZE_HALF;
#else
    // Both data banks -> full-size (64KB) dcache
    Cache_Occupy_DCache_MEMORY(CACHE_MEMORY_DBANK0, CACHE_MEMORY_DBANK1);
    cache_size = CACHE_SIZE_FULL;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_4WAYS
    cache_ways = CACHE_4WAYS_ASSOC;
#else
    cache_ways = CACHE_8WAYS_ASSOC;
#endif
#if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
    cache_line_size = CACHE_LINE_SIZE_16B;
#elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
    cache_line_size = CACHE_LINE_SIZE_32B;
#else
    cache_line_size = CACHE_LINE_SIZE_64B;
#endif
    // ESP_EARLY_LOGI(TAG, "Data cache: size %dKB, %dWays, cache line size %dByte", cache_size == CACHE_SIZE_HALF ? 32 : 64, cache_ways == CACHE_4WAYS_ASSOC ? 4 : 8, cache_line_size == CACHE_LINE_SIZE_16B ? 16 : (cache_line_size == CACHE_LINE_SIZE_32B ? 32 : 64));
    Cache_Set_DCache_Mode(cache_size, cache_ways, cache_line_size);
    Cache_Invalidate_DCache_All();
}
673  
674  static IRAM_ATTR void esp_enable_cache_flash_wrap(bool icache, bool dcache)
675  {
676      uint32_t i_autoload, d_autoload;
677      if (icache) {
678          i_autoload = Cache_Suspend_ICache();
679      }
680      if (dcache) {
681          d_autoload = Cache_Suspend_DCache();
682      }
683      REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_FLASH_WRAP_AROUND);
684      if (icache) {
685          Cache_Resume_ICache(i_autoload);
686      }
687      if (dcache) {
688          Cache_Resume_DCache(d_autoload);
689      }
690  }
691  
692  #if CONFIG_ESP32S3_SPIRAM_SUPPORT
693  static IRAM_ATTR void esp_enable_cache_spiram_wrap(bool icache, bool dcache)
694  {
695      uint32_t i_autoload, d_autoload;
696      if (icache) {
697          i_autoload = Cache_Suspend_ICache();
698      }
699      if (dcache) {
700          d_autoload = Cache_Suspend_DCache();
701      }
702      REG_SET_BIT(EXTMEM_CACHE_WRAP_AROUND_CTRL_REG, EXTMEM_CACHE_SRAM_RD_WRAP_AROUND);
703      if (icache) {
704          Cache_Resume_ICache(i_autoload);
705      }
706      if (dcache) {
707          Cache_Resume_DCache(d_autoload);
708      }
709  }
710  #endif
711  
712  esp_err_t esp_enable_cache_wrap(bool icache_wrap_enable, bool dcache_wrap_enable)
713  {
714      int icache_wrap_size = 0, dcache_wrap_size = 0;
715      int flash_wrap_sizes[2] = {-1, -1}, spiram_wrap_sizes[2] = {-1, -1};
716      int flash_wrap_size = 0, spiram_wrap_size = 0;
717      int flash_count = 0, spiram_count = 0;
718      int i;
719      bool flash_spiram_wrap_together, flash_support_wrap = false, spiram_support_wrap = true;
720      uint32_t drom0_in_icache = 0;//always 0 in chip7.2.4
721  
722      if (icache_wrap_enable) {
723  #if CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_16B
724          icache_wrap_size = 16;
725  #elif CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_32B
726          icache_wrap_size = 32;
727  #else
728          icache_wrap_size = 64;
729  #endif
730      }
731      if (dcache_wrap_enable) {
732  #if CONFIG_ESP32S3_DATA_CACHE_LINE_16B
733          dcache_wrap_size = 16;
734  #elif CONFIG_ESP32S3_DATA_CACHE_LINE_32B
735          dcache_wrap_size = 32;
736  #else
737          dcache_wrap_size = 64;
738  #endif
739      }
740  
741      uint32_t instruction_use_spiram = 0;
742      uint32_t rodata_use_spiram = 0;
743  #if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
744      extern uint32_t esp_spiram_instruction_access_enabled();
745      instruction_use_spiram = esp_spiram_instruction_access_enabled();
746  #endif
747  #if CONFIG_SPIRAM_RODATA
748      extern uint32_t esp_spiram_rodata_access_enabled();
749      rodata_use_spiram = esp_spiram_rodata_access_enabled();
750  #endif
751  
752      if (instruction_use_spiram) {
753          spiram_wrap_sizes[0] = icache_wrap_size;
754      } else {
755          flash_wrap_sizes[0] = icache_wrap_size;
756      }
757      if (rodata_use_spiram) {
758          if (drom0_in_icache) {
759              spiram_wrap_sizes[0] = icache_wrap_size;
760          } else {
761              spiram_wrap_sizes[1] = dcache_wrap_size;
762          }
763  #ifdef CONFIG_EXT_RODATA_SUPPORT
764          spiram_wrap_sizes[1] = dcache_wrap_size;
765  #endif
766      } else {
767          if (drom0_in_icache) {
768              flash_wrap_sizes[0] = icache_wrap_size;
769          } else {
770              flash_wrap_sizes[1] = dcache_wrap_size;
771          }
772  #ifdef CONFIG_EXT_RODATA_SUPPORT
773          flash_wrap_sizes[1] = dcache_wrap_size;
774  #endif
775      }
776  #ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
777      spiram_wrap_sizes[1] = dcache_wrap_size;
778  #endif
779      for (i = 0; i < 2; i++) {
780          if (flash_wrap_sizes[i] != -1) {
781              flash_count++;
782              flash_wrap_size = flash_wrap_sizes[i];
783          }
784      }
785      for (i = 0; i < 2; i++) {
786          if (spiram_wrap_sizes[i] != -1) {
787              spiram_count++;
788              spiram_wrap_size = spiram_wrap_sizes[i];
789          }
790      }
791      if (flash_count + spiram_count <= 2) {
792          flash_spiram_wrap_together = false;
793      } else {
794          flash_spiram_wrap_together = true;
795      }
796      if (flash_count > 1 && flash_wrap_sizes[0] != flash_wrap_sizes[1]) {
797          ESP_EARLY_LOGW(TAG, "Flash wrap with different length %d and %d, abort wrap.", flash_wrap_sizes[0], flash_wrap_sizes[1]);
798          if (spiram_wrap_size == 0) {
799              return ESP_FAIL;
800          }
801          if (flash_spiram_wrap_together) {
802              ESP_EARLY_LOGE(TAG, "Abort spiram wrap because flash wrap length not fixed.");
803              return ESP_FAIL;
804          }
805      }
806      if (spiram_count > 1 && spiram_wrap_sizes[0] != spiram_wrap_sizes[1]) {
807          ESP_EARLY_LOGW(TAG, "SPIRAM wrap with different length %d and %d, abort wrap.", spiram_wrap_sizes[0], spiram_wrap_sizes[1]);
808          if (flash_wrap_size == 0) {
809              return ESP_FAIL;
810          }
811          if (flash_spiram_wrap_together) {
812              ESP_EARLY_LOGW(TAG, "Abort flash wrap because spiram wrap length not fixed.");
813              return ESP_FAIL;
814          }
815      }
816  
817      if (flash_spiram_wrap_together && flash_wrap_size != spiram_wrap_size) {
818          ESP_EARLY_LOGW(TAG, "SPIRAM has different wrap length with flash, %d and %d, abort wrap.", spiram_wrap_size, flash_wrap_size);
819          return ESP_FAIL;
820      }
821  
822  #ifdef CONFIG_FLASHMODE_QIO
823      flash_support_wrap = true;
824      extern bool spi_flash_support_wrap_size(uint32_t wrap_size);
825      if (!spi_flash_support_wrap_size(flash_wrap_size)) {
826          flash_support_wrap = false;
827          ESP_EARLY_LOGW(TAG, "Flash do not support wrap size %d.", flash_wrap_size);
828      }
829  #else
830      ESP_EARLY_LOGW(TAG, "Flash is not in QIO mode, do not support wrap.");
831  #endif
832  
833  
834  #ifdef CONFIG_ESP32S3_SPIRAM_SUPPORT
835      extern bool psram_support_wrap_size(uint32_t wrap_size);
836      if (!psram_support_wrap_size(spiram_wrap_size)) {
837          spiram_support_wrap = false;
838          ESP_EARLY_LOGW(TAG, "SPIRAM do not support wrap size %d.", spiram_wrap_size);
839      }
840  #endif
841  
842      if (flash_spiram_wrap_together && !(flash_support_wrap && spiram_support_wrap)) {
843          ESP_EARLY_LOGW(TAG, "Flash and SPIRAM should support wrap together.");
844          return ESP_FAIL;
845      }
846  
847      extern esp_err_t spi_flash_enable_wrap(uint32_t wrap_size);
848      if (flash_support_wrap && flash_wrap_size > 0) {
849          ESP_EARLY_LOGI(TAG, "Flash wrap enabled, size = %d.", flash_wrap_size);
850          spi_flash_enable_wrap(flash_wrap_size);
851          esp_enable_cache_flash_wrap((flash_wrap_sizes[0] > 0), (flash_wrap_sizes[1] > 0));
852      }
853  #if CONFIG_ESP32S3_SPIRAM_SUPPORT
854      extern esp_err_t psram_enable_wrap(uint32_t wrap_size);
855      if (spiram_support_wrap && spiram_wrap_size > 0) {
856          ESP_EARLY_LOGI(TAG, "SPIRAM wrap enabled, size = %d.", spiram_wrap_size);
857          psram_enable_wrap(spiram_wrap_size);
858          esp_enable_cache_spiram_wrap((spiram_wrap_sizes[0] > 0), (spiram_wrap_sizes[1] > 0));
859      }
860  #endif
861  
862      return ESP_OK;
863  
864  }
865  #endif
866  
867  void IRAM_ATTR spi_flash_enable_cache(uint32_t cpuid)
868  {
869  #if CONFIG_IDF_TARGET_ESP32
870      uint32_t cache_value = DPORT_CACHE_GET_VAL(cpuid);
871      cache_value &= DPORT_CACHE_GET_MASK(cpuid);
872  
873      // Re-enable cache on this CPU
874      spi_flash_restore_cache(cpuid, cache_value);
875  #else
876      spi_flash_restore_cache(0, 0); // TODO cache_value should be non-zero
877  #endif
878  }
879