cache_v7.c 4.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213
  1. // SPDX-License-Identifier: GPL-2.0+
  2. /*
  3. * (C) Copyright 2010
  4. * Texas Instruments, <www.ti.com>
  5. * Aneesh V <aneesh@ti.com>
  6. */
  7. #include <linux/types.h>
  8. #include <common.h>
  9. #include <asm/armv7.h>
  10. #include <asm/utils.h>
  11. #define ARMV7_DCACHE_INVAL_RANGE 1
  12. #define ARMV7_DCACHE_CLEAN_INVAL_RANGE 2
  13. #ifndef CONFIG_SYS_DCACHE_OFF
  14. /* Asm functions from cache_v7_asm.S */
  15. void v7_flush_dcache_all(void);
  16. void v7_invalidate_dcache_all(void);
  17. static u32 get_ccsidr(void)
  18. {
  19. u32 ccsidr;
  20. /* Read current CP15 Cache Size ID Register */
  21. asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
  22. return ccsidr;
  23. }
  24. static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
  25. {
  26. u32 mva;
  27. /* Align start to cache line boundary */
  28. start &= ~(line_len - 1);
  29. for (mva = start; mva < stop; mva = mva + line_len) {
  30. /* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
  31. asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
  32. }
  33. }
  34. static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
  35. {
  36. u32 mva;
  37. if (!check_cache_range(start, stop))
  38. return;
  39. for (mva = start; mva < stop; mva = mva + line_len) {
  40. /* DCIMVAC - Invalidate data cache by MVA to PoC */
  41. asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
  42. }
  43. }
  44. static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
  45. {
  46. u32 line_len, ccsidr;
  47. ccsidr = get_ccsidr();
  48. line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
  49. CCSIDR_LINE_SIZE_OFFSET) + 2;
  50. /* Converting from words to bytes */
  51. line_len += 2;
  52. /* converting from log2(linelen) to linelen */
  53. line_len = 1 << line_len;
  54. switch (range_op) {
  55. case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
  56. v7_dcache_clean_inval_range(start, stop, line_len);
  57. break;
  58. case ARMV7_DCACHE_INVAL_RANGE:
  59. v7_dcache_inval_range(start, stop, line_len);
  60. break;
  61. }
  62. /* DSB to make sure the operation is complete */
  63. dsb();
  64. }
/*
 * Invalidate the entire TLB (unified, data, and instruction), then
 * synchronise with DSB + ISB so later translations and instruction
 * fetches cannot use stale entries.
 */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	dsb();
	/* Full system ISB - make sure the instruction stream sees it */
	isb();
}
/*
 * Invalidate the entire data cache: the architected (CP15-visible)
 * caches via the assembly helper, then any outer cache (the __weak
 * v7_outer_cache_inval_all() default below is a no-op).
 */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();
	v7_outer_cache_inval_all();
}
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels, then the same for any outer cache (the __weak
 * v7_outer_cache_flush_all() default below is a no-op).
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();
	v7_outer_cache_flush_all();
}
/*
 * Invalidates range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1]
 *
 * NOTE(review): the return value of check_cache_range() is ignored
 * here — presumably it is warn-only, and v7_dcache_inval_range()
 * re-checks and bails out itself; confirm against its definition.
 * The outer-cache invalidate still runs regardless.
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);
	v7_outer_cache_inval_range(start, stop);
}
/*
 * Flush range(clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1]
 *
 * NOTE(review): the return value of check_cache_range() is ignored —
 * presumably warn-only; confirm against its definition. Unlike the
 * invalidate path, v7_dcache_clean_inval_range() does not re-check
 * and instead rounds start down to a line boundary.
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);
	v7_outer_cache_flush_range(start, stop);
}
/*
 * Prepare the cache/TLB state before the MMU is configured: enable
 * the outer cache (no-op by default), invalidate all data caches,
 * and invalidate the entire TLB.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
/*
 * Flush a modified page-table region [start, stop - 1] from the
 * D-cache and invalidate the TLB so stale translations are dropped.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/*
 * D-cache support compiled out (CONFIG_SYS_DCACHE_OFF): provide empty
 * stubs with the same signatures so callers link and run unchanged
 * whether or not cache maintenance is available.
 */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	dsb();
	/* ISB - make sure the instruction stream sees it */
	isb();
}
#else
/* I-cache maintenance compiled out: empty stub keeps callers linking */
void invalidate_icache_all(void)
{
}
#endif
/*
 * Stub implementations for outer cache operations. Platforms with an
 * outer cache controller override these strong-symbol-style; the
 * __weak defaults make all outer-cache maintenance a no-op.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}