cache_init.S

/*
 * Cache-handling routines for MIPS CPUs
 *
 * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>

#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>

#ifndef CONFIG_SYS_MIPS_CACHE_MODE
#define CONFIG_SYS_MIPS_CACHE_MODE CONF_CM_CACHABLE_NONCOHERENT
#endif

#define INDEX_BASE	CKSEG0

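/*
 * f_fill64: store \val to the 64 bytes starting at \offset(\dst),
 * as 8 doubleword or 16 word stores depending on LONGSIZE.
 */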
	.macro	f_fill64 dst, offset, val
	LONG_S	\val, (\offset +  0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset +  8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
	.endm

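/*
 * cache_loop: perform cache operation \op on every line from \curr
 * (inclusive) to \end (exclusive), stepping by \line_sz bytes.
 */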
	.macro	cache_loop	curr, end, line_sz, op
10:	cache		\op, 0(\curr)
	PTR_ADDU	\curr, \curr, \line_sz
	bne		\curr, \end, 10b
	.endm

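/*
 * l1_info: compute a primary cache's total size and line size from the
 * three 3-bit fields (sets/line/ways) that CP0 Config1 holds for it; \off
 * selects the I- or D-cache field group. Per the MIPS32 PRA the encoding
 * is: line size = 2 << L (L == 0 means no cache), ways = A + 1, and
 * sets/way = 64 << S, so e.g. L=4, A=3, S=2 decodes to 32-byte lines,
 * 4 ways and 256 sets, i.e. a 32KB cache. The S == 7 -> 32 sets special
 * case is handled by the addiu/andi wrap below.
 */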
	.macro	l1_info	sz, line_sz, off
	.set	push
	.set	noat

	mfc0	$1, CP0_CONFIG, 1

	/* detect line size */
	srl	\line_sz, $1, \off + MIPS_CONF1_DL_SHIFT - MIPS_CONF1_DA_SHIFT
	andi	\line_sz, \line_sz, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHIFT)
	move	\sz, zero
	beqz	\line_sz, 10f
	li	\sz, 2
	sllv	\line_sz, \sz, \line_sz

	/* detect associativity */
	srl	\sz, $1, \off + MIPS_CONF1_DA_SHIFT - MIPS_CONF1_DA_SHIFT
	andi	\sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHIFT)
	addi	\sz, \sz, 1

	/* sz *= line_sz */
	mul	\sz, \sz, \line_sz

	/* detect log32(sets) */
	srl	$1, $1, \off + MIPS_CONF1_DS_SHIFT - MIPS_CONF1_DA_SHIFT
	andi	$1, $1, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHIFT)
	addiu	$1, $1, 1
	andi	$1, $1, 0x7

	/* sz <<= log32(sets) */
	sllv	\sz, \sz, $1

	/* sz *= 32 */
	li	$1, 32
	mul	\sz, \sz, $1
10:
	.set	pop
	.endm

/*
 * mips_cache_reset - low level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity. It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at location zero to be used as a source of parity.
 *
 * RETURNS: N/A
 *
 */
LEAF(mips_cache_reset)
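	/*
	 * Cache geometry: sizes in t2 (I) & t3 (D), line sizes in t8 (I) &
	 * t9 (D), taken from the board config where available, otherwise
	 * probed from CP0 Config1 via l1_info.
	 */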
#ifdef CONFIG_SYS_ICACHE_SIZE
	li	t2, CONFIG_SYS_ICACHE_SIZE
	li	t8, CONFIG_SYS_CACHELINE_SIZE
#else
	l1_info	t2, t8, MIPS_CONF1_IA_SHIFT
#endif

#ifdef CONFIG_SYS_DCACHE_SIZE
	li	t3, CONFIG_SYS_DCACHE_SIZE
	li	t9, CONFIG_SYS_CACHELINE_SIZE
#else
	l1_info	t3, t9, MIPS_CONF1_DA_SHIFT
#endif

#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD

	/* Determine the largest L1 cache size */
#if defined(CONFIG_SYS_ICACHE_SIZE) && defined(CONFIG_SYS_DCACHE_SIZE)
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
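	/* sizes were probed at run time: v0 = max(t2, t3) */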
	move	v0, t2
	sltu	t1, t2, t3
	movn	v0, t3, t1
#endif

	/*
	 * Now clear that much memory starting from zero. The writes go
	 * through KSEG1, i.e. uncached, so they reach RAM directly while
	 * the caches are still uninitialised.
	 */
	PTR_LI		a0, CKSEG1
	PTR_ADDU	a1, a0, v0
2:	PTR_ADDIU	a0, 64
	f_fill64	a0, -64, zero
	bne		a0, a1, 2b

#endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */

	/*
	 * The TagLo registers used depend upon the CPU implementation, but the
	 * architecture requires that it is safe for software to write to both
	 * TagLo selects 0 & 2 covering supported cases.
	 */
	mtc0	zero, CP0_TAGLO
	mtc0	zero, CP0_TAGLO, 2

	/*
	 * The caches are probably in an indeterminate state, so we force good
	 * parity into them by doing an invalidate for each line. If
	 * CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
	 * perform a load/fill & a further invalidate for each line, assuming
	 * that the bottom of RAM (having just been cleared) will generate good
	 * parity for the cache.
	 */

	/*
	 * Initialize the I-cache first,
	 */
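	/* skip entirely if no I-cache was found (t2 <= 0) */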
	blez		t2, 1f
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, t2
	/* clear tag to invalidate */
	cache_loop	t0, t1, t8, INDEX_STORE_TAG_I
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* fill once, so data field parity is correct */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, t8, FILL
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, t8, INDEX_STORE_TAG_I
#endif

	/*
	 * then initialize D-cache.
	 */
1:	blez		t3, 3f
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, t3
	/* clear all tags */
	cache_loop	t0, t1, t9, INDEX_STORE_TAG_D
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* load from each line (in cached space) */
	PTR_LI		t0, INDEX_BASE
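	/* a load to the zero register discards its result but still fills the line */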
2:	LONG_L		zero, 0(t0)
	PTR_ADDU	t0, t9
	bne		t0, t1, 2b
	/* clear all tags */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, t9, INDEX_STORE_TAG_D
#endif

3:	jr	ra
	END(mips_cache_reset)

/*
 * dcache_status - get cache status
 *
 * RETURNS: 0 - cache disabled; 1 - cache enabled
 *
 */
LEAF(dcache_status)
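	/* the K0 field (low 3 bits of CP0 Config) holds the kseg0 cacheability mode */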
	mfc0	t0, CP0_CONFIG
	li	t1, CONF_CM_UNCACHED
	andi	t0, t0, CONF_CM_CMASK
	move	v0, zero
	beq	t0, t1, 2f
	li	v0, 1
2:	jr	ra
	END(dcache_status)

/*
 * dcache_disable - disable cache
 *
 * RETURNS: N/A
 *
 */
LEAF(dcache_disable)
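	/* clear the K0 field (li t1, -8 masks off the low 3 bits), then select uncached */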
	mfc0	t0, CP0_CONFIG
	li	t1, -8
	and	t0, t0, t1
	ori	t0, t0, CONF_CM_UNCACHED
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_disable)

/*
 * dcache_enable - enable cache
 *
 * RETURNS: N/A
 *
 */
LEAF(dcache_enable)
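	/* the ori/xori pair clears the K0 field without a scratch register,
	 * then the configured cache mode is OR'd in */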
	mfc0	t0, CP0_CONFIG
	ori	t0, CONF_CM_CMASK
	xori	t0, CONF_CM_CMASK
	ori	t0, CONFIG_SYS_MIPS_CACHE_MODE
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_enable)