cache_init.S

/*
 * Cache-handling routines for MIPS CPUs
 *
 * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>

#ifndef CONFIG_SYS_MIPS_CACHE_MODE
#define CONFIG_SYS_MIPS_CACHE_MODE CONF_CM_CACHABLE_NONCOHERENT
#endif

#define INDEX_BASE	CKSEG0
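
/*
 * f_fill64: store \val to the 64 bytes at \dst + \offset. Eight LONG_S
 * stores cover 64 bytes when LONGSIZE == 8; sixteen are emitted when
 * LONGSIZE == 4.
 */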
.macro	f_fill64 dst, offset, val
	LONG_S	\val, (\offset +  0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset +  8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
.endm
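
/*
 * cache_loop: apply cache operation \op to each \line_sz-byte line in
 * the range [\curr, \end), advancing \curr as it goes.
 */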
.macro	cache_loop	curr, end, line_sz, op
10:	cache		\op, 0(\curr)
	PTR_ADDU	\curr, \curr, \line_sz
	bne		\curr, \end, 10b
.endm
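
/*
 * l1_info: compute the total size (\sz) and line size (\line_sz) of a
 * primary cache from CP0 Config1. \off is the shift of that cache's
 * associativity field (MIPS_CONF1_IA_SHF or MIPS_CONF1_DA_SHF); the
 * field offsets below are expressed relative to MIPS_CONF1_DA_SHF so
 * the one macro serves both caches.
 *
 * Worked example (Config1 values assumed purely for illustration):
 * DL=4, DA=3, DS=2 gives line_sz = 2 << 4 = 32 bytes and assoc = 4,
 * so sz = 4 * 32 = 128; then sz <<= (2 + 1) and sz *= 32, leaving
 * sz = 32768 bytes (32 KiB).
 */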
.macro	l1_info	sz, line_sz, off
	.set	push
	.set	noat

	mfc0	$1, CP0_CONFIG, 1

	/* detect line size */
	srl	\line_sz, $1, \off + MIPS_CONF1_DL_SHF - MIPS_CONF1_DA_SHF
	andi	\line_sz, \line_sz, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHF)
	move	\sz, zero
	beqz	\line_sz, 10f
	li	\sz, 2
	sllv	\line_sz, \sz, \line_sz

	/* detect associativity */
	srl	\sz, $1, \off + MIPS_CONF1_DA_SHF - MIPS_CONF1_DA_SHF
	andi	\sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHF)
	addiu	\sz, \sz, 1

	/* sz *= line_sz */
	mul	\sz, \sz, \line_sz

	/* detect log32(sets) */
	srl	$1, $1, \off + MIPS_CONF1_DS_SHF - MIPS_CONF1_DA_SHF
	andi	$1, $1, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHF)
	addiu	$1, $1, 1
	andi	$1, $1, 0x7

	/* sz <<= log32(sets) */
	sllv	\sz, \sz, $1

	/* sz *= 32 */
	li	$1, 32
	mul	\sz, \sz, $1
10:
	.set	pop
.endm

/*
 * mips_cache_reset - low level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity. It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at location zero to be used as a source of parity.
 *
 * RETURNS: N/A
 */
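/*
 * In outline: determine the cache geometry (from config or CP0 Config1),
 * optionally zero-fill the bottom of RAM as a parity source, zero the
 * TagLo registers, invalidate every I-cache line (plus an optional
 * fill/invalidate pass), enable caching via Config.K0, then repeat the
 * passes for the D-cache.
 */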
LEAF(mips_cache_reset)
#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	t2, CONFIG_SYS_ICACHE_SIZE
	li	t8, CONFIG_SYS_ICACHE_LINE_SIZE
#else
	l1_info	t2, t8, MIPS_CONF1_IA_SHF
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	t3, CONFIG_SYS_DCACHE_SIZE
	li	t9, CONFIG_SYS_DCACHE_LINE_SIZE
#else
	l1_info	t3, t9, MIPS_CONF1_DA_SHF
#endif
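
	/*
	 * From here on: t2/t8 hold the I-cache total and line size,
	 * t3/t9 the D-cache total and line size.
	 */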

#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* Determine the largest L1 cache size */
#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
	move	v0, t2
	sltu	t1, t2, t3
	movn	v0, t3, t1
#endif
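	/*
	 * In the auto-sized case the sltu/movn pair above is a branchless
	 * max(): t1 is 1 only when t2 < t3, and movn then replaces v0 (the
	 * I-cache size) with t3 (the D-cache size).
	 */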

	/*
	 * Now clear that much memory starting from physical address zero,
	 * through CKSEG1 (uncached) so the stores reach memory directly -
	 * the caches are not yet usable.
	 */
	PTR_LI		a0, CKSEG1
	PTR_ADDU	a1, a0, v0
2:	PTR_ADDIU	a0, 64
	f_fill64	a0, -64, zero
	bne		a0, a1, 2b
#endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */

	/*
	 * The TagLo registers used depend upon the CPU implementation, but the
	 * architecture requires that it is safe for software to write to both
	 * TagLo selects 0 & 2 covering supported cases.
	 */
	mtc0	zero, CP0_TAGLO
	mtc0	zero, CP0_TAGLO, 2

	/*
	 * The caches are probably in an indeterminate state, so we force good
	 * parity into them by doing an invalidate for each line. If
	 * CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
	 * perform a load/fill & a further invalidate for each line, assuming
	 * that the bottom of RAM (having just been cleared) will generate good
	 * parity for the cache.
	 */

	/*
	 * Initialize the I-cache first,
	 */
	blez		t2, 1f
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, t2
	/* clear tag to invalidate */
	cache_loop	t0, t1, t8, INDEX_STORE_TAG_I
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* fill once, so data field parity is correct */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, t8, FILL
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, t8, INDEX_STORE_TAG_I
#endif

	/* Enable use of the I-cache by setting Config.K0 */
	sync
	mfc0	t0, CP0_CONFIG
	li	t1, CONFIG_SYS_MIPS_CACHE_MODE
#if __mips_isa_rev >= 2
	/* R2 and later can write the 3-bit K0 field directly */
	ins	t0, t1, 0, 3
#else
	/* set then clear every K0 bit to zero the field, then OR in the mode */
	ori	t0, t0, CONF_CM_CMASK
	xori	t0, t0, CONF_CM_CMASK
	or	t0, t0, t1
#endif
	mtc0	t0, CP0_CONFIG

	/*
	 * then initialize D-cache.
	 */
1:	blez		t3, 3f
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, t3
	/* clear all tags */
	cache_loop	t0, t1, t9, INDEX_STORE_TAG_D
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* load from each line (in cached space) */
	PTR_LI		t0, INDEX_BASE
2:	LONG_L		zero, 0(t0)
	PTR_ADDU	t0, t9
	bne		t0, t1, 2b
	/* clear all tags */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, t9, INDEX_STORE_TAG_D
#endif

3:	jr	ra
	END(mips_cache_reset)

/*
 * dcache_status - get cache status
 *
 * RETURNS: 0 - cache disabled; 1 - cache enabled
 */
LEAF(dcache_status)
	mfc0	t0, CP0_CONFIG
	li	t1, CONF_CM_UNCACHED
	andi	t0, t0, CONF_CM_CMASK
	move	v0, zero
	beq	t0, t1, 2f
	li	v0, 1
2:	jr	ra
	END(dcache_status)
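
/*
 * Illustrative C equivalent - a sketch only, assuming the read_c0_config
 * accessor from <asm/mipsregs.h>:
 *
 *	int dcache_status(void)
 *	{
 *		return (read_c0_config() & CONF_CM_CMASK) != CONF_CM_UNCACHED;
 *	}
 */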

/*
 * dcache_disable - disable cache
 *
 * RETURNS: N/A
 */
LEAF(dcache_disable)
	mfc0	t0, CP0_CONFIG
	li	t1, -8		/* -8 == ~7: mask that clears the Config.K0 field */
	and	t0, t0, t1
	ori	t0, t0, CONF_CM_UNCACHED
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_disable)
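
/*
 * Illustrative C equivalent - a sketch only, assuming the read_c0_config/
 * write_c0_config accessors from <asm/mipsregs.h>:
 *
 *	void dcache_disable(void)
 *	{
 *		u32 cfg = read_c0_config() & ~CONF_CM_CMASK;
 *		write_c0_config(cfg | CONF_CM_UNCACHED);
 *	}
 */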

/*
 * dcache_enable - enable cache
 *
 * RETURNS: N/A
 */
LEAF(dcache_enable)
	mfc0	t0, CP0_CONFIG
	/* clear the K0 field, then OR in the configured cache mode */
	ori	t0, CONF_CM_CMASK
	xori	t0, CONF_CM_CMASK
	ori	t0, CONFIG_SYS_MIPS_CACHE_MODE
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_enable)
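
/*
 * Illustrative C equivalent - a sketch only, assuming the same accessors
 * as above:
 *
 *	void dcache_enable(void)
 *	{
 *		u32 cfg = read_c0_config() & ~CONF_CM_CMASK;
 *		write_c0_config(cfg | CONFIG_SYS_MIPS_CACHE_MODE);
 *	}
 */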