/* cache_init.S */
/*
 * Cache-handling routines for MIPS CPUs
 *
 * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
  8. #include <asm-offsets.h>
  9. #include <config.h>
  10. #include <asm/asm.h>
  11. #include <asm/regdef.h>
  12. #include <asm/mipsregs.h>
  13. #include <asm/addrspace.h>
  14. #include <asm/cacheops.h>
  15. #ifndef CONFIG_SYS_MIPS_CACHE_MODE
  16. #define CONFIG_SYS_MIPS_CACHE_MODE CONF_CM_CACHABLE_NONCOHERENT
  17. #endif
  18. #ifdef CONFIG_64BIT
  19. # define RA ta3
  20. #else
  21. # define RA t7
  22. #endif
  23. #define INDEX_BASE CKSEG0
/*
 * f_fill64 dst, offset, val
 *
 * Store \val into the 64 bytes starting at (\dst + \offset).
 * Emits 8 LONG_S stores when LONGSIZE == 8 (64-bit build) and 16
 * when LONGSIZE == 4 (32-bit build), so exactly 64 bytes are
 * written either way.  Used to seed memory with good-parity data
 * before the caches are initialised.
 */
.macro f_fill64 dst, offset, val
	LONG_S	\val, (\offset +  0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset +  8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
.endm
/*
 * mips_init_icache(uint PRId, ulong icache_size, unchar icache_linesz)
 *
 * Force good parity into the primary instruction cache:
 * invalidate every line, fill each line once from memory (memory
 * is assumed to already hold good-parity data), then invalidate
 * everything again.
 *
 * In:	a0 = PRId (not referenced in this body)
 *	a1 = cache size in bytes; returns immediately if a1 <= 0
 *	a2 = cache line size in bytes
 * Clobbers: t0, t1, CP0_TAGLO
 */
LEAF(mips_init_icache)
	blez	a1, 9f
	mtc0	zero, CP0_TAGLO		/* zero tag == invalid line */
	/* clear tag to invalidate */
	PTR_LI	t0, INDEX_BASE		/* CKSEG0 base for index ops */
	PTR_ADDU	t1, t0, a1	/* t1 = end of index range */
1:	cache	INDEX_STORE_TAG_I, 0(t0)
	PTR_ADDU	t0, a2		/* advance one cache line */
	bne	t0, t1, 1b
	/* fill once, so data field parity is correct */
	PTR_LI	t0, INDEX_BASE
2:	cache	FILL, 0(t0)
	PTR_ADDU	t0, a2
	bne	t0, t1, 2b
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI	t0, INDEX_BASE
1:	cache	INDEX_STORE_TAG_I, 0(t0)
	PTR_ADDU	t0, a2
	bne	t0, t1, 1b
9:	jr	ra
	END(mips_init_icache)
/*
 * mips_init_dcache(uint PRId, ulong dcache_size, unchar dcache_linesz)
 *
 * Force good parity into the primary data cache: invalidate every
 * line, touch each line with a cached load so the data array is
 * refilled from memory, then invalidate everything again.
 *
 * In:	a0 = PRId (not referenced in this body)
 *	a1 = cache size in bytes; returns immediately if a1 <= 0
 *	a2 = cache line size in bytes
 * Clobbers: t0, t1, CP0_TAGLO
 */
LEAF(mips_init_dcache)
	blez	a1, 9f
	mtc0	zero, CP0_TAGLO		/* zero tag == invalid line */
	/* clear all tags */
	PTR_LI	t0, INDEX_BASE		/* CKSEG0 base for index ops */
	PTR_ADDU	t1, t0, a1	/* t1 = end of index range */
1:	cache	INDEX_STORE_TAG_D, 0(t0)
	PTR_ADDU	t0, a2		/* advance one cache line */
	bne	t0, t1, 1b
	/* load from each line (in cached space) */
	PTR_LI	t0, INDEX_BASE
2:	LONG_L	zero, 0(t0)		/* result discarded into $zero */
	PTR_ADDU	t0, a2
	bne	t0, t1, 2b
	/* clear all tags */
	PTR_LI	t0, INDEX_BASE
1:	cache	INDEX_STORE_TAG_D, 0(t0)
	PTR_ADDU	t0, a2
	bne	t0, t1, 1b
9:	jr	ra
	END(mips_init_dcache)
/*
 * mips_cache_reset - low level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity. It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at location zero to be used as a source of parity.
 *
 * Register usage (sizes either taken from config or probed from Config1):
 *	t8 = I-cache line size	t9 = D-cache line size
 *	t2 = I-cache size	t3 = D-cache size
 *	v0 = max(t2, t3)	RA = saved return address
 *
 * RETURNS: N/A
 *
 */
NESTED(mips_cache_reset, 0, ra)
	move	RA, ra			/* ra is clobbered by the jalr calls below */
#if !defined(CONFIG_SYS_ICACHE_SIZE) || !defined(CONFIG_SYS_DCACHE_SIZE) || \
	!defined(CONFIG_SYS_CACHELINE_SIZE)
	/* read Config1 for use below */
	mfc0	t5, CP0_CONFIG, 1
#endif
#ifdef CONFIG_SYS_CACHELINE_SIZE
	li	t9, CONFIG_SYS_CACHELINE_SIZE	/* D-cache line size */
	li	t8, CONFIG_SYS_CACHELINE_SIZE	/* I-cache line size */
#else
	/* Detect I-cache line size: Config1.IL, line = 2 << IL (0 == none) */
	srl	t8, t5, MIPS_CONF1_IL_SHIFT
	andi	t8, t8, (MIPS_CONF1_IL >> MIPS_CONF1_IL_SHIFT)
	beqz	t8, 1f
	li	t6, 2
	sllv	t8, t6, t8
1:	/* Detect D-cache line size: Config1.DL, line = 2 << DL (0 == none) */
	srl	t9, t5, MIPS_CONF1_DL_SHIFT
	andi	t9, t9, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHIFT)
	beqz	t9, 1f
	li	t6, 2
	sllv	t9, t6, t9
1:
#endif
#ifdef CONFIG_SYS_ICACHE_SIZE
	li	t2, CONFIG_SYS_ICACHE_SIZE
#else
	/* Detect I-cache size: sets/way = 32 << IS (IS == 7 means 32) */
	srl	t6, t5, MIPS_CONF1_IS_SHIFT
	andi	t6, t6, (MIPS_CONF1_IS >> MIPS_CONF1_IS_SHIFT)
	li	t4, 32
	xori	t2, t6, 0x7		/* IS == 7: keep t4 == 32 */
	beqz	t2, 1f
	addi	t6, t6, 1
	sllv	t4, t4, t6
1:	/* At this point t4 == I-cache sets. */
	mul	t2, t4, t8		/* sets * line size */
	srl	t6, t5, MIPS_CONF1_IA_SHIFT
	andi	t6, t6, (MIPS_CONF1_IA >> MIPS_CONF1_IA_SHIFT)
	addi	t6, t6, 1		/* IA field is ways - 1 */
	/* At this point t6 == I-cache ways. */
	mul	t2, t2, t6		/* total I-cache bytes */
#endif
#ifdef CONFIG_SYS_DCACHE_SIZE
	li	t3, CONFIG_SYS_DCACHE_SIZE
#else
	/* Detect D-cache size: sets/way = 32 << DS (DS == 7 means 32) */
	srl	t6, t5, MIPS_CONF1_DS_SHIFT
	andi	t6, t6, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHIFT)
	li	t4, 32
	xori	t3, t6, 0x7		/* DS == 7: keep t4 == 32 */
	beqz	t3, 1f
	addi	t6, t6, 1
	sllv	t4, t4, t6
1:	/* At this point t4 == D-cache sets. */
	mul	t3, t4, t9		/* sets * line size */
	srl	t6, t5, MIPS_CONF1_DA_SHIFT
	andi	t6, t6, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHIFT)
	addi	t6, t6, 1		/* DA field is ways - 1 */
	/* At this point t6 == D-cache ways. */
	mul	t3, t3, t6		/* total D-cache bytes */
#endif
	/* Determine the largest L1 cache size */
#if defined(CONFIG_SYS_ICACHE_SIZE) && defined(CONFIG_SYS_DCACHE_SIZE)
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
	move	v0, t2
	sltu	t1, t2, t3
	movn	v0, t3, t1		/* v0 = max(t2, t3) */
#endif
	/*
	 * Now clear that much memory starting from zero.
	 * CKSEG1 is the uncached window onto physical address 0, so the
	 * fill bypasses the (still uninitialised) caches.
	 */
	PTR_LI	a0, CKSEG1
	PTR_ADDU	a1, a0, v0
2:	PTR_ADDIU	a0, 64
	f_fill64	a0, -64, zero	/* zero the 64 bytes just stepped over */
	bne	a0, a1, 2b
	/*
	 * The caches are probably in an indeterminate state,
	 * so we force good parity into them by doing an
	 * invalidate, load/fill, invalidate for each line.
	 */
	/*
	 * Assume bottom of RAM will generate good parity for the cache.
	 */
	/*
	 * Initialize the I-cache first,
	 */
	move	a1, t2			/* a1 = I-cache size */
	move	a2, t8			/* a2 = I-cache line size */
	PTR_LA	v1, mips_init_icache
	jalr	v1
	/*
	 * then initialize D-cache.
	 */
	move	a1, t3			/* a1 = D-cache size */
	move	a2, t9			/* a2 = D-cache line size */
	PTR_LA	v1, mips_init_dcache
	jalr	v1
	jr	RA			/* return via the copy saved at entry */
	END(mips_cache_reset)
  213. /*
  214. * dcache_status - get cache status
  215. *
  216. * RETURNS: 0 - cache disabled; 1 - cache enabled
  217. *
  218. */
  219. LEAF(dcache_status)
  220. mfc0 t0, CP0_CONFIG
  221. li t1, CONF_CM_UNCACHED
  222. andi t0, t0, CONF_CM_CMASK
  223. move v0, zero
  224. beq t0, t1, 2f
  225. li v0, 1
  226. 2: jr ra
  227. END(dcache_status)
  228. /*
  229. * dcache_disable - disable cache
  230. *
  231. * RETURNS: N/A
  232. *
  233. */
  234. LEAF(dcache_disable)
  235. mfc0 t0, CP0_CONFIG
  236. li t1, -8
  237. and t0, t0, t1
  238. ori t0, t0, CONF_CM_UNCACHED
  239. mtc0 t0, CP0_CONFIG
  240. jr ra
  241. END(dcache_disable)
/*
 * dcache_enable - enable cache
 *
 * Set the Config.K0 cacheability field to the configured cache
 * mode.  The ori/xori pair below clears the CM field without
 * needing a scratch register: ori forces all CMASK bits to 1,
 * xori then flips exactly those bits back to 0.
 *
 * Clobbers: t0
 *
 * RETURNS: N/A
 *
 */
LEAF(dcache_enable)
	mfc0	t0, CP0_CONFIG
	ori	t0, CONF_CM_CMASK	/* set CM field to all-ones... */
	xori	t0, CONF_CM_CMASK	/* ...then clear it */
	ori	t0, CONFIG_SYS_MIPS_CACHE_MODE	/* install configured mode */
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_enable)