/*
 * Cache-handling routines for MIPS CPUs
 *
 * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>

#ifndef CONFIG_SYS_MIPS_CACHE_MODE
#define CONFIG_SYS_MIPS_CACHE_MODE CONF_CM_CACHABLE_NONCOHERENT
#endif

#define RA		t9

#define INDEX_BASE	CKSEG0
/*
 * cache_op - issue one MIPS CACHE instruction of type \op on the line
 * containing address \addr.
 *
 * Wrapped in .set push/.set pop so that the mips3 ISA level and the
 * noreorder mode required around the 'cache' opcode do not leak into
 * the surrounding (reorder-mode) code.
 */
	.macro	cache_op op addr
	.set	push
	.set	noreorder
	.set	mips3
	cache	\op, 0(\addr)
	.set	pop
	.endm
/*
 * f_fill64 - store \val into 64 consecutive bytes at \offset(\dst).
 *
 * Emits 64/LONGSIZE LONG_S stores: 8 when LONGSIZE == 8 (64-bit),
 * plus the extra 8 below for LONGSIZE == 4 (32-bit), giving 16.
 */
	.macro	f_fill64 dst, offset, val
	LONG_S	\val, (\offset + 0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset + 8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
	.endm
/*
 * mips_init_icache(uint PRId, ulong icache_size, unchar icache_linesz)
 *
 * Initialise the primary I-cache: invalidate every line by index, fill
 * every line once so the data fields have good parity, then invalidate
 * again.
 *
 * In:	a0 = PRId (unused here)
 *	a1 = cache size in bytes (returns immediately if <= 0)
 *	a2 = cache line size in bytes
 * Clobbers: t0, t1
 *
 * No global ".set noreorder" is in effect, so the assembler takes care
 * of the branch delay slots.  The numeric label "1:" is deliberately
 * reused; each "1b" binds to the nearest preceding "1:".
 */
LEAF(mips_init_icache)
	blez	a1, 9f
	mtc0	zero, CP0_TAGLO
	/* clear tag to invalidate */
	PTR_LI	t0, INDEX_BASE		/* t0 = current index address */
	PTR_ADDU t1, t0, a1		/* t1 = end of cache range */
1:	cache_op INDEX_STORE_TAG_I t0
	PTR_ADDU t0, a2			/* advance one line */
	bne	t0, t1, 1b
	/* fill once, so data field parity is correct */
	PTR_LI	t0, INDEX_BASE
2:	cache_op FILL t0
	PTR_ADDU t0, a2
	bne	t0, t1, 2b
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI	t0, INDEX_BASE
1:	cache_op INDEX_STORE_TAG_I t0
	PTR_ADDU t0, a2
	bne	t0, t1, 1b
9:	jr	ra
	END(mips_init_icache)
/*
 * mips_init_dcache(uint PRId, ulong dcache_size, unchar dcache_linesz)
 *
 * Initialise the primary D-cache: invalidate every line by index, touch
 * each line with a cached load to force a fill (establishing good
 * parity), then invalidate everything again.
 *
 * In:	a0 = PRId (unused here)
 *	a1 = cache size in bytes (returns immediately if <= 0)
 *	a2 = cache line size in bytes
 * Clobbers: t0, t1
 *
 * Branch delay slots are filled by the assembler (reorder mode); the
 * numeric label "1:" is reused, "1b" binds to the nearest one above.
 */
LEAF(mips_init_dcache)
	blez	a1, 9f
	mtc0	zero, CP0_TAGLO
	/* clear all tags */
	PTR_LI	t0, INDEX_BASE		/* t0 = current index address */
	PTR_ADDU t1, t0, a1		/* t1 = end of cache range */
1:	cache_op INDEX_STORE_TAG_D t0
	PTR_ADDU t0, a2			/* advance one line */
	bne	t0, t1, 1b
	/* load from each line (in cached space) */
	PTR_LI	t0, INDEX_BASE
2:	LONG_L	zero, 0(t0)		/* result discarded; load only forces the fill */
	PTR_ADDU t0, a2
	bne	t0, t1, 2b
	/* clear all tags */
	PTR_LI	t0, INDEX_BASE
1:	cache_op INDEX_STORE_TAG_D t0
	PTR_ADDU t0, a2
	bne	t0, t1, 1b
9:	jr	ra
	END(mips_init_dcache)
/*
 * mips_cache_reset - low level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity. It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at location zero to be used as a source of parity.
 *
 * Register usage (sizes/line sizes come either from the board config or
 * are probed from the Config1 CP0 register):
 *	t2 = I-cache size		t8 = I-cache line size
 *	t3 = D-cache size		t7 = D-cache line size
 *	t5 = Config1			t6, t4, t1 = scratch
 *	v0 = max(I-cache size, D-cache size)
 *	RA (t9) = saved return address (the jalr calls below clobber ra)
 *
 * RETURNS: N/A
 *
 */
NESTED(mips_cache_reset, 0, ra)
	move	RA, ra

#if !defined(CONFIG_SYS_ICACHE_SIZE) || !defined(CONFIG_SYS_DCACHE_SIZE) || \
	!defined(CONFIG_SYS_CACHELINE_SIZE)
	/* read Config1 for use below */
	mfc0	t5, CP0_CONFIG, 1
#endif

#ifdef CONFIG_SYS_CACHELINE_SIZE
	li	t7, CONFIG_SYS_CACHELINE_SIZE
	li	t8, CONFIG_SYS_CACHELINE_SIZE
#else
	/* Detect I-cache line size: 2 << IL bytes; IL == 0 means none */
	srl	t8, t5, MIPS_CONF1_IL_SHIFT
	andi	t8, t8, (MIPS_CONF1_IL >> MIPS_CONF1_IL_SHIFT)
	beqz	t8, 1f
	li	t6, 2
	sllv	t8, t6, t8
1:	/* Detect D-cache line size: 2 << DL bytes; DL == 0 means none */
	srl	t7, t5, MIPS_CONF1_DL_SHIFT
	andi	t7, t7, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHIFT)
	beqz	t7, 1f
	li	t6, 2
	sllv	t7, t6, t7
1:
#endif

#ifdef CONFIG_SYS_ICACHE_SIZE
	li	t2, CONFIG_SYS_ICACHE_SIZE
#else
	/* Detect I-cache size: sets/way = 32 << (IS + 1), except IS == 7 -> 32 */
	srl	t6, t5, MIPS_CONF1_IS_SHIFT
	andi	t6, t6, (MIPS_CONF1_IS >> MIPS_CONF1_IS_SHIFT)
	li	t4, 32
	xori	t2, t6, 0x7
	beqz	t2, 1f
	addi	t6, t6, 1
	sllv	t4, t4, t6
1:	/* At this point t4 == I-cache sets. */
	mul	t2, t4, t8		/* sets * line size... */
	srl	t6, t5, MIPS_CONF1_IA_SHIFT
	andi	t6, t6, (MIPS_CONF1_IA >> MIPS_CONF1_IA_SHIFT)
	addi	t6, t6, 1
	/* At this point t6 == I-cache ways. */
	mul	t2, t2, t6		/* ... * ways = total size in bytes */
#endif

#ifdef CONFIG_SYS_DCACHE_SIZE
	li	t3, CONFIG_SYS_DCACHE_SIZE
#else
	/* Detect D-cache size: sets/way = 32 << (DS + 1), except DS == 7 -> 32 */
	srl	t6, t5, MIPS_CONF1_DS_SHIFT
	andi	t6, t6, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHIFT)
	li	t4, 32
	xori	t3, t6, 0x7
	beqz	t3, 1f
	addi	t6, t6, 1
	sllv	t4, t4, t6
1:	/* At this point t4 == D-cache sets. */
	mul	t3, t4, t7		/* sets * line size... */
	srl	t6, t5, MIPS_CONF1_DA_SHIFT
	andi	t6, t6, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHIFT)
	addi	t6, t6, 1
	/* At this point t6 == D-cache ways. */
	mul	t3, t3, t6		/* ... * ways = total size in bytes */
#endif

	/* Determine the largest L1 cache size */
#if defined(CONFIG_SYS_ICACHE_SIZE) && defined(CONFIG_SYS_DCACHE_SIZE)
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
	move	v0, t2			/* v0 = max(t2, t3) */
	sltu	t1, t2, t3
	movn	v0, t3, t1
#endif

	/*
	 * Now clear that much memory starting from zero.
	 * The fill goes through KSEG1 (uncached) so the stores bypass
	 * the still-uninitialised caches; 64 bytes per iteration.
	 */
	PTR_LI	a0, CKSEG1
	PTR_ADDU a1, a0, v0		/* a1 = end address */
2:	PTR_ADDIU a0, 64
	f_fill64 a0, -64, zero
	bne	a0, a1, 2b

	/*
	 * The caches are probably in an indeterminate state,
	 * so we force good parity into them by doing an
	 * invalidate, load/fill, invalidate for each line.
	 */
	/*
	 * Assume bottom of RAM will generate good parity for the cache.
	 */
	/*
	 * Initialize the I-cache first,
	 */
	move	a1, t2			/* a1 = I-cache size */
	move	a2, t8			/* a2 = I-cache line size */
	PTR_LA	v1, mips_init_icache
	jalr	v1
	/*
	 * then initialize D-cache.
	 */
	move	a1, t3			/* a1 = D-cache size */
	move	a2, t7			/* a2 = D-cache line size */
	PTR_LA	v1, mips_init_dcache
	jalr	v1

	jr	RA			/* return via the copy saved in t9 */
	END(mips_cache_reset)
  216. /*
  217. * dcache_status - get cache status
  218. *
  219. * RETURNS: 0 - cache disabled; 1 - cache enabled
  220. *
  221. */
  222. LEAF(dcache_status)
  223. mfc0 t0, CP0_CONFIG
  224. li t1, CONF_CM_UNCACHED
  225. andi t0, t0, CONF_CM_CMASK
  226. move v0, zero
  227. beq t0, t1, 2f
  228. li v0, 1
  229. 2: jr ra
  230. END(dcache_status)
  231. /*
  232. * dcache_disable - disable cache
  233. *
  234. * RETURNS: N/A
  235. *
  236. */
  237. LEAF(dcache_disable)
  238. mfc0 t0, CP0_CONFIG
  239. li t1, -8
  240. and t0, t0, t1
  241. ori t0, t0, CONF_CM_UNCACHED
  242. mtc0 t0, CP0_CONFIG
  243. jr ra
  244. END(dcache_disable)
  245. /*
  246. * dcache_enable - enable cache
  247. *
  248. * RETURNS: N/A
  249. *
  250. */
  251. LEAF(dcache_enable)
  252. mfc0 t0, CP0_CONFIG
  253. ori t0, CONF_CM_CMASK
  254. xori t0, CONF_CM_CMASK
  255. ori t0, CONFIG_SYS_MIPS_CACHE_MODE
  256. mtc0 t0, CP0_CONFIG
  257. jr ra
  258. END(dcache_enable)