cache.c

/*
 * (C) Copyright 2007
 * Nobuhiro Iwamatsu <iwamatsu@nigauri.org>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <common.h>
#include <command.h>
#include <asm/processor.h>
#include <asm/io.h>

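/*
 * On SH-4, P1 (0x80000000-0x9FFFFFFF) is the cached, untranslated
 * window onto physical memory and P2 (0xA0000000-0xBFFFFFFF) is its
 * uncached alias. ORing a P1 address with 0x20000000, as jump_to_P2()
 * does below, re-enters the same code at its P2 address.
 */
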
/*
 * Jump to P2 area.
 * TLB and cache maintenance must be performed from the P2 area.
 */
#define jump_to_P2()				\
	do {					\
		unsigned long __dummy;		\
		__asm__ __volatile__(		\
			"mov.l	1f, %0\n\t"	\
			"or	%1, %0\n\t"	\
			"jmp	@%0\n\t"	\
			" nop\n\t"		\
			".balign 4\n"		\
			"1:	.long 2f\n"	\
			"2:"			\
			: "=&r" (__dummy)	\
			: "r" (0x20000000));	\
	} while (0)

/*
 * Back to P1 area.
 */
#define back_to_P1()					\
	do {						\
		unsigned long __dummy;			\
		__asm__ __volatile__(			\
			"nop;nop;nop;nop;nop;nop;nop\n\t" \
			"mov.l	1f, %0\n\t"		\
			"jmp	@%0\n\t"		\
			" nop\n\t"			\
			".balign 4\n"			\
			"1:	.long 2f\n"		\
			"2:"				\
			: "=&r" (__dummy));		\
	} while (0)

#define CACHE_VALID	1
#define CACHE_UPDATED	2

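/*
 * CACHE_VALID and CACHE_UPDATED are the V (valid) and U (dirty) bits
 * of an operand-cache address-array entry. cache_wback_all() walks
 * every entry of every way through the memory-mapped address array
 * and rewrites dirty entries with U cleared, which makes the hardware
 * write the line's data back to memory.
 */
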
static inline void cache_wback_all(void)
{
	unsigned long addr, data, i, j;

	jump_to_P2();
	for (i = 0; i < CACHE_OC_NUM_ENTRIES; i++) {
		for (j = 0; j < CACHE_OC_NUM_WAYS; j++) {
			addr = CACHE_OC_ADDRESS_ARRAY | (j << CACHE_OC_WAY_SHIFT)
				| (i << CACHE_OC_ENTRY_SHIFT);
			data = inl(addr);
			if (data & CACHE_UPDATED) {
				data &= ~CACHE_UPDATED;
				outl(data, addr);
			}
		}
	}
	back_to_P1();
}

#define CACHE_ENABLE	0
#define CACHE_DISABLE	1

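/*
 * Enable or disable the caches via CCR. A still-enabled operand cache
 * is written back first so no dirty data is lost; CCR itself may only
 * be modified while executing from the P2 area.
 */
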
int cache_control(unsigned int cmd)
{
	unsigned long ccr;

	jump_to_P2();
	ccr = inl(CCR);

	if (ccr & CCR_CACHE_ENABLE)
		cache_wback_all();

	if (cmd == CACHE_DISABLE)
		outl(CCR_CACHE_STOP, CCR);
	else
		outl(CCR_CACHE_INIT, CCR);
	back_to_P1();

	return 0;
}

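/*
 * Write back (without invalidating) every cache line overlapping
 * [start, end). start is rounded down to a line boundary, and because
 * the loop advances one line at a time the line containing end - 1 is
 * covered too. "ocbwb" writes a dirty operand-cache block back to
 * memory and leaves it valid.
 */
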
void flush_dcache_range(unsigned long start, unsigned long end)
{
	u32 v;

	start &= ~(L1_CACHE_BYTES - 1);
	for (v = start; v < end; v += L1_CACHE_BYTES) {
		asm volatile ("ocbwb	%0" : /* no output */
			      : "m" (__m(v)));
	}
}

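/*
 * Discard every cache line overlapping [start, end). "ocbi"
 * invalidates an operand-cache block without writing it back, so any
 * dirty data in the range is lost; only invalidate buffers whose
 * cached contents are known to be stale (e.g. after inbound DMA).
 */
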
void invalidate_dcache_range(unsigned long start, unsigned long end)
{
	u32 v;

	start &= ~(L1_CACHE_BYTES - 1);
	for (v = start; v < end; v += L1_CACHE_BYTES) {
		asm volatile ("ocbi	%0" : /* no output */
			      : "m" (__m(v)));
	}
}
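
/*
 * Illustrative DMA pattern using the two range operations (a sketch,
 * not code from this file; buf and len are hypothetical):
 *
 *	flush_dcache_range(buf, buf + len);       - before a device reads buf
 *	invalidate_dcache_range(buf, buf + len);  - before the CPU reads data
 *	                                            a device wrote into buf
 */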