
/*
 * include/asm-arm/macro.h
 *
 * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef __ASM_ARM_MACRO_H__
#define __ASM_ARM_MACRO_H__

#ifdef __ASSEMBLY__

/*
 * These macros provide a convenient way to write 8, 16 and 32 bit data
 * to any address.
 * Registers r4 and r5 are used; any data held in them is overwritten by
 * the macros.
 * The macros are valid for any ARM architecture, but they do not implement
 * any memory barriers, so caution is recommended when using them while the
 * caches are enabled or on a multi-core system.
 */

.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm

.macro	write16, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data	/* ldrh cannot take a =literal; load with ldr, store halfword */
	strh	r5, [r4]
.endm

.macro	write8, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data	/* ldrb cannot take a =literal; load with ldr, store byte */
	strb	r5, [r4]
.endm
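
/*
 * Illustrative use (address and values are hypothetical, shown only to
 * demonstrate invoking the macros above; r4 and r5 are clobbered):
 *
 *	write32	0x48004a00, 0x00000002	@ store a 32-bit word to an MMIO reg
 *	write8	0x48004a04, 0x01	@ store a single byte
 */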

/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used; any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture. The actual time spent in the
 * loop will vary from CPU to CPU though.
 */
.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1
	bcs	1b
.endm
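
/*
 * Illustrative use (the count is an arbitrary example; the delay per
 * iteration depends on the CPU, so calibrate anything timing-critical):
 *
 *	wait_timer	0x20000		@ busy-wait for roughly 0x20000 iterations
 */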

#ifdef CONFIG_ARM64

/*
 * Register aliases.
 */
lr	.req	x30

/*
 * Branch according to exception level
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, 0xc
	b.eq	\el3_label
	cmp	\xreg, 0x8
	b.eq	\el2_label
	cmp	\xreg, 0x4
	b.eq	\el1_label
.endm
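
/*
 * CurrentEL encodes the exception level in bits [3:2], so the 0xc/0x8/0x4
 * comparisons above correspond to EL3/EL2/EL1. Illustrative use (the local
 * labels are hypothetical):
 *
 *	switch_el x1, 3f, 2f, 1f
 * 3:	...				@ code for entry at EL3
 * 2:	...				@ code for entry at EL2
 * 1:	...				@ code for entry at EL1
 */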

/*
 * Branch if current processor is a slave,
 * choose processor with all zero affinity value as the master.
 */
.macro	branch_if_slave, xreg, slave_label
	mrs	\xreg, mpidr_el1
	tst	\xreg, #0xff		/* Test Affinity 0 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 1 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 2 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #16
	tst	\xreg, #0xff		/* Test Affinity 3 */
	b.ne	\slave_label
.endm
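
/*
 * Note: MPIDR_EL1 holds Aff0-Aff2 in bits [7:0], [15:8] and [23:16] and
 * Aff3 in bits [39:32]; the shifts above walk each field down into the
 * low byte so a single "tst #0xff" can test it.
 */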

/*
 * Branch if current processor is a master,
 * choose processor with all zero affinity value as the master.
 */
.macro	branch_if_master, xreg1, xreg2, master_label
	mrs	\xreg1, mpidr_el1
	lsr	\xreg2, \xreg1, #32
	lsl	\xreg1, \xreg1, #40
	lsr	\xreg1, \xreg1, #40
	orr	\xreg1, \xreg1, \xreg2
	cbz	\xreg1, \master_label
.endm
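
/*
 * Illustrative use in SMP start-up code (labels and register picks are
 * hypothetical): the core whose affinity fields are all zero continues as
 * master, every other core parks.
 *
 *	branch_if_master x0, x1, master_boot	@ master continues at master_boot
 *	b	spin_secondary			@ all other cores park here
 */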

.macro	armv8_switch_to_el2_m, xreg1
	/* 64bit EL2 | HCE | SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1 */
	mov	\xreg1, #0x5b1
	msr	scr_el3, \xreg1
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\xreg1, #0x33ff
	msr	cptr_el2, \xreg1	/* Disable coprocessor traps to EL2 */

	/*
	 * Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
	mov	\xreg1, #0x0830
	movk	\xreg1, #0x30C5, lsl #16
	msr	sctlr_el2, \xreg1

	/* Return to the EL2_SP2 mode from EL3 */
	mov	\xreg1, sp
	msr	sp_el2, \xreg1		/* Migrate SP */
	mrs	\xreg1, vbar_el3
	msr	vbar_el2, \xreg1	/* Migrate VBAR */
	mov	\xreg1, #0x3c9
	msr	spsr_el3, \xreg1	/* EL2_SP2 | D | A | I | F */
	msr	elr_el3, lr
	eret
.endm
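
/*
 * Since the macro ends with "msr elr_el3, lr" followed by "eret", execution
 * resumes at the address held in lr, now running at EL2 on SP_EL2. A minimal
 * sketch of invoking it (the wrapper label is hypothetical):
 *
 *	bl	do_switch_to_el2	@ bl leaves the return address in lr
 *	...				@ execution continues here, now in EL2
 *
 * do_switch_to_el2:
 *	armv8_switch_to_el2_m x0	@ x0 is used as scratch and clobbered
 */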

.macro	armv8_switch_to_el1_m, xreg1, xreg2
	/* Initialize Generic Timers */
	mrs	\xreg1, cnthctl_el2
	orr	\xreg1, \xreg1, #0x3	/* Enable EL1 access to timers */
	msr	cnthctl_el2, \xreg1
	msr	cntvoff_el2, xzr

	/* Initialize MPID/MPIDR registers */
	mrs	\xreg1, midr_el1
	mrs	\xreg2, mpidr_el1
	msr	vpidr_el2, \xreg1
	msr	vmpidr_el2, \xreg2

	/* Disable coprocessor traps */
	mov	\xreg1, #0x33ff
	msr	cptr_el2, \xreg1	/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\xreg1, #3 << 20
	msr	cpacr_el1, \xreg1	/* Enable FP/SIMD at EL1 */

	/* Initialize HCR_EL2 */
	mov	\xreg1, #(1 << 31)		/* 64bit EL1 */
	orr	\xreg1, \xreg1, #(1 << 29)	/* Disable HVC */
	msr	hcr_el2, \xreg1

	/*
	 * SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,EOE,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	mov	\xreg1, #0x0800
	movk	\xreg1, #0x30d0, lsl #16
	msr	sctlr_el1, \xreg1

	/* Return to the EL1_SP1 mode from EL2 */
	mov	\xreg1, sp
	msr	sp_el1, \xreg1		/* Migrate SP */
	mrs	\xreg1, vbar_el2
	msr	vbar_el1, \xreg1	/* Migrate VBAR */
	mov	\xreg1, #0x3c5
	msr	spsr_el2, \xreg1	/* EL1_SP1 | D | A | I | F */
	msr	elr_el2, lr
	eret
.endm
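
/*
 * Symmetric to the EL3-to-EL2 macro above: "msr elr_el2, lr" plus "eret"
 * resumes at the address in lr, now at EL1 on SP_EL1, so it is typically
 * reached through a "bl" wrapper in the same way as sketched earlier.
 */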

#if defined(CONFIG_GICV3)
.macro	gic_wait_for_interrupt_m xreg1
0:	wfi
	mrs	\xreg1, ICC_IAR1_EL1
	msr	ICC_EOIR1_EL1, \xreg1
	cbnz	\xreg1, 0b
.endm
#elif defined(CONFIG_GICV2)
.macro	gic_wait_for_interrupt_m xreg1, wreg2
0:	wfi
	ldr	\wreg2, [\xreg1, GICC_AIAR]
	str	\wreg2, [\xreg1, GICC_AEOIR]
	and	\wreg2, \wreg2, #0x3ff
	cbnz	\wreg2, 0b
.endm
#endif
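
/*
 * Illustrative use of the GICv2 variant (the base-address symbol and the
 * register picks are hypothetical; GICC_AIAR/GICC_AEOIR are CPU-interface
 * register offsets applied to the base passed in \xreg1):
 *
 *	ldr	x0, =GICC_BASE		@ GIC CPU interface base address
 *	gic_wait_for_interrupt_m x0, w1
 */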

#endif /* CONFIG_ARM64 */

#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_MACRO_H__ */