/*
 * OMAP44xx EMIF header
 *
 * Copyright (C) 2009-2010 Texas Instruments, Inc.
 *
 * Aneesh V <aneesh@ti.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef _EMIF_H_
#define _EMIF_H_
#include <asm/types.h>
#include <common.h>
#include <asm/io.h>
/* Base address */
#ifndef EMIF1_BASE
#define EMIF1_BASE 0x4c000000
#endif
#define EMIF2_BASE 0x4d000000
#define EMIF_4D 0x4
#define EMIF_4D5 0x5
/* Registers shifts, masks and values */
/* EMIF_MOD_ID_REV */
#define EMIF_REG_SCHEME_SHIFT 30
#define EMIF_REG_SCHEME_MASK (0x3 << 30)
#define EMIF_REG_MODULE_ID_SHIFT 16
#define EMIF_REG_MODULE_ID_MASK (0xfff << 16)
#define EMIF_REG_RTL_VERSION_SHIFT 11
#define EMIF_REG_RTL_VERSION_MASK (0x1f << 11)
#define EMIF_REG_MAJOR_REVISION_SHIFT 8
#define EMIF_REG_MAJOR_REVISION_MASK (0x7 << 8)
#define EMIF_REG_MINOR_REVISION_SHIFT 0
#define EMIF_REG_MINOR_REVISION_MASK (0x3f << 0)
/* STATUS */
#define EMIF_REG_BE_SHIFT 31
#define EMIF_REG_BE_MASK (1 << 31)
#define EMIF_REG_DUAL_CLK_MODE_SHIFT 30
#define EMIF_REG_DUAL_CLK_MODE_MASK (1 << 30)
#define EMIF_REG_FAST_INIT_SHIFT 29
#define EMIF_REG_FAST_INIT_MASK (1 << 29)
#define EMIF_REG_LEVELING_TO_SHIFT 4
#define EMIF_REG_LEVELING_TO_MASK (7 << 4)
#define EMIF_REG_PHY_DLL_READY_SHIFT 2
#define EMIF_REG_PHY_DLL_READY_MASK (1 << 2)
/* SDRAM_CONFIG */
#define EMIF_REG_SDRAM_TYPE_SHIFT 29
#define EMIF_REG_SDRAM_TYPE_MASK (0x7 << 29)
#define EMIF_REG_SDRAM_TYPE_DDR1 0
#define EMIF_REG_SDRAM_TYPE_LPDDR1 1
#define EMIF_REG_SDRAM_TYPE_DDR2 2
#define EMIF_REG_SDRAM_TYPE_DDR3 3
#define EMIF_REG_SDRAM_TYPE_LPDDR2_S4 4
#define EMIF_REG_SDRAM_TYPE_LPDDR2_S2 5
#define EMIF_REG_IBANK_POS_SHIFT 27
#define EMIF_REG_IBANK_POS_MASK (0x3 << 27)
#define EMIF_REG_DDR_TERM_SHIFT 24
#define EMIF_REG_DDR_TERM_MASK (0x7 << 24)
#define EMIF_REG_DDR2_DDQS_SHIFT 23
#define EMIF_REG_DDR2_DDQS_MASK (1 << 23)
#define EMIF_REG_DYN_ODT_SHIFT 21
#define EMIF_REG_DYN_ODT_MASK (0x3 << 21)
#define EMIF_REG_DDR_DISABLE_DLL_SHIFT 20
#define EMIF_REG_DDR_DISABLE_DLL_MASK (1 << 20)
#define EMIF_REG_SDRAM_DRIVE_SHIFT 18
#define EMIF_REG_SDRAM_DRIVE_MASK (0x3 << 18)
#define EMIF_REG_CWL_SHIFT 16
#define EMIF_REG_CWL_MASK (0x3 << 16)
#define EMIF_REG_NARROW_MODE_SHIFT 14
#define EMIF_REG_NARROW_MODE_MASK (0x3 << 14)
#define EMIF_REG_CL_SHIFT 10
#define EMIF_REG_CL_MASK (0xf << 10)
#define EMIF_REG_ROWSIZE_SHIFT 7
#define EMIF_REG_ROWSIZE_MASK (0x7 << 7)
#define EMIF_REG_IBANK_SHIFT 4
#define EMIF_REG_IBANK_MASK (0x7 << 4)
#define EMIF_REG_EBANK_SHIFT 3
#define EMIF_REG_EBANK_MASK (1 << 3)
#define EMIF_REG_PAGESIZE_SHIFT 0
#define EMIF_REG_PAGESIZE_MASK (0x7 << 0)
/* SDRAM_CONFIG_2 */
#define EMIF_REG_CS1NVMEN_SHIFT 30
#define EMIF_REG_CS1NVMEN_MASK (1 << 30)
#define EMIF_REG_EBANK_POS_SHIFT 27
#define EMIF_REG_EBANK_POS_MASK (1 << 27)
#define EMIF_REG_RDBNUM_SHIFT 4
#define EMIF_REG_RDBNUM_MASK (0x3 << 4)
#define EMIF_REG_RDBSIZE_SHIFT 0
#define EMIF_REG_RDBSIZE_MASK (0x7 << 0)
/* SDRAM_REF_CTRL */
#define EMIF_REG_INITREF_DIS_SHIFT 31
#define EMIF_REG_INITREF_DIS_MASK (1 << 31)
#define EMIF_REG_SRT_SHIFT 29
#define EMIF_REG_SRT_MASK (1 << 29)
#define EMIF_REG_ASR_SHIFT 28
#define EMIF_REG_ASR_MASK (1 << 28)
#define EMIF_REG_PASR_SHIFT 24
#define EMIF_REG_PASR_MASK (0x7 << 24)
#define EMIF_REG_REFRESH_RATE_SHIFT 0
#define EMIF_REG_REFRESH_RATE_MASK (0xffff << 0)
/* SDRAM_REF_CTRL_SHDW */
#define EMIF_REG_REFRESH_RATE_SHDW_SHIFT 0
#define EMIF_REG_REFRESH_RATE_SHDW_MASK (0xffff << 0)
/* SDRAM_TIM_1 */
#define EMIF_REG_T_RP_SHIFT 25
#define EMIF_REG_T_RP_MASK (0xf << 25)
#define EMIF_REG_T_RCD_SHIFT 21
#define EMIF_REG_T_RCD_MASK (0xf << 21)
#define EMIF_REG_T_WR_SHIFT 17
#define EMIF_REG_T_WR_MASK (0xf << 17)
#define EMIF_REG_T_RAS_SHIFT 12
#define EMIF_REG_T_RAS_MASK (0x1f << 12)
#define EMIF_REG_T_RC_SHIFT 6
#define EMIF_REG_T_RC_MASK (0x3f << 6)
#define EMIF_REG_T_RRD_SHIFT 3
#define EMIF_REG_T_RRD_MASK (0x7 << 3)
#define EMIF_REG_T_WTR_SHIFT 0
#define EMIF_REG_T_WTR_MASK (0x7 << 0)
/* SDRAM_TIM_1_SHDW */
#define EMIF_REG_T_RP_SHDW_SHIFT 25
#define EMIF_REG_T_RP_SHDW_MASK (0xf << 25)
#define EMIF_REG_T_RCD_SHDW_SHIFT 21
#define EMIF_REG_T_RCD_SHDW_MASK (0xf << 21)
#define EMIF_REG_T_WR_SHDW_SHIFT 17
#define EMIF_REG_T_WR_SHDW_MASK (0xf << 17)
#define EMIF_REG_T_RAS_SHDW_SHIFT 12
#define EMIF_REG_T_RAS_SHDW_MASK (0x1f << 12)
#define EMIF_REG_T_RC_SHDW_SHIFT 6
#define EMIF_REG_T_RC_SHDW_MASK (0x3f << 6)
#define EMIF_REG_T_RRD_SHDW_SHIFT 3
#define EMIF_REG_T_RRD_SHDW_MASK (0x7 << 3)
#define EMIF_REG_T_WTR_SHDW_SHIFT 0
#define EMIF_REG_T_WTR_SHDW_MASK (0x7 << 0)
/* SDRAM_TIM_2 */
#define EMIF_REG_T_XP_SHIFT 28
#define EMIF_REG_T_XP_MASK (0x7 << 28)
#define EMIF_REG_T_ODT_SHIFT 25
#define EMIF_REG_T_ODT_MASK (0x7 << 25)
#define EMIF_REG_T_XSNR_SHIFT 16
#define EMIF_REG_T_XSNR_MASK (0x1ff << 16)
#define EMIF_REG_T_XSRD_SHIFT 6
#define EMIF_REG_T_XSRD_MASK (0x3ff << 6)
#define EMIF_REG_T_RTP_SHIFT 3
#define EMIF_REG_T_RTP_MASK (0x7 << 3)
#define EMIF_REG_T_CKE_SHIFT 0
#define EMIF_REG_T_CKE_MASK (0x7 << 0)
/* SDRAM_TIM_2_SHDW */
#define EMIF_REG_T_XP_SHDW_SHIFT 28
#define EMIF_REG_T_XP_SHDW_MASK (0x7 << 28)
#define EMIF_REG_T_ODT_SHDW_SHIFT 25
#define EMIF_REG_T_ODT_SHDW_MASK (0x7 << 25)
#define EMIF_REG_T_XSNR_SHDW_SHIFT 16
#define EMIF_REG_T_XSNR_SHDW_MASK (0x1ff << 16)
#define EMIF_REG_T_XSRD_SHDW_SHIFT 6
#define EMIF_REG_T_XSRD_SHDW_MASK (0x3ff << 6)
#define EMIF_REG_T_RTP_SHDW_SHIFT 3
#define EMIF_REG_T_RTP_SHDW_MASK (0x7 << 3)
#define EMIF_REG_T_CKE_SHDW_SHIFT 0
#define EMIF_REG_T_CKE_SHDW_MASK (0x7 << 0)
/* SDRAM_TIM_3 */
#define EMIF_REG_T_CKESR_SHIFT 21
#define EMIF_REG_T_CKESR_MASK (0x7 << 21)
#define EMIF_REG_ZQ_ZQCS_SHIFT 15
#define EMIF_REG_ZQ_ZQCS_MASK (0x3f << 15)
#define EMIF_REG_T_TDQSCKMAX_SHIFT 13
#define EMIF_REG_T_TDQSCKMAX_MASK (0x3 << 13)
#define EMIF_REG_T_RFC_SHIFT 4
#define EMIF_REG_T_RFC_MASK (0x1ff << 4)
#define EMIF_REG_T_RAS_MAX_SHIFT 0
#define EMIF_REG_T_RAS_MAX_MASK (0xf << 0)
/* SDRAM_TIM_3_SHDW */
#define EMIF_REG_T_CKESR_SHDW_SHIFT 21
#define EMIF_REG_T_CKESR_SHDW_MASK (0x7 << 21)
#define EMIF_REG_ZQ_ZQCS_SHDW_SHIFT 15
#define EMIF_REG_ZQ_ZQCS_SHDW_MASK (0x3f << 15)
#define EMIF_REG_T_TDQSCKMAX_SHDW_SHIFT 13
#define EMIF_REG_T_TDQSCKMAX_SHDW_MASK (0x3 << 13)
#define EMIF_REG_T_RFC_SHDW_SHIFT 4
#define EMIF_REG_T_RFC_SHDW_MASK (0x1ff << 4)
#define EMIF_REG_T_RAS_MAX_SHDW_SHIFT 0
#define EMIF_REG_T_RAS_MAX_SHDW_MASK (0xf << 0)
/* LPDDR2_NVM_TIM */
#define EMIF_REG_NVM_T_XP_SHIFT 28
#define EMIF_REG_NVM_T_XP_MASK (0x7 << 28)
#define EMIF_REG_NVM_T_WTR_SHIFT 24
#define EMIF_REG_NVM_T_WTR_MASK (0x7 << 24)
#define EMIF_REG_NVM_T_RP_SHIFT 20
#define EMIF_REG_NVM_T_RP_MASK (0xf << 20)
#define EMIF_REG_NVM_T_WRA_SHIFT 16
#define EMIF_REG_NVM_T_WRA_MASK (0xf << 16)
#define EMIF_REG_NVM_T_RRD_SHIFT 8
#define EMIF_REG_NVM_T_RRD_MASK (0xff << 8)
#define EMIF_REG_NVM_T_RCDMIN_SHIFT 0
#define EMIF_REG_NVM_T_RCDMIN_MASK (0xff << 0)
/* LPDDR2_NVM_TIM_SHDW */
#define EMIF_REG_NVM_T_XP_SHDW_SHIFT 28
#define EMIF_REG_NVM_T_XP_SHDW_MASK (0x7 << 28)
#define EMIF_REG_NVM_T_WTR_SHDW_SHIFT 24
#define EMIF_REG_NVM_T_WTR_SHDW_MASK (0x7 << 24)
#define EMIF_REG_NVM_T_RP_SHDW_SHIFT 20
#define EMIF_REG_NVM_T_RP_SHDW_MASK (0xf << 20)
#define EMIF_REG_NVM_T_WRA_SHDW_SHIFT 16
#define EMIF_REG_NVM_T_WRA_SHDW_MASK (0xf << 16)
#define EMIF_REG_NVM_T_RRD_SHDW_SHIFT 8
#define EMIF_REG_NVM_T_RRD_SHDW_MASK (0xff << 8)
#define EMIF_REG_NVM_T_RCDMIN_SHDW_SHIFT 0
#define EMIF_REG_NVM_T_RCDMIN_SHDW_MASK (0xff << 0)
/* PWR_MGMT_CTRL */
#define EMIF_REG_IDLEMODE_SHIFT 30
#define EMIF_REG_IDLEMODE_MASK (0x3 << 30)
#define EMIF_REG_PD_TIM_SHIFT 12
#define EMIF_REG_PD_TIM_MASK (0xf << 12)
#define EMIF_REG_DPD_EN_SHIFT 11
#define EMIF_REG_DPD_EN_MASK (1 << 11)
#define EMIF_REG_LP_MODE_SHIFT 8
#define EMIF_REG_LP_MODE_MASK (0x7 << 8)
#define EMIF_REG_SR_TIM_SHIFT 4
#define EMIF_REG_SR_TIM_MASK (0xf << 4)
#define EMIF_REG_CS_TIM_SHIFT 0
#define EMIF_REG_CS_TIM_MASK (0xf << 0)
/* PWR_MGMT_CTRL_SHDW */
#define EMIF_REG_PD_TIM_SHDW_SHIFT 12
#define EMIF_REG_PD_TIM_SHDW_MASK (0xf << 12)
#define EMIF_REG_SR_TIM_SHDW_SHIFT 4
#define EMIF_REG_SR_TIM_SHDW_MASK (0xf << 4)
#define EMIF_REG_CS_TIM_SHDW_SHIFT 0
#define EMIF_REG_CS_TIM_SHDW_MASK (0xf << 0)
/* LPDDR2_MODE_REG_DATA */
#define EMIF_REG_VALUE_0_SHIFT 0
#define EMIF_REG_VALUE_0_MASK (0x7f << 0)
/* LPDDR2_MODE_REG_CFG */
#define EMIF_REG_CS_SHIFT 31
#define EMIF_REG_CS_MASK (1 << 31)
#define EMIF_REG_REFRESH_EN_SHIFT 30
#define EMIF_REG_REFRESH_EN_MASK (1 << 30)
#define EMIF_REG_ADDRESS_SHIFT 0
#define EMIF_REG_ADDRESS_MASK (0xff << 0)
/* OCP_CONFIG */
#define EMIF_REG_SYS_THRESH_MAX_SHIFT 24
#define EMIF_REG_SYS_THRESH_MAX_MASK (0xf << 24)
#define EMIF_REG_MPU_THRESH_MAX_SHIFT 20
#define EMIF_REG_MPU_THRESH_MAX_MASK (0xf << 20)
#define EMIF_REG_LL_THRESH_MAX_SHIFT 16
#define EMIF_REG_LL_THRESH_MAX_MASK (0xf << 16)
#define EMIF_REG_PR_OLD_COUNT_SHIFT 0
#define EMIF_REG_PR_OLD_COUNT_MASK (0xff << 0)
/* OCP_CFG_VAL_1 */
#define EMIF_REG_SYS_BUS_WIDTH_SHIFT 30
#define EMIF_REG_SYS_BUS_WIDTH_MASK (0x3 << 30)
#define EMIF_REG_LL_BUS_WIDTH_SHIFT 28
#define EMIF_REG_LL_BUS_WIDTH_MASK (0x3 << 28)
#define EMIF_REG_WR_FIFO_DEPTH_SHIFT 8
#define EMIF_REG_WR_FIFO_DEPTH_MASK (0xff << 8)
#define EMIF_REG_CMD_FIFO_DEPTH_SHIFT 0
#define EMIF_REG_CMD_FIFO_DEPTH_MASK (0xff << 0)
/* OCP_CFG_VAL_2 */
#define EMIF_REG_RREG_FIFO_DEPTH_SHIFT 16
#define EMIF_REG_RREG_FIFO_DEPTH_MASK (0xff << 16)
#define EMIF_REG_RSD_FIFO_DEPTH_SHIFT 8
#define EMIF_REG_RSD_FIFO_DEPTH_MASK (0xff << 8)
#define EMIF_REG_RCMD_FIFO_DEPTH_SHIFT 0
#define EMIF_REG_RCMD_FIFO_DEPTH_MASK (0xff << 0)
/* IODFT_TLGC */
#define EMIF_REG_TLEC_SHIFT 16
#define EMIF_REG_TLEC_MASK (0xffff << 16)
#define EMIF_REG_MT_SHIFT 14
#define EMIF_REG_MT_MASK (1 << 14)
#define EMIF_REG_ACT_CAP_EN_SHIFT 13
#define EMIF_REG_ACT_CAP_EN_MASK (1 << 13)
#define EMIF_REG_OPG_LD_SHIFT 12
#define EMIF_REG_OPG_LD_MASK (1 << 12)
#define EMIF_REG_RESET_PHY_SHIFT 10
#define EMIF_REG_RESET_PHY_MASK (1 << 10)
#define EMIF_REG_MMS_SHIFT 8
#define EMIF_REG_MMS_MASK (1 << 8)
#define EMIF_REG_MC_SHIFT 4
#define EMIF_REG_MC_MASK (0x3 << 4)
#define EMIF_REG_PC_SHIFT 1
#define EMIF_REG_PC_MASK (0x7 << 1)
#define EMIF_REG_TM_SHIFT 0
#define EMIF_REG_TM_MASK (1 << 0)
/* IODFT_CTRL_MISR_RSLT */
#define EMIF_REG_DQM_TLMR_SHIFT 16
#define EMIF_REG_DQM_TLMR_MASK (0x3ff << 16)
#define EMIF_REG_CTL_TLMR_SHIFT 0
#define EMIF_REG_CTL_TLMR_MASK (0x7ff << 0)
/* IODFT_ADDR_MISR_RSLT */
#define EMIF_REG_ADDR_TLMR_SHIFT 0
#define EMIF_REG_ADDR_TLMR_MASK (0x1fffff << 0)
/* IODFT_DATA_MISR_RSLT_1 */
#define EMIF_REG_DATA_TLMR_31_0_SHIFT 0
#define EMIF_REG_DATA_TLMR_31_0_MASK (0xffffffff << 0)
/* IODFT_DATA_MISR_RSLT_2 */
#define EMIF_REG_DATA_TLMR_63_32_SHIFT 0
#define EMIF_REG_DATA_TLMR_63_32_MASK (0xffffffff << 0)
/* IODFT_DATA_MISR_RSLT_3 */
#define EMIF_REG_DATA_TLMR_66_64_SHIFT 0
#define EMIF_REG_DATA_TLMR_66_64_MASK (0x7 << 0)
/* PERF_CNT_1 */
#define EMIF_REG_COUNTER1_SHIFT 0
#define EMIF_REG_COUNTER1_MASK (0xffffffff << 0)
/* PERF_CNT_2 */
#define EMIF_REG_COUNTER2_SHIFT 0
#define EMIF_REG_COUNTER2_MASK (0xffffffff << 0)
/* PERF_CNT_CFG */
#define EMIF_REG_CNTR2_MCONNID_EN_SHIFT 31
#define EMIF_REG_CNTR2_MCONNID_EN_MASK (1 << 31)
#define EMIF_REG_CNTR2_REGION_EN_SHIFT 30
#define EMIF_REG_CNTR2_REGION_EN_MASK (1 << 30)
#define EMIF_REG_CNTR2_CFG_SHIFT 16
#define EMIF_REG_CNTR2_CFG_MASK (0xf << 16)
#define EMIF_REG_CNTR1_MCONNID_EN_SHIFT 15
#define EMIF_REG_CNTR1_MCONNID_EN_MASK (1 << 15)
#define EMIF_REG_CNTR1_REGION_EN_SHIFT 14
#define EMIF_REG_CNTR1_REGION_EN_MASK (1 << 14)
#define EMIF_REG_CNTR1_CFG_SHIFT 0
#define EMIF_REG_CNTR1_CFG_MASK (0xf << 0)
/* PERF_CNT_SEL */
#define EMIF_REG_MCONNID2_SHIFT 24
#define EMIF_REG_MCONNID2_MASK (0xff << 24)
#define EMIF_REG_REGION_SEL2_SHIFT 16
#define EMIF_REG_REGION_SEL2_MASK (0x3 << 16)
#define EMIF_REG_MCONNID1_SHIFT 8
#define EMIF_REG_MCONNID1_MASK (0xff << 8)
#define EMIF_REG_REGION_SEL1_SHIFT 0
#define EMIF_REG_REGION_SEL1_MASK (0x3 << 0)
/* PERF_CNT_TIM */
#define EMIF_REG_TOTAL_TIME_SHIFT 0
#define EMIF_REG_TOTAL_TIME_MASK (0xffffffff << 0)
/* READ_IDLE_CTRL */
#define EMIF_REG_READ_IDLE_LEN_SHIFT 16
#define EMIF_REG_READ_IDLE_LEN_MASK (0xf << 16)
#define EMIF_REG_READ_IDLE_INTERVAL_SHIFT 0
#define EMIF_REG_READ_IDLE_INTERVAL_MASK (0x1ff << 0)
/* READ_IDLE_CTRL_SHDW */
#define EMIF_REG_READ_IDLE_LEN_SHDW_SHIFT 16
#define EMIF_REG_READ_IDLE_LEN_SHDW_MASK (0xf << 16)
#define EMIF_REG_READ_IDLE_INTERVAL_SHDW_SHIFT 0
#define EMIF_REG_READ_IDLE_INTERVAL_SHDW_MASK (0x1ff << 0)
/* IRQ_EOI */
#define EMIF_REG_EOI_SHIFT 0
#define EMIF_REG_EOI_MASK (1 << 0)
/* IRQSTATUS_RAW_SYS */
#define EMIF_REG_DNV_SYS_SHIFT 2
#define EMIF_REG_DNV_SYS_MASK (1 << 2)
#define EMIF_REG_TA_SYS_SHIFT 1
#define EMIF_REG_TA_SYS_MASK (1 << 1)
#define EMIF_REG_ERR_SYS_SHIFT 0
#define EMIF_REG_ERR_SYS_MASK (1 << 0)
/* IRQSTATUS_RAW_LL */
#define EMIF_REG_DNV_LL_SHIFT 2
#define EMIF_REG_DNV_LL_MASK (1 << 2)
#define EMIF_REG_TA_LL_SHIFT 1
#define EMIF_REG_TA_LL_MASK (1 << 1)
#define EMIF_REG_ERR_LL_SHIFT 0
#define EMIF_REG_ERR_LL_MASK (1 << 0)
/* IRQSTATUS_SYS */
/* IRQSTATUS_LL */
/* IRQENABLE_SET_SYS */
#define EMIF_REG_EN_DNV_SYS_SHIFT 2
#define EMIF_REG_EN_DNV_SYS_MASK (1 << 2)
#define EMIF_REG_EN_TA_SYS_SHIFT 1
#define EMIF_REG_EN_TA_SYS_MASK (1 << 1)
#define EMIF_REG_EN_ERR_SYS_SHIFT 0
#define EMIF_REG_EN_ERR_SYS_MASK (1 << 0)
/* IRQENABLE_SET_LL */
#define EMIF_REG_EN_DNV_LL_SHIFT 2
#define EMIF_REG_EN_DNV_LL_MASK (1 << 2)
#define EMIF_REG_EN_TA_LL_SHIFT 1
#define EMIF_REG_EN_TA_LL_MASK (1 << 1)
#define EMIF_REG_EN_ERR_LL_SHIFT 0
#define EMIF_REG_EN_ERR_LL_MASK (1 << 0)
/* IRQENABLE_CLR_SYS */
/* IRQENABLE_CLR_LL */
/* ZQ_CONFIG */
#define EMIF_REG_ZQ_CS1EN_SHIFT 31
#define EMIF_REG_ZQ_CS1EN_MASK (1 << 31)
#define EMIF_REG_ZQ_CS0EN_SHIFT 30
#define EMIF_REG_ZQ_CS0EN_MASK (1 << 30)
#define EMIF_REG_ZQ_DUALCALEN_SHIFT 29
#define EMIF_REG_ZQ_DUALCALEN_MASK (1 << 29)
#define EMIF_REG_ZQ_SFEXITEN_SHIFT 28
#define EMIF_REG_ZQ_SFEXITEN_MASK (1 << 28)
#define EMIF_REG_ZQ_ZQINIT_MULT_SHIFT 18
#define EMIF_REG_ZQ_ZQINIT_MULT_MASK (0x3 << 18)
#define EMIF_REG_ZQ_ZQCL_MULT_SHIFT 16
#define EMIF_REG_ZQ_ZQCL_MULT_MASK (0x3 << 16)
#define EMIF_REG_ZQ_REFINTERVAL_SHIFT 0
#define EMIF_REG_ZQ_REFINTERVAL_MASK (0xffff << 0)
/* TEMP_ALERT_CONFIG */
#define EMIF_REG_TA_CS1EN_SHIFT 31
#define EMIF_REG_TA_CS1EN_MASK (1 << 31)
#define EMIF_REG_TA_CS0EN_SHIFT 30
#define EMIF_REG_TA_CS0EN_MASK (1 << 30)
#define EMIF_REG_TA_SFEXITEN_SHIFT 28
#define EMIF_REG_TA_SFEXITEN_MASK (1 << 28)
#define EMIF_REG_TA_DEVWDT_SHIFT 26
#define EMIF_REG_TA_DEVWDT_MASK (0x3 << 26)
#define EMIF_REG_TA_DEVCNT_SHIFT 24
#define EMIF_REG_TA_DEVCNT_MASK (0x3 << 24)
#define EMIF_REG_TA_REFINTERVAL_SHIFT 0
#define EMIF_REG_TA_REFINTERVAL_MASK (0x3fffff << 0)
/* OCP_ERR_LOG */
#define EMIF_REG_MADDRSPACE_SHIFT 14
#define EMIF_REG_MADDRSPACE_MASK (0x3 << 14)
#define EMIF_REG_MBURSTSEQ_SHIFT 11
#define EMIF_REG_MBURSTSEQ_MASK (0x7 << 11)
#define EMIF_REG_MCMD_SHIFT 8
#define EMIF_REG_MCMD_MASK (0x7 << 8)
#define EMIF_REG_MCONNID_SHIFT 0
#define EMIF_REG_MCONNID_MASK (0xff << 0)
/* DDR_PHY_CTRL_1 */
#define EMIF_REG_DDR_PHY_CTRL_1_SHIFT 4
#define EMIF_REG_DDR_PHY_CTRL_1_MASK (0xfffffff << 4)
#define EMIF_REG_READ_LATENCY_SHIFT 0
#define EMIF_REG_READ_LATENCY_MASK (0xf << 0)
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHIFT 4
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_MASK (0xFF << 4)
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHIFT 12
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_MASK (0xFFFFF << 12)
/* DDR_PHY_CTRL_1_SHDW */
#define EMIF_REG_DDR_PHY_CTRL_1_SHDW_SHIFT 4
#define EMIF_REG_DDR_PHY_CTRL_1_SHDW_MASK (0xfffffff << 4)
#define EMIF_REG_READ_LATENCY_SHDW_SHIFT 0
#define EMIF_REG_READ_LATENCY_SHDW_MASK (0xf << 0)
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHDW_SHIFT 4
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHDW_MASK (0xFF << 4)
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHDW_SHIFT 12
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHDW_MASK (0xFFFFF << 12)
#define EMIF_DDR_PHY_CTRL_1_WRLVL_MASK_SHIFT 25
#define EMIF_DDR_PHY_CTRL_1_WRLVL_MASK_MASK (1 << 25)
#define EMIF_DDR_PHY_CTRL_1_RDLVLGATE_MASK_SHIFT 26
#define EMIF_DDR_PHY_CTRL_1_RDLVLGATE_MASK_MASK (1 << 26)
#define EMIF_DDR_PHY_CTRL_1_RDLVL_MASK_SHIFT 27
#define EMIF_DDR_PHY_CTRL_1_RDLVL_MASK_MASK (1 << 27)
/* DDR_PHY_CTRL_2 */
#define EMIF_REG_DDR_PHY_CTRL_2_SHIFT 0
#define EMIF_REG_DDR_PHY_CTRL_2_MASK (0xffffffff << 0)
/* EMIF_READ_WRITE_LEVELING_CONTROL */
#define EMIF_REG_RDWRLVLFULL_START_SHIFT 31
#define EMIF_REG_RDWRLVLFULL_START_MASK (1 << 31)
#define EMIF_REG_RDWRLVLINC_PRE_SHIFT 24
#define EMIF_REG_RDWRLVLINC_PRE_MASK (0x7F << 24)
#define EMIF_REG_RDLVLINC_INT_SHIFT 16
#define EMIF_REG_RDLVLINC_INT_MASK (0xFF << 16)
#define EMIF_REG_RDLVLGATEINC_INT_SHIFT 8
#define EMIF_REG_RDLVLGATEINC_INT_MASK (0xFF << 8)
#define EMIF_REG_WRLVLINC_INT_SHIFT 0
#define EMIF_REG_WRLVLINC_INT_MASK (0xFF << 0)
/* EMIF_READ_WRITE_LEVELING_RAMP_CONTROL */
#define EMIF_REG_RDWRLVL_EN_SHIFT 31
#define EMIF_REG_RDWRLVL_EN_MASK (1 << 31)
#define EMIF_REG_RDWRLVLINC_RMP_PRE_SHIFT 24
#define EMIF_REG_RDWRLVLINC_RMP_PRE_MASK (0x7F << 24)
#define EMIF_REG_RDLVLINC_RMP_INT_SHIFT 16
#define EMIF_REG_RDLVLINC_RMP_INT_MASK (0xFF << 16)
#define EMIF_REG_RDLVLGATEINC_RMP_INT_SHIFT 8
#define EMIF_REG_RDLVLGATEINC_RMP_INT_MASK (0xFF << 8)
#define EMIF_REG_WRLVLINC_RMP_INT_SHIFT 0
#define EMIF_REG_WRLVLINC_RMP_INT_MASK (0xFF << 0)
/* EMIF_READ_WRITE_LEVELING_RAMP_WINDOW */
#define EMIF_REG_RDWRLVLINC_RMP_WIN_SHIFT 0
#define EMIF_REG_RDWRLVLINC_RMP_WIN_MASK (0x1FFF << 0)
/* EMIF_PHY_CTRL_36 */
#define EMIF_REG_PHY_FIFO_WE_IN_MISALINED_CLR (1 << 8)
#define PHY_RDDQS_RATIO_REGS 5
#define PHY_FIFO_WE_SLAVE_RATIO_REGS 5
#define PHY_REG_WR_DQ_SLAVE_RATIO_REGS 10
/* Leveling Fields */
#define DDR3_WR_LVL_INT 0x73
#define DDR3_RD_LVL_INT 0x33
#define DDR3_RD_LVL_GATE_INT 0x59
#define RD_RW_LVL_INC_PRE 0x0
#define DDR3_FULL_LVL (1 << EMIF_REG_RDWRLVL_EN_SHIFT)
#define DDR3_INC_LVL ((DDR3_WR_LVL_INT << EMIF_REG_WRLVLINC_INT_SHIFT) \
	| (DDR3_RD_LVL_GATE_INT << EMIF_REG_RDLVLGATEINC_INT_SHIFT) \
	| (DDR3_RD_LVL_INT << EMIF_REG_RDLVLINC_RMP_INT_SHIFT) \
	| (RD_RW_LVL_INC_PRE << EMIF_REG_RDWRLVLINC_RMP_PRE_SHIFT))
#define SDRAM_CONFIG_EXT_RD_LVL_11_SAMPLES 0x0000C1A7
#define SDRAM_CONFIG_EXT_RD_LVL_4_SAMPLES 0x000001A7
#define SDRAM_CONFIG_EXT_RD_LVL_11_SAMPLES_ES2 0x0000C1C7
/* DMM */
#define DMM_BASE 0x4E000040
/* Memory Adapter */
#define MA_BASE 0x482AF040
#define MA_PRIORITY 0x482A2000
#define MA_HIMEM_INTERLEAVE_UN_SHIFT 8
#define MA_HIMEM_INTERLEAVE_UN_MASK (1 << 8)
/* DMM_LISA_MAP */
#define EMIF_SYS_ADDR_SHIFT 24
#define EMIF_SYS_ADDR_MASK (0xff << 24)
#define EMIF_SYS_SIZE_SHIFT 20
#define EMIF_SYS_SIZE_MASK (0x7 << 20)
#define EMIF_SDRC_INTL_SHIFT 18
#define EMIF_SDRC_INTL_MASK (0x3 << 18)
#define EMIF_SDRC_ADDRSPC_SHIFT 16
#define EMIF_SDRC_ADDRSPC_MASK (0x3 << 16)
#define EMIF_SDRC_MAP_SHIFT 8
#define EMIF_SDRC_MAP_MASK (0x3 << 8)
#define EMIF_SDRC_ADDR_SHIFT 0
#define EMIF_SDRC_ADDR_MASK (0xff << 0)
/* DMM_LISA_MAP fields */
#define DMM_SDRC_MAP_UNMAPPED 0
#define DMM_SDRC_MAP_EMIF1_ONLY 1
#define DMM_SDRC_MAP_EMIF2_ONLY 2
#define DMM_SDRC_MAP_EMIF1_AND_EMIF2 3
#define DMM_SDRC_INTL_NONE 0
#define DMM_SDRC_INTL_128B 1
#define DMM_SDRC_INTL_256B 2
#define DMM_SDRC_INTL_512 3
#define DMM_SDRC_ADDR_SPC_SDRAM 0
#define DMM_SDRC_ADDR_SPC_NVM 1
#define DMM_SDRC_ADDR_SPC_INVALID 2
#define DMM_LISA_MAP_INTERLEAVED_BASE_VAL (\
	(DMM_SDRC_MAP_EMIF1_AND_EMIF2 << EMIF_SDRC_MAP_SHIFT) |\
	(DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT) |\
	(DMM_SDRC_INTL_128B << EMIF_SDRC_INTL_SHIFT) |\
	(CONFIG_SYS_SDRAM_BASE << EMIF_SYS_ADDR_SHIFT))
#define DMM_LISA_MAP_EMIF1_ONLY_BASE_VAL (\
	(DMM_SDRC_MAP_EMIF1_ONLY << EMIF_SDRC_MAP_SHIFT)|\
	(DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT)|\
	(DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT))
#define DMM_LISA_MAP_EMIF2_ONLY_BASE_VAL (\
	(DMM_SDRC_MAP_EMIF2_ONLY << EMIF_SDRC_MAP_SHIFT)|\
	(DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT)|\
	(DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT))
/* Trap for invalid TILER PAT entries */
#define DMM_LISA_MAP_0_INVAL_ADDR_TRAP (\
	(0 << EMIF_SDRC_ADDR_SHIFT) |\
	(DMM_SDRC_MAP_EMIF1_ONLY << EMIF_SDRC_MAP_SHIFT)|\
	(DMM_SDRC_ADDR_SPC_INVALID << EMIF_SDRC_ADDRSPC_SHIFT)|\
	(DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT)|\
	(0xFF << EMIF_SYS_ADDR_SHIFT))
#define EMIF_EXT_PHY_CTRL_TIMING_REG 0x5
/* EMIF ECC CTRL reg */
#define EMIF_ECC_CTRL_REG_ECC_EN_SHIFT 31
#define EMIF_ECC_CTRL_REG_ECC_EN_MASK (1 << 31)
#define EMIF_ECC_CTRL_REG_ECC_ADDR_RGN_PROT_SHIFT 30
#define EMIF_ECC_CTRL_REG_ECC_ADDR_RGN_PROT_MASK (1 << 30)
#define EMIF_ECC_CTRL_REG_ECC_VERIFY_DIS_SHIFT 29
#define EMIF_ECC_CTRL_REG_ECC_VERIFY_DIS_MASK (1 << 29)
#define EMIF_ECC_REG_RMW_EN_SHIFT 28
#define EMIF_ECC_REG_RMW_EN_MASK (1 << 28)
#define EMIF_ECC_REG_ECC_ADDR_RGN_2_EN_SHIFT 1
#define EMIF_ECC_REG_ECC_ADDR_RGN_2_EN_MASK (1 << 1)
#define EMIF_ECC_REG_ECC_ADDR_RGN_1_EN_SHIFT 0
#define EMIF_ECC_REG_ECC_ADDR_RGN_1_EN_MASK (1 << 0)
/* EMIF ECC ADDRESS RANGE */
#define EMIF_ECC_REG_ECC_END_ADDR_SHIFT 16
#define EMIF_ECC_REG_ECC_END_ADDR_MASK (0xffff << 16)
#define EMIF_ECC_REG_ECC_START_ADDR_SHIFT 0
#define EMIF_ECC_REG_ECC_START_ADDR_MASK (0xffff << 0)
/* EMIF_SYSTEM_OCP_INTERRUPT_RAW_STATUS */
#define EMIF_INT_ONEBIT_ECC_ERR_SYS_SHIFT 5
#define EMIF_INT_ONEBIT_ECC_ERR_SYS_MASK (1 << 5)
#define EMIF_INT_TWOBIT_ECC_ERR_SYS_SHIFT 4
#define EMIF_INT_TWOBIT_ECC_ERR_SYS_MASK (1 << 4)
#define EMIF_INT_WR_ECC_ERR_SYS_SHIFT 3
#define EMIF_INT_WR_ECC_ERR_SYS_MASK (1 << 3)
/* Reg mapping structure */
struct emif_reg_struct {
	u32 emif_mod_id_rev;
	u32 emif_status;
	u32 emif_sdram_config;
	u32 emif_lpddr2_nvm_config;
	u32 emif_sdram_ref_ctrl;
	u32 emif_sdram_ref_ctrl_shdw;
	u32 emif_sdram_tim_1;
	u32 emif_sdram_tim_1_shdw;
	u32 emif_sdram_tim_2;
	u32 emif_sdram_tim_2_shdw;
	u32 emif_sdram_tim_3;
	u32 emif_sdram_tim_3_shdw;
	u32 emif_lpddr2_nvm_tim;
	u32 emif_lpddr2_nvm_tim_shdw;
	u32 emif_pwr_mgmt_ctrl;
	u32 emif_pwr_mgmt_ctrl_shdw;
	u32 emif_lpddr2_mode_reg_data;
	u32 padding1[1];
	u32 emif_lpddr2_mode_reg_data_es2;
	u32 padding11[1];
	u32 emif_lpddr2_mode_reg_cfg;
	u32 emif_l3_config;
	u32 emif_l3_cfg_val_1;
	u32 emif_l3_cfg_val_2;
	u32 emif_iodft_tlgc;
	u32 padding2[7];
	u32 emif_perf_cnt_1;
	u32 emif_perf_cnt_2;
	u32 emif_perf_cnt_cfg;
	u32 emif_perf_cnt_sel;
	u32 emif_perf_cnt_tim;
	u32 padding3;
	u32 emif_read_idlectrl;
	u32 emif_read_idlectrl_shdw;
	u32 padding4;
	u32 emif_irqstatus_raw_sys;
	u32 emif_irqstatus_raw_ll;
	u32 emif_irqstatus_sys;
	u32 emif_irqstatus_ll;
	u32 emif_irqenable_set_sys;
	u32 emif_irqenable_set_ll;
	u32 emif_irqenable_clr_sys;
	u32 emif_irqenable_clr_ll;
	u32 padding5;
	u32 emif_zq_config;
	u32 emif_temp_alert_config;
	u32 emif_l3_err_log;
	u32 emif_rd_wr_lvl_rmp_win;
	u32 emif_rd_wr_lvl_rmp_ctl;
	u32 emif_rd_wr_lvl_ctl;
	u32 padding6[1];
	u32 emif_ddr_phy_ctrl_1;
	u32 emif_ddr_phy_ctrl_1_shdw;
	u32 emif_ddr_phy_ctrl_2;
	u32 padding7[4];
	u32 emif_prio_class_serv_map;
	u32 emif_connect_id_serv_1_map;
	u32 emif_connect_id_serv_2_map;
	u32 padding8;
	u32 emif_ecc_ctrl_reg;
	u32 emif_ecc_address_range_1;
	u32 emif_ecc_address_range_2;
	u32 padding8_1;
	u32 emif_rd_wr_exec_thresh;
	u32 emif_cos_config;
#if defined(CONFIG_DRA7XX) || defined(CONFIG_ARCH_KEYSTONE)
	u32 padding9[2];
	u32 emif_1b_ecc_err_cnt;
	u32 emif_1b_ecc_err_thrush;
	u32 emif_1b_ecc_err_dist_1;
	u32 emif_1b_ecc_err_addr_log;
	u32 emif_2b_ecc_err_addr_log;
	u32 emif_ddr_phy_status[28];
	u32 padding10[19];
#else
	u32 padding9[6];
	u32 emif_ddr_phy_status[28];
	u32 padding10[20];
#endif
	u32 emif_ddr_ext_phy_ctrl_1;
	u32 emif_ddr_ext_phy_ctrl_1_shdw;
	u32 emif_ddr_ext_phy_ctrl_2;
	u32 emif_ddr_ext_phy_ctrl_2_shdw;
	u32 emif_ddr_ext_phy_ctrl_3;
	u32 emif_ddr_ext_phy_ctrl_3_shdw;
	u32 emif_ddr_ext_phy_ctrl_4;
	u32 emif_ddr_ext_phy_ctrl_4_shdw;
	u32 emif_ddr_ext_phy_ctrl_5;
	u32 emif_ddr_ext_phy_ctrl_5_shdw;
	u32 emif_ddr_ext_phy_ctrl_6;
	u32 emif_ddr_ext_phy_ctrl_6_shdw;
	u32 emif_ddr_ext_phy_ctrl_7;
	u32 emif_ddr_ext_phy_ctrl_7_shdw;
	u32 emif_ddr_ext_phy_ctrl_8;
	u32 emif_ddr_ext_phy_ctrl_8_shdw;
	u32 emif_ddr_ext_phy_ctrl_9;
	u32 emif_ddr_ext_phy_ctrl_9_shdw;
	u32 emif_ddr_ext_phy_ctrl_10;
	u32 emif_ddr_ext_phy_ctrl_10_shdw;
	u32 emif_ddr_ext_phy_ctrl_11;
	u32 emif_ddr_ext_phy_ctrl_11_shdw;
	u32 emif_ddr_ext_phy_ctrl_12;
	u32 emif_ddr_ext_phy_ctrl_12_shdw;
	u32 emif_ddr_ext_phy_ctrl_13;
	u32 emif_ddr_ext_phy_ctrl_13_shdw;
	u32 emif_ddr_ext_phy_ctrl_14;
	u32 emif_ddr_ext_phy_ctrl_14_shdw;
	u32 emif_ddr_ext_phy_ctrl_15;
	u32 emif_ddr_ext_phy_ctrl_15_shdw;
	u32 emif_ddr_ext_phy_ctrl_16;
	u32 emif_ddr_ext_phy_ctrl_16_shdw;
	u32 emif_ddr_ext_phy_ctrl_17;
	u32 emif_ddr_ext_phy_ctrl_17_shdw;
	u32 emif_ddr_ext_phy_ctrl_18;
	u32 emif_ddr_ext_phy_ctrl_18_shdw;
	u32 emif_ddr_ext_phy_ctrl_19;
	u32 emif_ddr_ext_phy_ctrl_19_shdw;
	u32 emif_ddr_ext_phy_ctrl_20;
	u32 emif_ddr_ext_phy_ctrl_20_shdw;
	u32 emif_ddr_ext_phy_ctrl_21;
	u32 emif_ddr_ext_phy_ctrl_21_shdw;
	u32 emif_ddr_ext_phy_ctrl_22;
	u32 emif_ddr_ext_phy_ctrl_22_shdw;
	u32 emif_ddr_ext_phy_ctrl_23;
	u32 emif_ddr_ext_phy_ctrl_23_shdw;
	u32 emif_ddr_ext_phy_ctrl_24;
	u32 emif_ddr_ext_phy_ctrl_24_shdw;
	u32 emif_ddr_ext_phy_ctrl_25;
	u32 emif_ddr_ext_phy_ctrl_25_shdw;
	u32 emif_ddr_ext_phy_ctrl_26;
	u32 emif_ddr_ext_phy_ctrl_26_shdw;
	u32 emif_ddr_ext_phy_ctrl_27;
	u32 emif_ddr_ext_phy_ctrl_27_shdw;
	u32 emif_ddr_ext_phy_ctrl_28;
	u32 emif_ddr_ext_phy_ctrl_28_shdw;
	u32 emif_ddr_ext_phy_ctrl_29;
	u32 emif_ddr_ext_phy_ctrl_29_shdw;
	u32 emif_ddr_ext_phy_ctrl_30;
	u32 emif_ddr_ext_phy_ctrl_30_shdw;
	u32 emif_ddr_ext_phy_ctrl_31;
	u32 emif_ddr_ext_phy_ctrl_31_shdw;
	u32 emif_ddr_ext_phy_ctrl_32;
	u32 emif_ddr_ext_phy_ctrl_32_shdw;
	u32 emif_ddr_ext_phy_ctrl_33;
	u32 emif_ddr_ext_phy_ctrl_33_shdw;
	u32 emif_ddr_ext_phy_ctrl_34;
	u32 emif_ddr_ext_phy_ctrl_34_shdw;
	u32 emif_ddr_ext_phy_ctrl_35;
	u32 emif_ddr_ext_phy_ctrl_35_shdw;
	union {
		u32 emif_ddr_ext_phy_ctrl_36;
		u32 emif_ddr_fifo_misaligned_clear_1;
	};
	union {
		u32 emif_ddr_ext_phy_ctrl_36_shdw;
		u32 emif_ddr_fifo_misaligned_clear_2;
	};
};
struct dmm_lisa_map_regs {
	u32 dmm_lisa_map_0;
	u32 dmm_lisa_map_1;
	u32 dmm_lisa_map_2;
	u32 dmm_lisa_map_3;
	u8 is_ma_present;
};
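/*
 * Illustrative sketch, not part of the original header: one way the LISA
 * section values above could be written out, assuming the DMM (and, when
 * is_ma_present is set, the Memory Adapter) exposes the four map registers
 * at DMM_BASE/MA_BASE in the same layout as struct dmm_lisa_map_regs.
 * The function name is hypothetical.
 */
static inline void emif_example_write_lisa_maps(const struct dmm_lisa_map_regs *r)
{
	struct dmm_lisa_map_regs *dmm = (struct dmm_lisa_map_regs *)DMM_BASE;
	struct dmm_lisa_map_regs *ma = (struct dmm_lisa_map_regs *)MA_BASE;

	writel(r->dmm_lisa_map_0, &dmm->dmm_lisa_map_0);
	writel(r->dmm_lisa_map_1, &dmm->dmm_lisa_map_1);
	writel(r->dmm_lisa_map_2, &dmm->dmm_lisa_map_2);
	writel(r->dmm_lisa_map_3, &dmm->dmm_lisa_map_3);

	/* Mirror the sections into the MA instance when one is present */
	if (r->is_ma_present) {
		writel(r->dmm_lisa_map_0, &ma->dmm_lisa_map_0);
		writel(r->dmm_lisa_map_1, &ma->dmm_lisa_map_1);
		writel(r->dmm_lisa_map_2, &ma->dmm_lisa_map_2);
		writel(r->dmm_lisa_map_3, &ma->dmm_lisa_map_3);
	}
}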
#define CS0 0
#define CS1 1
/* The maximum frequency at which the LPDDR2 interface can operate in Hz */
#define MAX_LPDDR2_FREQ 400000000 /* 400 MHz */
/*
 * The period of the DDR clock is represented as a numerator and denominator
 * for better accuracy in integer-based calculations. However, if the
 * numerator and denominator are very large, the calculations may overflow.
 * So, as a trade-off, keep the denominator (and consequently the numerator)
 * within a limit, sacrificing a little accuracy. If the denominator and
 * numerator are already small (such as at 400 MHz) no adjustment is needed.
 */
#define EMIF_PERIOD_DEN_LIMIT 1000
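/*
 * Illustrative sketch, not part of the original header: one simple way to
 * keep the numerator/denominator pair within EMIF_PERIOD_DEN_LIMIT by
 * halving both, trading a little accuracy as described above. For example,
 * a 400 MHz clock has a period of 1000000000/400000000 ns, which reduces to
 * 5/2 ns and needs no adjustment. The function name is hypothetical.
 */
static inline void emif_example_limit_period(u32 *period_num, u32 *period_den)
{
	while (*period_den > EMIF_PERIOD_DEN_LIMIT) {
		*period_num >>= 1;
		*period_den >>= 1;
	}
}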
/*
 * Maximum number of different frequencies supported by EMIF driver
 * Determines the number of entries in the pointer array for register
 * cache
 */
#define EMIF_MAX_NUM_FREQUENCIES 6
/*
 * Indices into the Addressing Table array.
 * One entry each for all the different types of devices with different
 * addressing schemes
 */
#define ADDR_TABLE_INDEX64M 0
#define ADDR_TABLE_INDEX128M 1
#define ADDR_TABLE_INDEX256M 2
#define ADDR_TABLE_INDEX512M 3
#define ADDR_TABLE_INDEX1GS4 4
#define ADDR_TABLE_INDEX2GS4 5
#define ADDR_TABLE_INDEX4G 6
#define ADDR_TABLE_INDEX8G 7
#define ADDR_TABLE_INDEX1GS2 8
#define ADDR_TABLE_INDEX2GS2 9
#define ADDR_TABLE_INDEXMAX 10
/* Number of Row bits */
#define ROW_9 0
#define ROW_10 1
#define ROW_11 2
#define ROW_12 3
#define ROW_13 4
#define ROW_14 5
#define ROW_15 6
#define ROW_16 7
/* Number of Column bits */
#define COL_8 0
#define COL_9 1
#define COL_10 2
#define COL_11 3
#define COL_7 4 /* Not supported by OMAP; included for completeness */
/* Number of Banks */
#define BANKS1 0
#define BANKS2 1
#define BANKS4 2
#define BANKS8 3
/* Refresh rate in micro seconds x 10 */
#define T_REFI_15_6 156
#define T_REFI_7_8 78
#define T_REFI_3_9 39
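/*
 * Illustrative sketch, not part of the original header: the REFRESH_RATE
 * field of SDRAM_REF_CTRL is expressed in DDR clock cycles, so a tREFI
 * given in the "micro seconds x 10" form above converts as shown here.
 * For example, T_REFI_7_8 (7.8 us) at 400 MHz gives 78 * 400 / 10 = 3120
 * cycles. The function name is hypothetical.
 */
static inline u32 emif_example_refresh_rate_cycles(u32 t_refi_us_x10,
		u32 freq_mhz)
{
	return t_refi_us_x10 * freq_mhz / 10;
}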
#define EBANK_CS1_DIS 0
#define EBANK_CS1_EN 1
/* Read Latency used by the device at reset */
#define RL_BOOT 3
/* Read Latency for the highest frequency you want to use */
#ifdef CONFIG_OMAP54XX
#define RL_FINAL 8
#else
#define RL_FINAL 6
#endif
/* Interleaving policies at EMIF level - between banks and Chip Selects */
#define EMIF_INTERLEAVING_POLICY_MAX_INTERLEAVING 0
#define EMIF_INTERLEAVING_POLICY_NO_BANK_INTERLEAVING 3
/*
 * Interleaving policy to be used
 * Currently set to MAX interleaving for better performance
 */
#define EMIF_INTERLEAVING_POLICY EMIF_INTERLEAVING_POLICY_MAX_INTERLEAVING
/*
 * State of the core voltage:
 * This is important for some parameters such as read idle control and
 * ZQ calibration timings. Timings are much stricter when voltage ramp
 * is happening compared to when the voltage is stable.
 * We need to calculate two sets of values for these parameters and use
 * them accordingly
 */
#define LPDDR2_VOLTAGE_STABLE 0
#define LPDDR2_VOLTAGE_RAMPING 1
/* Length of the forced read idle period in terms of cycles */
#define EMIF_REG_READ_IDLE_LEN_VAL 5
/* Interval between forced 'read idles' */
/* To be used when voltage is changed for DPS/DVFS - 1us */
#define READ_IDLE_INTERVAL_DVFS (1*1000)
/*
 * To be used when voltage is not scaled except by Smart Reflex
 * 50us - or maximum value will do
 */
#define READ_IDLE_INTERVAL_NORMAL (50*1000)
/*
 * Unless voltage is changing due to DVFS, one ZQCS command every 50ms should
 * be enough. This should also be enough when voltage is changing due to
 * smart-reflex.
 */
#define EMIF_ZQCS_INTERVAL_NORMAL_IN_US (50*1000)
/*
 * If voltage is changing due to DVFS ZQCS should be performed more
 * often (every 50us)
 */
#define EMIF_ZQCS_INTERVAL_DVFS_IN_US 50
/* The interval between ZQCL commands as a multiple of ZQCS interval */
#define REG_ZQ_ZQCL_MULT 4
/* The interval between ZQINIT commands as a multiple of ZQCL interval */
#define REG_ZQ_ZQINIT_MULT 3
/* Enable ZQ Calibration on exiting Self-refresh */
#define REG_ZQ_SFEXITEN_ENABLE 1
/*
 * ZQ Calibration simultaneously on both chip-selects:
 * Needs one calibration resistor per CS
 * None of the boards that we know of have this capability
 * So disabled by default
 */
#define REG_ZQ_DUALCALEN_DISABLE 0
/*
 * Enable ZQ Calibration by default on CS0. If we are asked to program
 * the EMIF there will be something connected to CS0 for sure
 */
#define REG_ZQ_CS0EN_ENABLE 1
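/*
 * Illustrative sketch, not part of the original header: composing a
 * ZQ_CONFIG value from the defaults above. The REFINTERVAL field counts
 * refresh periods, so the ZQCS interval in us is divided by tREFI (given
 * in us x 10). The function name is hypothetical; CS1 calibration is left
 * out since it depends on the board.
 */
static inline u32 emif_example_zq_config(u32 zqcs_interval_us, u32 t_refi_us_x10)
{
	u32 val = 0;

	val |= (zqcs_interval_us * 10 / t_refi_us_x10) <<
		EMIF_REG_ZQ_REFINTERVAL_SHIFT;
	val |= REG_ZQ_ZQCL_MULT << EMIF_REG_ZQ_ZQCL_MULT_SHIFT;
	val |= REG_ZQ_ZQINIT_MULT << EMIF_REG_ZQ_ZQINIT_MULT_SHIFT;
	val |= REG_ZQ_SFEXITEN_ENABLE << EMIF_REG_ZQ_SFEXITEN_SHIFT;
	val |= REG_ZQ_DUALCALEN_DISABLE << EMIF_REG_ZQ_DUALCALEN_SHIFT;
	val |= REG_ZQ_CS0EN_ENABLE << EMIF_REG_ZQ_CS0EN_SHIFT;

	return val;
}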
/* EMIF_PWR_MGMT_CTRL register */
/* Low power modes */
#define LP_MODE_DISABLE 0
#define LP_MODE_CLOCK_STOP 1
#define LP_MODE_SELF_REFRESH 2
#define LP_MODE_PWR_DN 3
/* REG_DPD_EN */
#define DPD_DISABLE 0
#define DPD_ENABLE 1
/* Maximum delay before Low Power Modes */
#define REG_CS_TIM 0x0
#define REG_SR_TIM 0xF
#define REG_PD_TIM 0xF
/* EMIF_PWR_MGMT_CTRL register */
#define EMIF_PWR_MGMT_CTRL (\
	((REG_CS_TIM << EMIF_REG_CS_TIM_SHIFT) & EMIF_REG_CS_TIM_MASK)|\
	((REG_SR_TIM << EMIF_REG_SR_TIM_SHIFT) & EMIF_REG_SR_TIM_MASK)|\
	((REG_PD_TIM << EMIF_REG_PD_TIM_SHIFT) & EMIF_REG_PD_TIM_MASK)|\
	((LP_MODE_SELF_REFRESH << EMIF_REG_LP_MODE_SHIFT)\
		& EMIF_REG_LP_MODE_MASK) |\
	((DPD_DISABLE << EMIF_REG_DPD_EN_SHIFT)\
		& EMIF_REG_DPD_EN_MASK))
#define EMIF_PWR_MGMT_CTRL_SHDW (\
	((REG_CS_TIM << EMIF_REG_CS_TIM_SHDW_SHIFT)\
		& EMIF_REG_CS_TIM_SHDW_MASK) |\
	((REG_SR_TIM << EMIF_REG_SR_TIM_SHDW_SHIFT)\
		& EMIF_REG_SR_TIM_SHDW_MASK) |\
	((REG_PD_TIM << EMIF_REG_PD_TIM_SHDW_SHIFT)\
		& EMIF_REG_PD_TIM_SHDW_MASK))
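/*
 * Illustrative sketch, not part of the original header: applying the two
 * power management values above to one EMIF instance. The function name is
 * hypothetical.
 */
static inline void emif_example_set_pwr_mgmt_ctrl(u32 base)
{
	struct emif_reg_struct *emif = (struct emif_reg_struct *)base;

	writel(EMIF_PWR_MGMT_CTRL, &emif->emif_pwr_mgmt_ctrl);
	writel(EMIF_PWR_MGMT_CTRL_SHDW, &emif->emif_pwr_mgmt_ctrl_shdw);
}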
/* EMIF_L3_CONFIG register value */
#define EMIF_L3_CONFIG_VAL_SYS_10_LL_0 0x0A0000FF
#define EMIF_L3_CONFIG_VAL_SYS_10_MPU_3_LL_0 0x0A300000
#define EMIF_L3_CONFIG_VAL_SYS_10_MPU_5_LL_0 0x0A500000
/*
 * Value of bits 12:31 of DDR_PHY_CTRL_1 register:
 * All these fields have magic values dependent on frequency and
 * determined by PHY and DLL integration with EMIF. Setting the magic
 * values suggested by hw team.
 */
#define EMIF_DDR_PHY_CTRL_1_BASE_VAL 0x049FF
#define EMIF_DLL_SLAVE_DLY_CTRL_400_MHZ 0x41
#define EMIF_DLL_SLAVE_DLY_CTRL_200_MHZ 0x80
#define EMIF_DLL_SLAVE_DLY_CTRL_100_MHZ_AND_LESS 0xFF
/*
 * MR1 value:
 * Burst length : 8
 * Burst type : sequential
 * Wrap : enabled
 * nWR : 3 (default). The EMIF does not do precharge, so nWR is a
 * don't care.
 */
#define MR1_BL_8_BT_SEQ_WRAP_EN_NWR_3 0x23
#define MR1_BL_8_BT_SEQ_WRAP_EN_NWR_8 0xc3
/* MR2 */
#define MR2_RL3_WL1 1
#define MR2_RL4_WL2 2
#define MR2_RL5_WL2 3
#define MR2_RL6_WL3 4
/* MR10: ZQ calibration codes */
#define MR10_ZQ_ZQCS 0x56
#define MR10_ZQ_ZQCL 0xAB
#define MR10_ZQ_ZQINIT 0xFF
#define MR10_ZQ_ZQRESET 0xC3
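/*
 * Illustrative sketch, not part of the original header: issuing an LPDDR2
 * mode register write through the EMIF. The chip-select and MR address are
 * placed in LPDDR2_MODE_REG_CFG and the write to LPDDR2_MODE_REG_DATA
 * carries the value, e.g. MR10_ZQ_ZQINIT to request a full ZQ
 * initialisation calibration. The function name is hypothetical.
 */
static inline void emif_example_set_mr(u32 base, u32 cs, u32 mr_addr, u32 value)
{
	struct emif_reg_struct *emif = (struct emif_reg_struct *)base;
	u32 cfg;

	cfg = (cs << EMIF_REG_CS_SHIFT) | (mr_addr << EMIF_REG_ADDRESS_SHIFT);
	writel(cfg, &emif->emif_lpddr2_mode_reg_cfg);
	writel(value, &emif->emif_lpddr2_mode_reg_data);
}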
/* TEMP_ALERT_CONFIG */
#define TEMP_ALERT_POLL_INTERVAL_MS 360 /* for temp gradient - 5 C/s */
#define TEMP_ALERT_CONFIG_DEVCT_1 0
#define TEMP_ALERT_CONFIG_DEVWDT_32 2
/* MR16 value: refresh full array (no partial array self refresh) */
#define MR16_REF_FULL_ARRAY 0
/*
 * Maximum number of entries we keep in our array of timing tables
 * We need not keep all the speed bins supported by the device
 * We need to keep timing tables for only the speed bins that we
 * are interested in
 */
#define MAX_NUM_SPEEDBINS 4
/* LPDDR2 Densities */
#define LPDDR2_DENSITY_64Mb 0
#define LPDDR2_DENSITY_128Mb 1
#define LPDDR2_DENSITY_256Mb 2
#define LPDDR2_DENSITY_512Mb 3
#define LPDDR2_DENSITY_1Gb 4
#define LPDDR2_DENSITY_2Gb 5
#define LPDDR2_DENSITY_4Gb 6
#define LPDDR2_DENSITY_8Gb 7
#define LPDDR2_DENSITY_16Gb 8
#define LPDDR2_DENSITY_32Gb 9
/* LPDDR2 type */
#define LPDDR2_TYPE_S4 0
#define LPDDR2_TYPE_S2 1
#define LPDDR2_TYPE_NVM 2
/* LPDDR2 IO width */
#define LPDDR2_IO_WIDTH_32 0
#define LPDDR2_IO_WIDTH_16 1
#define LPDDR2_IO_WIDTH_8 2
/* Mode register numbers */
#define LPDDR2_MR0 0
#define LPDDR2_MR1 1
#define LPDDR2_MR2 2
#define LPDDR2_MR3 3
#define LPDDR2_MR4 4
#define LPDDR2_MR5 5
#define LPDDR2_MR6 6
#define LPDDR2_MR7 7
#define LPDDR2_MR8 8
#define LPDDR2_MR9 9
#define LPDDR2_MR10 10
#define LPDDR2_MR11 11
#define LPDDR2_MR16 16
#define LPDDR2_MR17 17
#define LPDDR2_MR18 18
/* MR0 */
#define LPDDR2_MR0_DAI_SHIFT 0
#define LPDDR2_MR0_DAI_MASK 1
#define LPDDR2_MR0_DI_SHIFT 1
#define LPDDR2_MR0_DI_MASK (1 << 1)
#define LPDDR2_MR0_DNVI_SHIFT 2
#define LPDDR2_MR0_DNVI_MASK (1 << 2)
/* MR4 */
#define MR4_SDRAM_REF_RATE_SHIFT 0
#define MR4_SDRAM_REF_RATE_MASK 7
#define MR4_TUF_SHIFT 7
#define MR4_TUF_MASK (1 << 7)
/* MR4 SDRAM Refresh Rate field values */
#define SDRAM_TEMP_LESS_LOW_SHUTDOWN 0x0
#define SDRAM_TEMP_LESS_4X_REFRESH_AND_TIMINGS 0x1
#define SDRAM_TEMP_LESS_2X_REFRESH_AND_TIMINGS 0x2
#define SDRAM_TEMP_NOMINAL 0x3
#define SDRAM_TEMP_RESERVED_4 0x4
#define SDRAM_TEMP_HIGH_DERATE_REFRESH 0x5
#define SDRAM_TEMP_HIGH_DERATE_REFRESH_AND_TIMINGS 0x6
#define SDRAM_TEMP_VERY_HIGH_SHUTDOWN 0x7
#define LPDDR2_MANUFACTURER_SAMSUNG 1
#define LPDDR2_MANUFACTURER_QIMONDA 2
#define LPDDR2_MANUFACTURER_ELPIDA 3
#define LPDDR2_MANUFACTURER_ETRON 4
#define LPDDR2_MANUFACTURER_NANYA 5
#define LPDDR2_MANUFACTURER_HYNIX 6
#define LPDDR2_MANUFACTURER_MOSEL 7
#define LPDDR2_MANUFACTURER_WINBOND 8
#define LPDDR2_MANUFACTURER_ESMT 9
#define LPDDR2_MANUFACTURER_SPANSION 11
#define LPDDR2_MANUFACTURER_SST 12
#define LPDDR2_MANUFACTURER_ZMOS 13
#define LPDDR2_MANUFACTURER_INTEL 14
#define LPDDR2_MANUFACTURER_NUMONYX 254
#define LPDDR2_MANUFACTURER_MICRON 255
/* MR8 register fields */
#define MR8_TYPE_SHIFT 0x0
#define MR8_TYPE_MASK 0x3
#define MR8_DENSITY_SHIFT 0x2
#define MR8_DENSITY_MASK (0xF << 0x2)
#define MR8_IO_WIDTH_SHIFT 0x6
#define MR8_IO_WIDTH_MASK (0x3 << 0x6)
/* SDRAM TYPE */
#define EMIF_SDRAM_TYPE_DDR2 0x2
#define EMIF_SDRAM_TYPE_DDR3 0x3
#define EMIF_SDRAM_TYPE_LPDDR2 0x4
struct lpddr2_addressing {
	u8 num_banks;
	u8 t_REFI_us_x10;
	u8 row_sz[2]; /* One entry each for x32 and x16 */
	u8 col_sz[2]; /* One entry each for x32 and x16 */
};
/* Structure for timings from the DDR datasheet */
struct lpddr2_ac_timings {
	u32 max_freq;
	u8 RL;
	u8 tRPab;
	u8 tRCD;
	u8 tWR;
	u8 tRASmin;
	u8 tRRD;
	u8 tWTRx2;
	u8 tXSR;
	u8 tXPx2;
	u8 tRFCab;
	u8 tRTPx2;
	u8 tCKE;
	u8 tCKESR;
	u8 tZQCS;
	u32 tZQCL;
	u32 tZQINIT;
	u8 tDQSCKMAXx2;
	u8 tRASmax;
	u8 tFAW;
};
/*
 * Min tCK values for some of the parameters:
 * If the calculated number of clock cycles for a given parameter is
 * less than the corresponding min tCK value, the min tCK value must
 * be used instead. This may happen at lower frequencies.
 */
struct lpddr2_min_tck {
	u32 tRL;
	u32 tRP_AB;
	u32 tRCD;
	u32 tWR;
	u32 tRAS_MIN;
	u32 tRRD;
	u32 tWTR;
	u32 tXP;
	u32 tRTP;
	u8 tCKE;
	u32 tCKESR;
	u32 tFAW;
};
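/*
 * Illustrative sketch, not part of the original header: converting a
 * datasheet timing in ns to clock cycles (rounded up) and enforcing the
 * corresponding min tCK value. For example, tRCD = 18 ns at a 5 ns cycle
 * time is 4 cycles, which already exceeds a min tCK of 3, so 4 is used.
 * The function name is hypothetical.
 */
static inline u32 emif_example_ns_to_cycles(u32 time_ns, u32 t_ck_ns, u32 min_tck)
{
	u32 cycles = (time_ns + t_ck_ns - 1) / t_ck_ns;

	return (cycles > min_tck) ? cycles : min_tck;
}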
struct lpddr2_device_details {
	u8 type;
	u8 density;
	u8 io_width;
	u8 manufacturer;
};
struct lpddr2_device_timings {
	const struct lpddr2_ac_timings **ac_timings;
	const struct lpddr2_min_tck *min_tck;
};
/* Details of the devices connected to each chip-select of an EMIF instance */
struct emif_device_details {
	const struct lpddr2_device_details *cs0_device_details;
	const struct lpddr2_device_details *cs1_device_details;
	const struct lpddr2_device_timings *cs0_device_timings;
	const struct lpddr2_device_timings *cs1_device_timings;
};
/*
 * Structure containing shadow of important registers in EMIF
 * The calculation function fills in this structure to be later used for
 * initialization and DVFS
 */
struct emif_regs {
	u32 freq;
	u32 sdram_config_init;
	u32 sdram_config;
	u32 sdram_config2;
	u32 ref_ctrl;
	u32 ref_ctrl_final;
	u32 sdram_tim1;
	u32 sdram_tim2;
	u32 sdram_tim3;
	u32 ocp_config;
	u32 read_idle_ctrl;
	u32 zq_config;
	u32 temp_alert_config;
	u32 emif_ddr_phy_ctlr_1_init;
	u32 emif_ddr_phy_ctlr_1;
	u32 emif_ddr_ext_phy_ctrl_1;
	u32 emif_ddr_ext_phy_ctrl_2;
	u32 emif_ddr_ext_phy_ctrl_3;
	u32 emif_ddr_ext_phy_ctrl_4;
	u32 emif_ddr_ext_phy_ctrl_5;
	u32 emif_rd_wr_lvl_rmp_win;
	u32 emif_rd_wr_lvl_rmp_ctl;
	u32 emif_rd_wr_lvl_ctl;
	u32 emif_rd_wr_exec_thresh;
	u32 emif_prio_class_serv_map;
	u32 emif_connect_id_serv_1_map;
	u32 emif_connect_id_serv_2_map;
	u32 emif_cos_config;
	u32 emif_ecc_ctrl_reg;
	u32 emif_ecc_address_range_1;
	u32 emif_ecc_address_range_2;
};
struct lpddr2_mr_regs {
	s8 mr1;
	s8 mr2;
	s8 mr3;
	s8 mr10;
	s8 mr16;
};
struct read_write_regs {
	u32 read_reg;
	u32 write_reg;
};
static inline u32 get_emif_rev(u32 base)
{
	struct emif_reg_struct *emif = (struct emif_reg_struct *)base;

	return (readl(&emif->emif_mod_id_rev) & EMIF_REG_MAJOR_REVISION_MASK)
		>> EMIF_REG_MAJOR_REVISION_SHIFT;
}
/*
 * Get the type of SDRAM connected to the EMIF.
 * Similar SDRAM parts are assumed to be connected to both EMIFs,
 * which is typically the case, so it is sufficient to get the
 * SDRAM type from EMIF1.
 */
static inline u32 emif_sdram_type(u32 sdram_config)
{
	return (sdram_config & EMIF_REG_SDRAM_TYPE_MASK)
		>> EMIF_REG_SDRAM_TYPE_SHIFT;
}
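/*
 * Illustrative usage sketch, not part of the original header: reading the
 * SDRAM type from EMIF1 as described above. The function name is
 * hypothetical.
 */
static inline u32 emif_example_sdram_type_from_emif1(void)
{
	struct emif_reg_struct *emif = (struct emif_reg_struct *)EMIF1_BASE;

	return emif_sdram_type(readl(&emif->emif_sdram_config));
}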
/* assert macros */
#if defined(DEBUG)
#define emif_assert(c) ({ if (!(c)) for (;;); })
#else
#define emif_assert(c) ({ if (0) hang(); })
#endif
#ifdef CONFIG_SYS_EMIF_PRECALCULATED_TIMING_REGS
void emif_get_reg_dump(u32 emif_nr, const struct emif_regs **regs);
void emif_get_dmm_regs(const struct dmm_lisa_map_regs **dmm_lisa_regs);
#else
struct lpddr2_device_details *emif_get_device_details(u32 emif_nr, u8 cs,
		struct lpddr2_device_details *lpddr2_dev_details);
void emif_get_device_timings(u32 emif_nr,
		const struct lpddr2_device_timings **cs0_device_timings,
		const struct lpddr2_device_timings **cs1_device_timings);
#endif
void do_ext_phy_settings(u32 base, const struct emif_regs *regs);
void get_lpddr2_mr_regs(const struct lpddr2_mr_regs **regs);
#ifndef CONFIG_SYS_EMIF_PRECALCULATED_TIMING_REGS
extern u32 *const T_num;
extern u32 *const T_den;
#endif
void config_data_eye_leveling_samples(u32 emif_base);
const struct read_write_regs *get_bug_regs(u32 *iterations);
#endif