ddr3_dfs.c 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) Marvell International Ltd. and its affiliates
  4. */
  5. #include <common.h>
  6. #include <i2c.h>
  7. #include <spl.h>
  8. #include <asm/io.h>
  9. #include <asm/arch/cpu.h>
  10. #include <asm/arch/soc.h>
  11. #include "ddr3_hw_training.h"
  12. /*
  13. * Debug
  14. */
  15. #define DEBUG_DFS_C(s, d, l) \
  16. DEBUG_DFS_S(s); DEBUG_DFS_D(d, l); DEBUG_DFS_S("\n")
  17. #define DEBUG_DFS_FULL_C(s, d, l) \
  18. DEBUG_DFS_FULL_S(s); DEBUG_DFS_FULL_D(d, l); DEBUG_DFS_FULL_S("\n")
  19. #ifdef MV_DEBUG_DFS
  20. #define DEBUG_DFS_S(s) puts(s)
  21. #define DEBUG_DFS_D(d, l) printf("%x", d)
  22. #else
  23. #define DEBUG_DFS_S(s)
  24. #define DEBUG_DFS_D(d, l)
  25. #endif
  26. #ifdef MV_DEBUG_DFS_FULL
  27. #define DEBUG_DFS_FULL_S(s) puts(s)
  28. #define DEBUG_DFS_FULL_D(d, l) printf("%x", d)
  29. #else
  30. #define DEBUG_DFS_FULL_S(s)
  31. #define DEBUG_DFS_FULL_D(d, l)
  32. #endif
  33. #if defined(MV88F672X)
  34. extern u8 div_ratio[CLK_VCO][CLK_DDR];
  35. extern void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps);
  36. #else
  37. extern u16 odt_dynamic[ODT_OPT][MAX_CS];
  38. extern u8 div_ratio1to1[CLK_CPU][CLK_DDR];
  39. extern u8 div_ratio2to1[CLK_CPU][CLK_DDR];
  40. #endif
  41. extern u16 odt_static[ODT_OPT][MAX_CS];
  42. extern u32 cpu_fab_clk_to_hclk[FAB_OPT][CLK_CPU];
  43. extern u32 ddr3_get_vco_freq(void);
  44. u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1);
  45. #ifdef MV_DEBUG_DFS
  46. static inline void dfs_reg_write(u32 addr, u32 val)
  47. {
  48. printf("\n write reg 0x%08x = 0x%08x", addr, val);
  49. writel(val, INTER_REGS_BASE + addr);
  50. }
  51. #else
  52. static inline void dfs_reg_write(u32 addr, u32 val)
  53. {
  54. writel(val, INTER_REGS_BASE + addr);
  55. }
  56. #endif
  57. static void wait_refresh_op_complete(void)
  58. {
  59. u32 reg;
  60. /* Poll - Wait for Refresh operation completion */
  61. do {
  62. reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
  63. REG_SDRAM_OPERATION_CMD_RFRS_DONE;
  64. } while (reg); /* Wait for '0' */
  65. }
  66. /*
  67. * Name: ddr3_get_freq_parameter
  68. * Desc: Finds CPU/DDR frequency ratio according to Sample@reset and table.
  69. * Args: target_freq - target frequency
  70. * Notes:
  71. * Returns: freq_par - the ratio parameter
  72. */
  73. u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1)
  74. {
  75. u32 ui_vco_freq, freq_par;
  76. ui_vco_freq = ddr3_get_vco_freq();
  77. #if defined(MV88F672X)
  78. freq_par = div_ratio[ui_vco_freq][target_freq];
  79. #else
  80. /* Find the ratio between PLL frequency and ddr-clk */
  81. if (ratio_2to1)
  82. freq_par = div_ratio2to1[ui_vco_freq][target_freq];
  83. else
  84. freq_par = div_ratio1to1[ui_vco_freq][target_freq];
  85. #endif
  86. return freq_par;
  87. }
/*
 * Name:     ddr3_dfs_high_2_low
 * Desc:     Scale the DRAM interface down to the low (100 MHz) training
 *           frequency: put the DRAM in self refresh, reprogram the clock
 *           dividers, switch to DLL-off mode timings (CL=6/CWL=6) and
 *           bring the DRAM back out of self refresh.
 * Args:     freq      - target frequency index (passed to the divider table)
 *           dram_info - DRAM configuration/state descriptor
 * Notes:    The register sequence below is strictly order-sensitive; do
 *           not reorder accesses.  Two compile-time flows exist: the
 *           ArmadaXP A0 / AvantaLP flow and (in the #else) the
 *           Armada370 A0 / ArmadaXP Z1 flow.
 * Returns:  MV_OK - success, MV_FAIL - fail
 */
int ddr3_dfs_high_2_low(u32 freq, MV_DRAM_INFO *dram_info)
{
#if defined(MV88F78X60) || defined(MV88F672X)
	/* This Flow is relevant for ArmadaXP A0 */
	u32 reg, freq_par, tmp;
	u32 cs = 0;

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
		    freq, 1);

	/* target frequency - 100MHz */
	freq_par = ddr3_get_freq_parameter(freq, 0);

#if defined(MV88F672X)
	u32 hclk;
	u32 cpu_freq = ddr3_get_cpu_freq();
	get_target_freq(cpu_freq, &tmp, &hclk);
#endif

	/* Configure - DRAM DLL final state after DFS is complete - DLL off */
	reg = reg_read(REG_DFS_ADDR);
	/* [0] - DfsDllNextState = 1 - DLL disabled after DFS (low freq) */
	reg |= (1 << REG_DFS_DLLNEXTSTATE_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Configure - XBAR Retry response during Block to enable internal
	 * access - Disable
	 */
	reg = reg_read(REG_METAL_MASK_ADDR);
	/* [0] - RetryMask - Disable */
	reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* Configure - Block new external transactions - Enable */
	reg = reg_read(REG_DFS_ADDR);
	reg |= (1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Enable */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* Registered DIMM support */
	if (dram_info->reg_dimm) {
		/*
		 * Configure - Disable Register DIMM CKE Power
		 * Down mode - CWA_RC
		 */
		reg = (0x9 & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
			REG_SDRAM_OPERATION_CWA_RC_OFFS;
		/*
		 * Configure - Disable Register DIMM CKE Power
		 * Down mode - CWA_DATA
		 */
		reg |= ((0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
		/*
		 * Configure - Disable Register DIMM CKE Power
		 * Down mode - Set Delay - tMRD
		 */
		reg |= (0 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
		/* Configure - Issue CWA command with the above parameters */
		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));

		/* 0x1418 - SDRAM Operation Register */
		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

		/* Poll - Wait for CWA operation completion (cmd field == 0) */
		do {
			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
				(REG_SDRAM_OPERATION_CMD_MASK);
		} while (reg);

		/* Configure - Disable outputs floating during Self Refresh */
		reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
		/* [15] - SRFloatEn - Disable */
		reg &= ~(1 << REG_REGISTERED_DRAM_CTRL_SR_FLOAT_OFFS);
		/* 0x16D0 - DDR3 Registered DRAM Control */
		dfs_reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
	}

	/* Optional - Configure - DDR3_Rtt_nom_CS# (clear Rtt_nom bits) */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			reg = reg_read(REG_DDR3_MR1_CS_ADDR +
				       (cs << MR_CS_ADDR_OFFS));
			reg &= REG_DDR3_MR1_RTT_MASK;
			dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
				      (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	/* Configure - Move DRAM into Self Refresh */
	reg = reg_read(REG_DFS_ADDR);
	reg |= (1 << REG_DFS_SR_OFFS);	/* [2] - DfsSR - Enable */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* Poll - Wait for Self Refresh indication */
	do {
		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
	} while (reg == 0x0);	/* 0x1528 [3] - DfsAtSR - Wait for '1' */

	/* Start of clock change procedure (PLL) */
#if defined(MV88F672X)
	/* avantaLP */
	/* Configure cpupll_clkdiv_reset_mask */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
	/* 0xE8264[7:0] 0xff CPU Clock Dividers Reset mask */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));

	/* Configure cpu_clkdiv_reload_smooth */
	reg = reg_read(CPU_PLL_CNTRL0);
	reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
	/* 0xE8260 [15:8] 0x2 CPU Clock Dividers Reload Smooth enable */
	dfs_reg_write(CPU_PLL_CNTRL0,
		      (reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS)));

	/* Configure cpupll_clkdiv_relax_en */
	reg = reg_read(CPU_PLL_CNTRL0);
	reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
	/* 0xE8260 [31:24] 0x2 Relax Enable */
	dfs_reg_write(CPU_PLL_CNTRL0,
		      (reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS)));

	/* Configure cpupll_clkdiv_ddr_clk_ratio */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
	/*
	 * 0xE8268 [13:8] N Set Training clock:
	 * APLL Out Clock (VCO freq) / N = 100 MHz
	 */
	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
	reg |= (freq_par << 8);	/* full Integer ratio from PLL-out to ddr-clk */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);

	/* Configure cpupll_clkdiv_reload_ratio */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
	/* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
		      (reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS)));

	udelay(1);

	/* Configure cpupll_clkdiv_reload_ratio */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
	/* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);

	udelay(5);
#else
	/*
	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
	 * and in the same chance, block reassertions of reset [15:8] and
	 * force reserved bits[7:0].
	 */
	reg = 0x0000FDFF;
	/* 0x18700 - CPU Div CLK control 0 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	/*
	 * RelaX whenever reset is asserted to that channel
	 * (good for any case)
	 */
	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

	reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
	/* full Integer ratio from PLL-out to ddr-clk */
	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
	/* CPU Div CLK control register holding the DDR clock ratio field */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);

	/*
	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
	 * All the rest are kept as is (forced, but could be read-modify-write).
	 * This is done now by RMW above.
	 */

	/* Clock is not shut off gracefully - keep it running */
	reg = 0x000FFF02;
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);

	/* Wait before replacing the clock on the DDR Phy Channel. */
	udelay(1);

	/*
	 * This for triggering the frequency update. Bit[24] is the
	 * central control
	 * bits [23:16] == which channels to change ==2 ==>
	 * only DDR Phy (smooth transition)
	 * bits [15:8] == mask reset reassertion due to clock modification
	 * to these channels.
	 * bits [7:0] == not in use
	 */
	reg = 0x0102FDFF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	udelay(1);	/* Wait 1usec */

	/*
	 * Poll Div CLK status 0 register - indication that the clocks
	 * are active - 0x18718 [8]
	 */
	do {
		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
	} while (reg == 0);

	/*
	 * Clean the CTRL0, to be ready for next resets and next requests
	 * of ratio modifications.
	 */
	reg = 0x000000FF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	udelay(5);
#endif
	/* End of clock change procedure (PLL) */

	/* Configure - Select normal clock for the DDR PHY - Enable */
	reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
	/* [16] - ddr_phy_trn_clk_sel - Enable */
	reg |= (1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
	/* 0x18488 - DRAM Init control status register */
	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);

	/* Configure - Set Correct Ratio - 1:1 */
	/* [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between Dunit and Phy */
	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */

	/* Configure - 2T Mode - Restore original configuration */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	/* [4:3] 2T - 1T Mode - low freq */
	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
	/* 0x1404 - DDR Controller Control Low Register */
	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

	/* Configure - Restore CL and CWL - MRS Commands */
	reg = reg_read(REG_DFS_ADDR);
	reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
	reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
	/* [8] - DfsCLNextState - MRS CL=6 after DFS (due to DLL-off mode) */
	reg |= (0x4 << REG_DFS_CL_NEXT_STATE_OFFS);
	/* [12] - DfsCWLNextState - MRS CWL=6 after DFS (due to DLL-off mode) */
	reg |= (0x1 << REG_DFS_CWL_NEXT_STATE_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* Poll - Wait for APLL + ADLLs lock on new frequency */
	do {
		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
			REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
		/* 0x1674 [10:0] - Phy lock status Register */
	} while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);

	/* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
	/* [30:29] = 0 - Data Pup R/W path reset */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	/*
	 * Configure - DRAM Data PHY Read [30], Write [29] path
	 * reset - Release Reset
	 */
	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK);
	/* [30:29] = '11' - Data Pup R/W path reset released */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	/* Registered DIMM support */
	if (dram_info->reg_dimm) {
		/*
		 * Configure - Change register DRAM operating speed
		 * (below 400MHz) - CWA_RC
		 */
		reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
			REG_SDRAM_OPERATION_CWA_RC_OFFS;
		/*
		 * Configure - Change register DRAM operating speed
		 * (below 400MHz) - CWA_DATA
		 */
		reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
			REG_SDRAM_OPERATION_CWA_DATA_OFFS);

		/* Configure - Set Delay - tSTAB */
		reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);

		/* Configure - Issue CWA command with the above parameters */
		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));

		/* 0x1418 - SDRAM Operation Register */
		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

		/* Poll - Wait for CWA operation completion (cmd field == 0) */
		do {
			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
				(REG_SDRAM_OPERATION_CMD_MASK);
		} while (reg);
	}

	/* Configure - Exit Self Refresh */
	/* [2] - DfsSR - clear */
	reg = (reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS));
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices
	 * on all ranks are NOT in self refresh mode
	 */
	do {
		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
	} while (reg);	/* Wait for '0' */

	/* Configure - Issue Refresh command */
	/* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs (active low) */
	reg = REG_SDRAM_OPERATION_CMD_RFRS;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs))
			reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
	}
	/* 0x1418 - SDRAM Operation Register */
	dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

	/* Poll - Wait for Refresh operation completion */
	wait_refresh_op_complete();

	/* Configure - Block new external transactions - Disable */
	reg = reg_read(REG_DFS_ADDR);
	reg &= ~(1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Disable */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Configure - XBAR Retry response during Block to enable
	 * internal access - re-Enable (restores normal operation)
	 */
	reg = reg_read(REG_METAL_MASK_ADDR);
	/* [0] - RetryMask - Enable */
	reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* Update per-CS mode register shadows with the DLL-off timings */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			/* Configure - Set CL */
			reg = reg_read(REG_DDR3_MR0_CS_ADDR +
				       (cs << MR_CS_ADDR_OFFS)) &
				~REG_DDR3_MR0_CL_MASK;
			tmp = 0x4;	/* CL=6 - 0x4 */
			/* MR0 CL encoding is split: A2 low bit, A6:A4 high */
			reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
			reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
			dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
				      (cs << MR_CS_ADDR_OFFS), reg);

			/* Configure - Set CWL */
			reg = reg_read(REG_DDR3_MR2_CS_ADDR +
				       (cs << MR_CS_ADDR_OFFS))
				& ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
			/* CWL=6 - 0x1 */
			reg |= ((0x1) << REG_DDR3_MR2_CWL_OFFS);
			dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
				      (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfuly - new Frequency - ",
		    freq, 1);

	return MV_OK;
#else
	/* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
	u32 reg, freq_par;
	u32 cs = 0;

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
		    freq, 1);

	/* target frequency - 100MHz */
	freq_par = ddr3_get_freq_parameter(freq, 0);

	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

	/* 0x1600 - ODPG control register */
	reg = reg_read(REG_ODPG_CNTRL_ADDR);
	/* [21] = 1 - auto refresh disable */
	reg |= (1 << REG_ODPG_CNTRL_OFFS);
	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);

	/* 0x1670 - PHY lock mask register */
	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
	reg &= REG_PHY_LOCK_MASK_MASK;	/* [11:0] = 0 - mask lock interrupts */
	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

	reg = reg_read(REG_DFS_ADDR);	/* 0x1528 - DFS register */

	/* Disable reconfig */
	reg &= ~0x10;	/* [4] = 0 - no MR-register reconfig after DFS */
	reg |= 0x1;	/* [0] = 1 - DRAM DLL disabled after DFS (DLL-off) */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);	/* [0] - disable */
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* [1] - DFS Block enable */
	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* [2] - DFS Self refresh enable */
	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
	 * All DRAM devices on all ranks are in self refresh mode -
	 * DFS can be executed afterwards
	 */
	do {
		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
	} while (reg == 0x0);	/* Wait for '1' */

	/* Disable ODT on DLL-off mode */
	dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR,
		      REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK);

	/* [11:0] = 0 - keep PHY lock interrupts masked */
	reg = (reg_read(REG_PHY_LOCK_MASK_ADDR) & REG_PHY_LOCK_MASK_MASK);
	/* 0x1670 - PHY lock mask register */
	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

	/* Add delay between entering SR and start ratio modification */
	udelay(1);

	/*
	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
	 * and in the same chance, block reassertions of reset [15:8] and
	 * force reserved bits[7:0].
	 */
	reg = 0x0000FDFF;
	/* 0x18700 - CPU Div CLK control 0 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	/*
	 * RelaX whenever reset is asserted to that channel (good for any case)
	 */
	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

	reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
	/* Full Integer ratio from PLL-out to ddr-clk */
	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
	/* 0x1870C - CPU Div CLK control 3 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);

	/*
	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
	 * All the rest are kept as is (forced, but could be read-modify-write).
	 * This is done now by RMW above.
	 */

	/* Clock is not shut off gracefully - keep it running */
	reg = 0x000FFF02;
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);

	/* Wait before replacing the clock on the DDR Phy Channel. */
	udelay(1);

	/*
	 * This for triggering the frequency update. Bit[24] is the
	 * central control
	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
	 * (smooth transition)
	 * bits [15:8] == mask reset reassertion due to clock modification
	 * to these channels.
	 * bits [7:0] == not in use
	 */
	reg = 0x0102FDFF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	udelay(1);	/* Wait 1usec */

	/*
	 * Poll Div CLK status 0 register - indication that the clocks
	 * are active - 0x18718 [8]
	 */
	do {
		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
	} while (reg == 0);

	/*
	 * Clean the CTRL0, to be ready for next resets and next requests of
	 * ratio modifications.
	 */
	reg = 0x000000FF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	udelay(5);

	/* Switch HCLK Mux to training clk (100Mhz), keep DFS request bit */
	reg = 0x20050000;
	/* 0x18488 - DRAM Init control status register */
	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);

	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
	/* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */

	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
	/* [31:30] = 0 - reset pup data ctrl ADLL */
	/* 0x15EC - DRAM PHY Config register */
	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

	reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK);
	/* [31:30] = '11' - normal pup data ctrl ADLL */
	/* 0x15EC - DRAM PHY Config register */
	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

	udelay(1);	/* Wait 1usec */

	/* 0x1404 - clear bits [4:3] (2T mode) */
	reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7);
	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

	/* Poll Phy lock status register - APLL lock indication - 0x1674 */
	do {
		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
			REG_PHY_LOCK_STATUS_LOCK_MASK;
	} while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);	/* Wait for '0xFFF' */

	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
	/* [30:29] = 0 - Data Pup R/W path reset */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
	/* [30:29] = '11' - Data Pup R/W path reset released */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	udelay(1000);	/* Wait 1msec */

	/* Per enabled CS: issue MR0/MR2 with DLL-off timings, set delays */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			/* Poll - Wait for Refresh operation completion */
			wait_refresh_op_complete();

			/* Config CL and CWL with MR0 and MR2 registers */
			reg = reg_read(REG_DDR3_MR0_ADDR);
			reg &= ~0x74;	/* clear CL field bits [6:4],[2] */
			reg |= (1 << 5);	/* CL field = 4 -> CAS is 6 */
			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
			reg = REG_SDRAM_OPERATION_CMD_MR0 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/* 0x1418 - SDRAM Operation Register */
			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			/* Poll - Wait for MR0 command completion */
			wait_refresh_op_complete();

			reg = reg_read(REG_DDR3_MR2_ADDR);
			reg &= ~0x38;	/* clear CWL field [5:3] */
			reg |= (1 << 3);	/* CWL field = 1 -> CWL is 6 */
			dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
			reg = REG_SDRAM_OPERATION_CMD_MR2 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/* 0x1418 - SDRAM Operation Register */
			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			/* Poll - Wait for MR2 command completion */
			wait_refresh_op_complete();

			/* Set current rd_sample_delay */
			reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
			reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
				 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
			reg |= (5 << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
			dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);

			/* Set current rd_ready_delay */
			reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
			reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
				 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
			reg |= ((6) << (REG_READ_DATA_READY_DELAYS_OFFS * cs));
			dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
		}
	}

	/* [2] - DFS Self refresh disable */
	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* [1] - DFS Block disable */
	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
	 * All DRAM devices on all ranks have exited self refresh mode -
	 * normal operation can resume afterwards
	 */
	do {
		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
	} while (reg);	/* Wait for '0' */

	reg = (reg_read(REG_METAL_MASK_ADDR) | (1 << 0));
	/* [0] - Enable Dunit to crossbar retry */
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* 0x1600 - ODPG control register: re-enable auto refresh */
	reg = reg_read(REG_ODPG_CNTRL_ADDR);
	reg &= ~(1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 0 */
	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);

	/* 0x1670 - PHY lock mask register */
	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
	reg |= ~REG_PHY_LOCK_MASK_MASK;	/* [11:0] = FFF - unmask */
	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfuly - new Frequency - ",
		    freq, 1);

	return MV_OK;
#endif
}
/*
 * Name:     ddr3_dfs_low_2_high
 * Desc:     DFS (Dynamic Frequency Scaling) switch of the DRAM interface
 *           from the low (training) clock up to the target frequency.
 *           Two compile-time flows are provided:
 *           - MV88F78X60 / MV88F672X: ArmadaXP A0 / Avanta-LP flow
 *           - #else:                  Armada370 A0 / ArmadaXP Z1 flow
 *           Both follow the same outline: block new transactions, move the
 *           DRAM into self refresh, reprogram the clock dividers (PLL),
 *           reset and re-lock the PHY ADLLs, exit self refresh and restore
 *           the DDR3 mode registers (CL/CWL/ODT) per enabled chip-select.
 * Args:     freq       - target frequency (DDR_400 / DDR_533 / ...)
 *           ratio_2to1 - non-zero: program a 2:1 Dunit-to-PHY clock ratio,
 *                        zero: 1:1 ratio
 *           dram_info  - DRAM configuration (cl, cwl, cs_ena, mode_2t,
 *                        reg_dimm, target_frequency) used to restore the
 *                        mode registers after the switch
 * Notes:    Every register write goes through dfs_reg_write(); polling
 *           loops below have no timeout, so a stuck PHY lock or self
 *           refresh indication will hang here.
 * Returns:  MV_OK - success, MV_FAIL - fail
 */
int ddr3_dfs_low_2_high(u32 freq, int ratio_2to1, MV_DRAM_INFO *dram_info)
{
#if defined(MV88F78X60) || defined(MV88F672X)
        /* This Flow is relevant for ArmadaXP A0 */
        u32 reg, freq_par, tmp;
        u32 cs = 0;

        DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
                    freq, 1);

        /* target frequency - freq */
        freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);

#if defined(MV88F672X)
        u32 hclk;
        u32 cpu_freq = ddr3_get_cpu_freq();
        get_target_freq(cpu_freq, &tmp, &hclk);
#endif

        /* Configure - DRAM DLL final state after DFS is complete - Enable */
        reg = reg_read(REG_DFS_ADDR);
        /* [0] - DfsDllNextState - Enable */
        reg &= ~(1 << REG_DFS_DLLNEXTSTATE_OFFS);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /*
         * Configure - XBAR Retry response during Block to enable
         * internal access - Disable
         */
        reg = reg_read(REG_METAL_MASK_ADDR);
        /* [0] - RetryMask - Disable */
        reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
        /* 0x14B0 - Dunit MMask Register */
        dfs_reg_write(REG_METAL_MASK_ADDR, reg);

        /* Configure - Block new external transactions - Enable */
        reg = reg_read(REG_DFS_ADDR);
        reg |= (1 << REG_DFS_BLOCK_OFFS);       /* [1] - DfsBlock - Enable */
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* Configure - Move DRAM into Self Refresh */
        reg = reg_read(REG_DFS_ADDR);
        reg |= (1 << REG_DFS_SR_OFFS);  /* [2] - DfsSR - Enable */
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* Poll - Wait for Self Refresh indication */
        do {
                reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
        } while (reg == 0x0);   /* 0x1528 [3] - DfsAtSR - Wait for '1' */

        /* Start of clock change procedure (PLL) */
#if defined(MV88F672X)
        /* avantaLP */
        /* Configure cpupll_clkdiv_reset_mask */
        reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
        reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
        /* 0xE8264[7:0] 0xff CPU Clock Dividers Reset mask */
        dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));

        /* Configure cpu_clkdiv_reload_smooth */
        reg = reg_read(CPU_PLL_CNTRL0);
        reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
        /* 0xE8260 [15:8] 0x2 CPU Clock Dividers Reload Smooth enable */
        dfs_reg_write(CPU_PLL_CNTRL0,
                      reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS));

        /* Configure cpupll_clkdiv_relax_en */
        reg = reg_read(CPU_PLL_CNTRL0);
        reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
        /* 0xE8260 [31:24] 0x2 Relax Enable */
        dfs_reg_write(CPU_PLL_CNTRL0,
                      reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS));

        /* Configure cpupll_clkdiv_ddr_clk_ratio */
        reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
        /*
         * 0xE8268 [13:8] N  Set Training clock:
         * APLL Out Clock (VCO freq) / N = 100 MHz
         */
        reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
        reg |= (freq_par << 8); /* full Integer ratio from PLL-out to ddr-clk */
        dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);

        /* Configure cpupll_clkdiv_reload_ratio */
        reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
        reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
        /* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
        dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
                      reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS));

        udelay(1);

        /* Configure cpupll_clkdiv_reload_ratio */
        reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
        reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
        /* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
        dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);

        udelay(5);
#else
        /*
         * Initial Setup - assure that the "load new ratio" is clear (bit 24)
         * and in the same chance, block reassertions of reset [15:8]
         * and force reserved bits[7:0].
         */
        reg = 0x0000FFFF;
        /* 0x18700 - CPU Div CLK control 0 */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

        /*
         * RelaX whenever reset is asserted to that channel (good for any case)
         */
        reg = 0x0000FF00;
        /* 0x18704 - CPU Div CLK control 0 */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

        /*
         * NOTE(review): this flow programs the divider ratio through
         * REG_CPU_DIV_CLK_CTRL_2_ADDR while using the CTRL_3 field
         * mask/offset (the #else flow further below uses CTRL_3_ADDR
         * directly) - confirm against the register spec.
         */
        reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
                REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
        reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
        /* full Integer ratio from PLL-out to ddr-clk */
        /* 0x1870C - CPU Div CLK control 3 register */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);

        /*
         * Shut off clock enable to the DDRPHY clock channel (this is the "D").
         * All the rest are kept as is (forced, but could be read-modify-write).
         * This is done now by RMW above.
         */
        reg = 0x000FFF02;
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);

        /* Wait before replacing the clock on the DDR Phy Channel. */
        udelay(1);

        reg = 0x0102FDFF;
        /*
         * This for triggering the frequency update. Bit[24] is the
         * central control
         * bits [23:16] == which channels to change ==2 ==> only DDR Phy
         *                 (smooth transition)
         * bits [15:8] == mask reset reassertion due to clock modification
         *                to these channels.
         * bits [7:0] == not in use
         */
        /* 0x18700 - CPU Div CLK control 0 register */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

        udelay(1);

        /*
         * Poll Div CLK status 0 register - indication that the clocks
         * are active - 0x18718 [8]
         */
        do {
                reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
                        (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
        } while (reg == 0);

        reg = 0x000000FF;
        /*
         * Clean the CTRL0, to be ready for next resets and next requests
         * of ratio modifications.
         */
        /* 0x18700 - CPU Div CLK control 0 register */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
#endif
        /* End of clock change procedure (PLL) */

        if (ratio_2to1) {
                /* Configure - Select normal clock for the DDR PHY - Disable */
                reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
                /* [16] - ddr_phy_trn_clk_sel - Disable */
                reg &= ~(1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
                /* 0x18488 - DRAM Init control status register */
                dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
        }

        /*
         * Configure - Set Correct Ratio - according to target ratio
         * parameter - 2:1/1:1
         */
        if (ratio_2to1) {
                /*
                 * [15] - Phy2UnitClkRatio = 1 - Set 2:1 Ratio between
                 * Dunit and Phy
                 */
                reg = reg_read(REG_DDR_IO_ADDR) |
                        (1 << REG_DDR_IO_CLK_RATIO_OFFS);
        } else {
                /*
                 * [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between
                 * Dunit and Phy
                 */
                reg = reg_read(REG_DDR_IO_ADDR) &
                        ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
        }
        dfs_reg_write(REG_DDR_IO_ADDR, reg);    /* 0x1524 - DDR IO Register */

        /* Configure - 2T Mode - Restore original configuration */
        reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
        /* [3:4] 2T - Restore value */
        reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
        reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
                REG_DUNIT_CTRL_LOW_2T_OFFS);
        /* 0x1404 - DDR Controller Control Low Register */
        dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

        /* Configure - Restore CL and CWL - MRS Commands */
        reg = reg_read(REG_DFS_ADDR);
        reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
        reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);

        /*
         * For DDR_400, CL depends on the board's final target frequency;
         * otherwise use the configured CL converted to its MR encoding.
         */
        if (freq == DDR_400) {
                if (dram_info->target_frequency == 0x8)
                        tmp = ddr3_cl_to_valid_cl(5);
                else
                        tmp = ddr3_cl_to_valid_cl(6);
        } else {
                tmp = ddr3_cl_to_valid_cl(dram_info->cl);
        }

        /* [8] - DfsCLNextState */
        reg |= ((tmp & REG_DFS_CL_NEXT_STATE_MASK) << REG_DFS_CL_NEXT_STATE_OFFS);
        if (freq == DDR_400) {
                /* [12] - DfsCWLNextState */
                reg |= (((0) & REG_DFS_CWL_NEXT_STATE_MASK) <<
                        REG_DFS_CWL_NEXT_STATE_OFFS);
        } else {
                /* [12] - DfsCWLNextState */
                reg |= (((dram_info->cwl) & REG_DFS_CWL_NEXT_STATE_MASK) <<
                        REG_DFS_CWL_NEXT_STATE_OFFS);
        }
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* Optional - Configure - DDR3_Rtt_nom_CS# */
        for (cs = 0; cs < MAX_CS; cs++) {
                if (dram_info->cs_ena & (1 << cs)) {
                        reg = reg_read(REG_DDR3_MR1_CS_ADDR +
                                       (cs << MR_CS_ADDR_OFFS));
                        reg &= REG_DDR3_MR1_RTT_MASK;
                        /* ODT (Rtt_nom) value from the static table,
                         * indexed by populated-CS combination */
                        reg |= odt_static[dram_info->cs_ena][cs];
                        dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
                                      (cs << MR_CS_ADDR_OFFS), reg);
                }
        }

        /* Configure - Reset ADLLs - Set Reset */
        reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
        /* [31:30]] - reset pup data ctrl ADLL */
        /* 0x15EC - DRAM PHY Config Register */
        dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

        /* Configure - Reset ADLLs - Release Reset */
        reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
        /* [31:30] - normal pup data ctrl ADLL */
        /* 0x15EC - DRAM PHY Config register */
        dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

        /* Poll - Wait for APLL + ADLLs lock on new frequency */
        do {
                reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
                        REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
                /* 0x1674 [10:0] - Phy lock status Register */
        } while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);

        /* Configure - Reset the PHY SDR clock divider */
        if (ratio_2to1) {
                /* Pup Reset Divider B - Set Reset */
                /* [28] - DataPupRdRST = 0 */
                reg = reg_read(REG_SDRAM_CONFIG_ADDR) &
                        ~(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
                /* [28] - DataPupRdRST = 1 */
                tmp = reg_read(REG_SDRAM_CONFIG_ADDR) |
                        (1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
                /* 0x1400 - SDRAM Configuration register */
                dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

                /* Pup Reset Divider B - Release Reset */
                /* 0x1400 - SDRAM Configuration register */
                dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
        }

        /* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
        reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
        /* [30:29] = 0 - Data Pup R/W path reset */
        /* 0x1400 - SDRAM Configuration register */
        dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

        /*
         * Configure - DRAM Data PHY Read [30], Write [29] path reset -
         * Release Reset
         */
        reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
        /* [30:29] = '11' - Data Pup R/W path reset */
        /* 0x1400 - SDRAM Configuration register */
        dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

        /* Registered DIMM support */
        if (dram_info->reg_dimm) {
                /*
                 * Configure - Change register DRAM operating speed
                 * (DDR3-1333 / DDR3-1600) - CWA_RC
                 */
                reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
                        REG_SDRAM_OPERATION_CWA_RC_OFFS;
                if (freq <= DDR_400) {
                        /*
                         * Configure - Change register DRAM operating speed
                         * (DDR3-800) - CWA_DATA
                         */
                        reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
                                REG_SDRAM_OPERATION_CWA_DATA_OFFS);
                } else if ((freq > DDR_400) && (freq <= DDR_533)) {
                        /*
                         * Configure - Change register DRAM operating speed
                         * (DDR3-1066) - CWA_DATA
                         */
                        reg |= ((0x1 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
                                REG_SDRAM_OPERATION_CWA_DATA_OFFS);
                } else if ((freq > DDR_533) && (freq <= DDR_666)) {
                        /*
                         * Configure - Change register DRAM operating speed
                         * (DDR3-1333) - CWA_DATA
                         */
                        reg |= ((0x2 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
                                REG_SDRAM_OPERATION_CWA_DATA_OFFS);
                } else {
                        /*
                         * Configure - Change register DRAM operating speed
                         * (DDR3-1600) - CWA_DATA
                         */
                        reg |= ((0x3 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
                                REG_SDRAM_OPERATION_CWA_DATA_OFFS);
                }

                /* Configure - Set Delay - tSTAB */
                reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);

                /* Configure - Issue CWA command with the above parameters */
                reg |= (REG_SDRAM_OPERATION_CMD_CWA &
                        ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
                /* 0x1418 - SDRAM Operation Register */
                dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

                /* Poll - Wait for CWA operation completion */
                do {
                        reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
                                REG_SDRAM_OPERATION_CMD_MASK;
                } while (reg);
        }

        /* Configure - Exit Self Refresh */
        /* [2] - DfsSR */
        reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /*
         * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM
         * devices on all ranks are NOT in self refresh mode
         */
        do {
                reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
        } while (reg);  /* Wait for '0' */

        /* Configure - Issue Refresh command */
        /* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
        reg = REG_SDRAM_OPERATION_CMD_RFRS;
        for (cs = 0; cs < MAX_CS; cs++) {
                /* CS select bits are active-low: clear a bit to target it */
                if (dram_info->cs_ena & (1 << cs))
                        reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
        }
        /* 0x1418 - SDRAM Operation Register */
        dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

        /* Poll - Wait for Refresh operation completion */
        wait_refresh_op_complete();

        /* Configure - Block new external transactions - Disable */
        reg = reg_read(REG_DFS_ADDR);
        reg &= ~(1 << REG_DFS_BLOCK_OFFS);      /* [1] - DfsBlock - Disable */
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /*
         * Configure - XBAR Retry response during Block to enable
         * internal access - Disable
         */
        reg = reg_read(REG_METAL_MASK_ADDR);
        /* [0] - RetryMask - Enable */
        reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
        /* 0x14B0 - Dunit MMask Register */
        dfs_reg_write(REG_METAL_MASK_ADDR, reg);

        /* Re-program per-CS mode registers for the new frequency */
        for (cs = 0; cs < MAX_CS; cs++) {
                if (dram_info->cs_ena & (1 << cs)) {
                        /* Configure - Set CL */
                        reg = reg_read(REG_DDR3_MR0_CS_ADDR +
                                       (cs << MR_CS_ADDR_OFFS)) &
                                ~REG_DDR3_MR0_CL_MASK;
                        if (freq == DDR_400)
                                tmp = ddr3_cl_to_valid_cl(6);
                        else
                                tmp = ddr3_cl_to_valid_cl(dram_info->cl);
                        /* MR0 CL encoding is split: bit0 of the encoded
                         * value goes to CL_OFFS, bits [3:1] to CL_HIGH */
                        reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
                        reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
                        dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
                                      (cs << MR_CS_ADDR_OFFS), reg);

                        /* Configure - Set CWL */
                        reg = reg_read(REG_DDR3_MR2_CS_ADDR +
                                       (cs << MR_CS_ADDR_OFFS)) &
                                ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
                        if (freq == DDR_400)
                                reg |= ((0) << REG_DDR3_MR2_CWL_OFFS);
                        else
                                reg |= ((dram_info->cwl) << REG_DDR3_MR2_CWL_OFFS);
                        dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
                                      (cs << MR_CS_ADDR_OFFS), reg);
                }
        }

        DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfuly - new Frequency - ",
                    freq, 1);

        return MV_OK;
#else
        /* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
        u32 reg, freq_par, tmp;
        u32 cs = 0;

        DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
                    freq, 1);

        /* target frequency - freq */
        freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);

        reg = 0x0000FF00;
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

        /* 0x1600 - PHY lock mask register */
        reg = reg_read(REG_ODPG_CNTRL_ADDR);
        reg |= (1 << REG_ODPG_CNTRL_OFFS);      /* [21] = 1 */
        dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);

        /* 0x1670 - PHY lock mask register */
        reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
        reg &= REG_PHY_LOCK_MASK_MASK;  /* [11:0] = 0 */
        dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

        /* Enable reconfig MR Registers after DFS */
        reg = reg_read(REG_DFS_ADDR);   /* 0x1528 - DFS register */
        /* [4] - Disable - reconfig MR registers after DFS_ERG */
        reg &= ~0x11;   /* clears bits [4] and [0] in one mask */
        /* [0] - Enable - DRAM DLL after DFS */
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* Disable DRAM Controller to crossbar retry */
        /* [0] - disable */
        reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);
        /* 0x14B0 - Dunit MMask Register */
        dfs_reg_write(REG_METAL_MASK_ADDR, reg);

        /* Enable DRAM Blocking */
        /* [1] - DFS Block enable */
        reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* Enable Self refresh */
        /* [2] - DFS Self refresh enable */
        reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /*
         * Poll DFS Register - All DRAM devices on all ranks are in
         * self refresh mode - DFS can be executed afterwards
         */
        /* 0x1528 [3] - DfsAtSR */
        do {
                reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
        } while (reg == 0x0);   /* Wait for '1' */

        /*
         * Set Correct Ratio - if freq>MARGIN_FREQ use 2:1 ratio
         * else use 1:1 ratio
         */
        if (ratio_2to1) {
                /* [15] = 1 - Set 2:1 Ratio between Dunit and Phy */
                reg = reg_read(REG_DDR_IO_ADDR) |
                        (1 << REG_DDR_IO_CLK_RATIO_OFFS);
        } else {
                /* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
                reg = reg_read(REG_DDR_IO_ADDR) &
                        ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
        }
        dfs_reg_write(REG_DDR_IO_ADDR, reg);    /* 0x1524 - DDR IO Register */

        /* Switch HCLK Mux from (100Mhz) [16]=0, keep DFS request bit */
        reg = 0x20040000;
        /*
         * [29] - training logic request DFS, [28:27] -
         * preload patterns frequency [18]
         */
        /* 0x18488 - DRAM Init control status register */
        dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);

        /* Add delay between entering SR and start ratio modification */
        udelay(1);

        /*
         * Initial Setup - assure that the "load new ratio" is clear (bit 24)
         * and in the same chance, block reassertions of reset [15:8] and
         * force reserved bits[7:0].
         */
        reg = 0x0000FFFF;
        /* 0x18700 - CPU Div CLK control 0 */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

        /*
         * RelaX whenever reset is asserted to that channel (good for any case)
         */
        reg = 0x0000FF00;
        /* 0x18704 - CPU Div CLK control 0 */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

        reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
                REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
        reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
        /* Full Integer ratio from PLL-out to ddr-clk */
        /* 0x1870C - CPU Div CLK control 3 register */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);

        /*
         * Shut off clock enable to the DDRPHY clock channel (this is the "D").
         * All the rest are kept as is (forced, but could be read-modify-write).
         * This is done now by RMW above.
         */
        reg = 0x000FFF02;
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);

        /* Wait before replacing the clock on the DDR Phy Channel. */
        udelay(1);

        reg = 0x0102FDFF;
        /*
         * This for triggering the frequency update. Bit[24] is the
         * central control
         * bits [23:16] == which channels to change ==2 ==> only DDR Phy
         *                 (smooth transition)
         * bits [15:8] == mask reset reassertion due to clock modification
         *                to these channels.
         * bits [7:0] == not in use
         */
        /* 0x18700 - CPU Div CLK control 0 register */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

        udelay(1);

        /*
         * Poll Div CLK status 0 register - indication that the clocks are
         * active - 0x18718 [8]
         */
        do {
                reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
                        (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
        } while (reg == 0);

        reg = 0x000000FF;
        /*
         * Clean the CTRL0, to be ready for next resets and next requests of
         * ratio modifications.
         */
        /* 0x18700 - CPU Div CLK control 0 register */
        dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

        udelay(5);

        if (ratio_2to1) {
                /* Pup Reset Divider B - Set Reset */
                /* [28] = 0 - Pup Reset Divider B */
                reg = reg_read(REG_SDRAM_CONFIG_ADDR) & ~(1 << 28);
                /* [28] = 1 - Pup Reset Divider B */
                tmp = reg_read(REG_SDRAM_CONFIG_ADDR) | (1 << 28);
                /* 0x1400 - SDRAM Configuration register */
                dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

                /* Pup Reset Divider B - Release Reset */
                /* 0x1400 - SDRAM Configuration register */
                dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
        }

        /* DRAM Data PHYs ADLL Reset - Set Reset */
        reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK);
        /* [31:30]] - reset pup data ctrl ADLL */
        /* 0x15EC - DRAM PHY Config Register */
        dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

        udelay(25);

        /* APLL lock indication - Poll Phy lock status Register - 0x1674 [9] */
        do {
                reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
                        (1 << REG_PHY_LOCK_STATUS_LOCK_OFFS);
        } while (reg == 0);

        /* DRAM Data PHYs ADLL Reset - Release Reset */
        reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
        /* [31:30] - normal pup data ctrl ADLL */
        /* 0x15EC - DRAM PHY Config register */
        dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

        udelay(10000);  /* Wait 10msec */

        /*
         * APLL lock indication - Poll Phy lock status Register - 0x1674 [11:0]
         */
        do {
                reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
                        REG_PHY_LOCK_STATUS_LOCK_MASK;
        } while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);

        /* DRAM Data PHY Read [30], Write [29] path reset - Set Reset */
        reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
        /* [30:29] = 0 - Data Pup R/W path reset */
        /* 0x1400 - SDRAM Configuration register */
        dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

        /* DRAM Data PHY Read [30], Write [29] path reset - Release Reset */
        reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
        /* [30:29] = '11' - Data Pup R/W path reset */
        /* 0x1400 - SDRAM Configuration register */
        dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

        /* Disable DFS Reconfig */
        reg = reg_read(REG_DFS_ADDR) & ~(1 << 4);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* [2] - DFS Self refresh disable */
        reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /*
         * Poll DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices on
         * all ranks are NOT in self refresh mode
         */
        do {
                reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
        } while (reg);  /* Wait for '0' */

        /* 0x1404 */
        reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7) | 0x2;
        /* Configure - 2T Mode - Restore original configuration */
        /* [3:4] 2T - Restore value */
        reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
        reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
                REG_DUNIT_CTRL_LOW_2T_OFFS);
        dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

        udelay(1);      /* Wait 1us */

        /*
         * Per enabled chip-select: re-enable the DRAM DLL, reset it,
         * then restore CL (MR0), CWL (MR2) and the controller's read
         * sample/ready delays for the new frequency.
         */
        for (cs = 0; cs < MAX_CS; cs++) {
                if (dram_info->cs_ena & (1 << cs)) {
                        reg = (reg_read(REG_DDR3_MR1_ADDR));
                        /* DLL Enable */
                        reg &= ~(1 << REG_DDR3_MR1_DLL_ENA_OFFS);
                        dfs_reg_write(REG_DDR3_MR1_ADDR, reg);

                        /* Issue MRS Command to current cs */
                        reg = REG_SDRAM_OPERATION_CMD_MR1 &
                                ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
                        /*
                         * [3-0] = 0x4 - MR1 Command, [11-8] -
                         * enable current cs
                         */
                        /* 0x1418 - SDRAM Operation Register */
                        dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

                        /* Poll - Wait for Refresh operation completion */
                        wait_refresh_op_complete();

                        /* DLL Reset - MR0 */
                        reg = reg_read(REG_DDR3_MR0_ADDR);
                        dfs_reg_write(REG_DDR3_MR0_ADDR, reg);

                        /* Issue MRS Command to current cs */
                        reg = REG_SDRAM_OPERATION_CMD_MR0 &
                                ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
                        /*
                         * [3-0] = 0x4 - MR1 Command, [11-8] -
                         * enable current cs
                         */
                        /* 0x1418 - SDRAM Operation Register */
                        dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

                        /* Poll - Wait for Refresh operation completion */
                        wait_refresh_op_complete();

                        reg = reg_read(REG_DDR3_MR0_ADDR);
                        reg &= ~0x74;   /* CL [3:0]; [6:4],[2] */

                        if (freq == DDR_400)
                                tmp = ddr3_cl_to_valid_cl(6) & 0xF;
                        else
                                tmp = ddr3_cl_to_valid_cl(dram_info->cl) & 0xF;

                        /* MR0 CL split: encoded bit0 -> bit2, bits[3:1] -> [6:4] */
                        reg |= ((tmp & 0x1) << 2);
                        reg |= ((tmp >> 1) << 4);       /* to bit 4 */
                        dfs_reg_write(REG_DDR3_MR0_ADDR, reg);

                        reg = REG_SDRAM_OPERATION_CMD_MR0 &
                                ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
                        /* 0x1418 - SDRAM Operation Register */
                        dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

                        /* Poll - Wait for Refresh operation completion */
                        wait_refresh_op_complete();

                        reg = reg_read(REG_DDR3_MR2_ADDR);
                        reg &= ~0x38;   /* CWL [5:3] */
                        /* CWL = 0 ,for 400 MHg is 5 */
                        if (freq != DDR_400)
                                reg |= dram_info->cwl << REG_DDR3_MR2_CWL_OFFS;
                        dfs_reg_write(REG_DDR3_MR2_ADDR, reg);

                        reg = REG_SDRAM_OPERATION_CMD_MR2 &
                                ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
                        /* 0x1418 - SDRAM Operation Register */
                        dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

                        /* Poll - Wait for Refresh operation completion */
                        wait_refresh_op_complete();

                        /* Set current rd_sample_delay */
                        reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
                        reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
                                 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
                        reg |= (dram_info->cl <<
                                (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
                        dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);

                        /* Set current rd_ready_delay */
                        reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
                        reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
                                 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
                        /*
                         * NOTE(review): the set uses the SAMPLE offset while
                         * the clear above uses the READY offset - correct
                         * only if both OFFS values are equal; confirm.
                         */
                        reg |= ((dram_info->cl + 1) <<
                                (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
                        dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
                }
        }

        /* Enable ODT on DLL-on mode */
        dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, 0);

        /* [1] - DFS Block disable */
        reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
        dfs_reg_write(REG_DFS_ADDR, reg);       /* 0x1528 - DFS register */

        /* Change DDR frequency to 100MHz procedure: */
        /* 0x1600 - PHY lock mask register */
        reg = reg_read(REG_ODPG_CNTRL_ADDR);
        reg &= ~(1 << REG_ODPG_CNTRL_OFFS);     /* [21] = 0 */
        dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);

        /* Change DDR frequency to 100MHz procedure: */
        /* 0x1670 - PHY lock mask register */
        reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
        reg |= ~REG_PHY_LOCK_MASK_MASK; /* [11:0] = FFF */
        dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

        /* Re-enable Dunit to crossbar retry */
        reg = reg_read(REG_METAL_MASK_ADDR) | (1 << 0); /* [0] - disable */
        /* 0x14B0 - Dunit MMask Register */
        dfs_reg_write(REG_METAL_MASK_ADDR, reg);

        DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfuly - new Frequency - ",
                    freq, 1);

        return MV_OK;
#endif
}