ddr3_dfs.c 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552
  1. /*
  2. * Copyright (C) Marvell International Ltd. and its affiliates
  3. *
  4. * SPDX-License-Identifier: GPL-2.0
  5. */
  6. #include <common.h>
  7. #include <i2c.h>
  8. #include <spl.h>
  9. #include <asm/io.h>
  10. #include <asm/arch/cpu.h>
  11. #include <asm/arch/soc.h>
  12. #include "ddr3_hw_training.h"
  13. /*
  14. * Debug
  15. */
  16. #define DEBUG_DFS_C(s, d, l) \
  17. DEBUG_DFS_S(s); DEBUG_DFS_D(d, l); DEBUG_DFS_S("\n")
  18. #define DEBUG_DFS_FULL_C(s, d, l) \
  19. DEBUG_DFS_FULL_S(s); DEBUG_DFS_FULL_D(d, l); DEBUG_DFS_FULL_S("\n")
  20. #ifdef MV_DEBUG_DFS
  21. #define DEBUG_DFS_S(s) puts(s)
  22. #define DEBUG_DFS_D(d, l) printf("%x", d)
  23. #else
  24. #define DEBUG_DFS_S(s)
  25. #define DEBUG_DFS_D(d, l)
  26. #endif
  27. #ifdef MV_DEBUG_DFS_FULL
  28. #define DEBUG_DFS_FULL_S(s) puts(s)
  29. #define DEBUG_DFS_FULL_D(d, l) printf("%x", d)
  30. #else
  31. #define DEBUG_DFS_FULL_S(s)
  32. #define DEBUG_DFS_FULL_D(d, l)
  33. #endif
  34. #if defined(MV88F672X)
  35. extern u8 div_ratio[CLK_VCO][CLK_DDR];
  36. extern void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps);
  37. #else
  38. extern u16 odt_dynamic[ODT_OPT][MAX_CS];
  39. extern u8 div_ratio1to1[CLK_CPU][CLK_DDR];
  40. extern u8 div_ratio2to1[CLK_CPU][CLK_DDR];
  41. #endif
  42. extern u16 odt_static[ODT_OPT][MAX_CS];
  43. extern u32 cpu_fab_clk_to_hclk[FAB_OPT][CLK_CPU];
  44. extern u32 ddr3_get_vco_freq(void);
  45. u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1);
  46. #ifdef MV_DEBUG_DFS
/*
 * Debug-build variant of dfs_reg_write(): traces every DFS register
 * write to the console before issuing it.
 *
 * addr - register offset relative to the internal-registers window
 * val  - value to write
 *
 * The actual access goes through writel() at INTER_REGS_BASE + addr,
 * identical to the release variant below.
 */
static inline void dfs_reg_write(u32 addr, u32 val)
{
printf("\n write reg 0x%08x = 0x%08x", addr, val);
writel(val, INTER_REGS_BASE + addr);
}
  52. #else
/*
 * Write a DFS/DRAM controller register.
 *
 * addr - register offset relative to the internal-registers window
 * val  - value to write
 *
 * Thin wrapper over writel() so debug builds (see the MV_DEBUG_DFS
 * variant above) can trace every access through one choke point.
 */
static inline void dfs_reg_write(u32 addr, u32 val)
{
writel(val, INTER_REGS_BASE + addr);
}
  57. #endif
  58. static void wait_refresh_op_complete(void)
  59. {
  60. u32 reg;
  61. /* Poll - Wait for Refresh operation completion */
  62. do {
  63. reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
  64. REG_SDRAM_OPERATION_CMD_RFRS_DONE;
  65. } while (reg); /* Wait for '0' */
  66. }
  67. /*
  68. * Name: ddr3_get_freq_parameter
  69. * Desc: Finds CPU/DDR frequency ratio according to Sample@reset and table.
  70. * Args: target_freq - target frequency
  71. * Notes:
  72. * Returns: freq_par - the ratio parameter
  73. */
  74. u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1)
  75. {
  76. u32 ui_vco_freq, freq_par;
  77. ui_vco_freq = ddr3_get_vco_freq();
  78. #if defined(MV88F672X)
  79. freq_par = div_ratio[ui_vco_freq][target_freq];
  80. #else
  81. /* Find the ratio between PLL frequency and ddr-clk */
  82. if (ratio_2to1)
  83. freq_par = div_ratio2to1[ui_vco_freq][target_freq];
  84. else
  85. freq_par = div_ratio1to1[ui_vco_freq][target_freq];
  86. #endif
  87. return freq_par;
  88. }
  89. /*
  90. * Name: ddr3_dfs_high_2_low
  91. * Desc:
  92. * Args: freq - target frequency
  93. * Notes:
  94. * Returns: MV_OK - success, MV_FAIL - fail
  95. */
int ddr3_dfs_high_2_low(u32 freq, MV_DRAM_INFO *dram_info)
{
#if defined(MV88F78X60) || defined(MV88F672X)
	/* This Flow is relevant for ArmadaXP A0 */
	u32 reg, freq_par, tmp;
	u32 cs = 0;

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
		    freq, 1);

	/* target frequency - 100MHz */
	freq_par = ddr3_get_freq_parameter(freq, 0);

#if defined(MV88F672X)
	u32 hclk;
	u32 cpu_freq = ddr3_get_cpu_freq();
	get_target_freq(cpu_freq, &tmp, &hclk);
#endif

	/*
	 * Configure - DRAM DLL final state after DFS is complete - Disable
	 * (low frequency runs in DLL-off mode)
	 */
	reg = reg_read(REG_DFS_ADDR);
	/* [0] - DfsDllNextState - Disable */
	reg |= (1 << REG_DFS_DLLNEXTSTATE_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Configure - XBAR Retry response during Block to enable internal
	 * access - Disable
	 */
	reg = reg_read(REG_METAL_MASK_ADDR);
	/* [0] - RetryMask - Disable */
	reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* Configure - Block new external transactions - Enable */
	reg = reg_read(REG_DFS_ADDR);
	reg |= (1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Enable */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* Registered DIMM support */
	if (dram_info->reg_dimm) {
		/*
		 * Configure - Disable Register DIMM CKE Power
		 * Down mode - CWA_RC
		 */
		reg = (0x9 & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
			REG_SDRAM_OPERATION_CWA_RC_OFFS;
		/*
		 * Configure - Disable Register DIMM CKE Power
		 * Down mode - CWA_DATA
		 */
		reg |= ((0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
		/*
		 * Configure - Disable Register DIMM CKE Power
		 * Down mode - Set Delay - tMRD
		 */
		reg |= (0 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
		/* Configure - Issue CWA command with the above parameters */
		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
		/* 0x1418 - SDRAM Operation Register */
		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

		/* Poll - Wait for CWA operation completion */
		do {
			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
				(REG_SDRAM_OPERATION_CMD_MASK);
		} while (reg);

		/* Configure - Disable outputs floating during Self Refresh */
		reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
		/* [15] - SRFloatEn - Disable */
		reg &= ~(1 << REG_REGISTERED_DRAM_CTRL_SR_FLOAT_OFFS);
		/* 0x16D0 - DDR3 Registered DRAM Control */
		dfs_reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
	}

	/* Optional - Configure - DDR3_Rtt_nom_CS# (clear Rtt_nom bits) */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			reg = reg_read(REG_DDR3_MR1_CS_ADDR +
				       (cs << MR_CS_ADDR_OFFS));
			reg &= REG_DDR3_MR1_RTT_MASK;
			dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
				      (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	/* Configure - Move DRAM into Self Refresh */
	reg = reg_read(REG_DFS_ADDR);
	reg |= (1 << REG_DFS_SR_OFFS);	/* [2] - DfsSR - Enable */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* Poll - Wait for Self Refresh indication */
	do {
		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
	} while (reg == 0x0);	/* 0x1528 [3] - DfsAtSR - Wait for '1' */

	/* Start of clock change procedure (PLL) */
#if defined(MV88F672X)
	/* avantaLP */
	/* Configure cpupll_clkdiv_reset_mask */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
	/* 0xE8264[7:0] 0xff CPU Clock Dividers Reset mask */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));

	/* Configure cpu_clkdiv_reload_smooth */
	reg = reg_read(CPU_PLL_CNTRL0);
	reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
	/* 0xE8260 [15:8] 0x2 CPU Clock Dividers Reload Smooth enable */
	dfs_reg_write(CPU_PLL_CNTRL0,
		      (reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS)));

	/* Configure cpupll_clkdiv_relax_en */
	reg = reg_read(CPU_PLL_CNTRL0);
	reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
	/* 0xE8260 [31:24] 0x2 Relax Enable */
	dfs_reg_write(CPU_PLL_CNTRL0,
		      (reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS)));

	/* Configure cpupll_clkdiv_ddr_clk_ratio */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
	/*
	 * 0xE8268 [13:8] N Set Training clock:
	 * APLL Out Clock (VCO freq) / N = 100 MHz
	 */
	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
	reg |= (freq_par << 8);	/* full Integer ratio from PLL-out to ddr-clk */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);

	/* Configure cpupll_clkdiv_reload_ratio */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
	/* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
		      (reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS)));

	udelay(1);

	/* Configure cpupll_clkdiv_reload_ratio */
	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
	/* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);

	udelay(5);
#else
	/*
	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
	 * and in the same chance, block reassertions of reset [15:8] and
	 * force reserved bits[7:0].
	 */
	reg = 0x0000FDFF;
	/* 0x18700 - CPU Div CLK control 0 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	/*
	 * RelaX whenever reset is asserted to that channel
	 * (good for any case)
	 */
	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

	/*
	 * NOTE(review): the comment below says "control 3" but the
	 * address macro used is CTRL_2 - confirm against the register map.
	 */
	reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
	/* full Integer ratio from PLL-out to ddr-clk */
	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
	/* 0x1870C - CPU Div CLK control 3 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);

	/*
	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
	 * All the rest are kept as is (forced, but could be read-modify-write).
	 * This is done now by RMW above.
	 */

	/* Clock is not shut off gracefully - keep it running */
	reg = 0x000FFF02;
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);

	/* Wait before replacing the clock on the DDR Phy Channel. */
	udelay(1);

	/*
	 * This for triggering the frequency update. Bit[24] is the
	 * central control
	 * bits [23:16] == which channels to change ==2 ==>
	 *                 only DDR Phy (smooth transition)
	 * bits [15:8] == mask reset reassertion due to clock modification
	 *                to these channels.
	 * bits [7:0] == not in use
	 */
	reg = 0x0102FDFF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
	udelay(1);		/* Wait 1usec */

	/*
	 * Poll Div CLK status 0 register - indication that the clocks
	 * are active - 0x18718 [8]
	 */
	do {
		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
	} while (reg == 0);

	/*
	 * Clean the CTRL0, to be ready for next resets and next requests
	 * of ratio modifications.
	 */
	reg = 0x000000FF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
	udelay(5);
#endif
	/* End of clock change procedure (PLL) */

	/* Configure - Select normal clock for the DDR PHY - Enable */
	reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
	/* [16] - ddr_phy_trn_clk_sel - Enable */
	reg |= (1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
	/* 0x18488 - DRAM Init control status register */
	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);

	/* Configure - Set Correct Ratio - 1:1 */
	/* [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between Dunit and Phy */
	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */

	/* Configure - 2T Mode - Restore original configuration */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	/* [3:4] 2T - 1T Mode - low freq */
	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
	/* 0x1404 - DDR Controller Control Low Register */
	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

	/* Configure - Restore CL and CWL - MRS Commands */
	reg = reg_read(REG_DFS_ADDR);
	reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
	reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
	/* [8] - DfsCLNextState - MRS CL=6 after DFS (due to DLL-off mode) */
	reg |= (0x4 << REG_DFS_CL_NEXT_STATE_OFFS);
	/* [12] - DfsCWLNextState - MRS CWL=6 after DFS (due to DLL-off mode) */
	reg |= (0x1 << REG_DFS_CWL_NEXT_STATE_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* Poll - Wait for APLL + ADLLs lock on new frequency */
	do {
		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
			REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
		/* 0x1674 [10:0] - Phy lock status Register */
	} while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);

	/* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
	/* [30:29] = 0 - Data Pup R/W path reset */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	/*
	 * Configure - DRAM Data PHY Read [30], Write [29] path
	 * reset - Release Reset
	 */
	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK);
	/* [30:29] = '11' - Data Pup R/W path reset */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	/* Registered DIMM support */
	if (dram_info->reg_dimm) {
		/*
		 * Configure - Change register DRAM operating speed
		 * (below 400MHz) - CWA_RC
		 */
		reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
			REG_SDRAM_OPERATION_CWA_RC_OFFS;
		/*
		 * Configure - Change register DRAM operating speed
		 * (below 400MHz) - CWA_DATA
		 */
		reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
		/* Configure - Set Delay - tSTAB */
		reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
		/* Configure - Issue CWA command with the above parameters */
		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
		/* 0x1418 - SDRAM Operation Register */
		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

		/* Poll - Wait for CWA operation completion */
		do {
			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
				(REG_SDRAM_OPERATION_CMD_MASK);
		} while (reg);
	}

	/* Configure - Exit Self Refresh */
	/* [2] - DfsSR */
	reg = (reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS));
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices
	 * on all ranks are NOT in self refresh mode
	 */
	do {
		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
	} while (reg);		/* Wait for '0' */

	/* Configure - Issue Refresh command */
	/* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
	reg = REG_SDRAM_OPERATION_CMD_RFRS;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs))
			reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
	}
	/* 0x1418 - SDRAM Operation Register */
	dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

	/* Poll - Wait for Refresh operation completion */
	wait_refresh_op_complete();

	/* Configure - Block new external transactions - Disable */
	reg = reg_read(REG_DFS_ADDR);
	reg &= ~(1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Disable */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Configure - XBAR Retry response during Block to enable
	 * internal access - Enable (restore normal retry behaviour)
	 */
	reg = reg_read(REG_METAL_MASK_ADDR);
	/* [0] - RetryMask - Enable */
	reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* Update the MR shadow registers to match the CL/CWL MRS'ed above */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			/* Configure - Set CL */
			reg = reg_read(REG_DDR3_MR0_CS_ADDR +
				       (cs << MR_CS_ADDR_OFFS)) &
				~REG_DDR3_MR0_CL_MASK;
			tmp = 0x4;	/* CL=6 - 0x4 */
			reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
			reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
			dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
				      (cs << MR_CS_ADDR_OFFS), reg);

			/* Configure - Set CWL */
			reg = reg_read(REG_DDR3_MR2_CS_ADDR +
				       (cs << MR_CS_ADDR_OFFS))
				& ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
			/* CWL=6 - 0x1 */
			reg |= ((0x1) << REG_DDR3_MR2_CWL_OFFS);
			dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
				      (cs << MR_CS_ADDR_OFFS), reg);
		}
	}

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfuly - new Frequency - ",
		    freq, 1);

	return MV_OK;
#else
	/* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
	u32 reg, freq_par;
	u32 cs = 0;

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
		    freq, 1);

	/* target frequency - 100MHz */
	freq_par = ddr3_get_freq_parameter(freq, 0);

	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

	/* 0x1600 - ODPG_CNTRL_Control */
	reg = reg_read(REG_ODPG_CNTRL_ADDR);
	/* [21] = 1 - auto refresh disable */
	reg |= (1 << REG_ODPG_CNTRL_OFFS);
	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);

	/* 0x1670 - PHY lock mask register */
	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
	reg &= REG_PHY_LOCK_MASK_MASK;	/* [11:0] = 0 */
	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

	reg = reg_read(REG_DFS_ADDR);	/* 0x1528 - DFS register */

	/* Disable reconfig */
	reg &= ~0x10;	/* [4] = 0 - reconfig MR registers after DFS off */
	reg |= 0x1;	/* [0] - DRAM DLL disabled after DFS */
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);	/* [0] - disable */
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* [1] - DFS Block enable */
	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* [2] - DFS Self refresh enable */
	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
	 * All DRAM devices on all ranks are in self refresh mode -
	 * DFS can be executed afterwards
	 */
	do {
		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
	} while (reg == 0x0);	/* Wait for '1' */

	/* Disable ODT on DLL-off mode */
	dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR,
		      REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK);

	/* [11:0] = 0 */
	reg = (reg_read(REG_PHY_LOCK_MASK_ADDR) & REG_PHY_LOCK_MASK_MASK);
	/* 0x1670 - PHY lock mask register */
	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

	/* Add delay between entering SR and start ratio modification */
	udelay(1);

	/*
	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
	 * and in the same chance, block reassertions of reset [15:8] and
	 * force reserved bits[7:0].
	 */
	reg = 0x0000FDFF;
	/* 0x18700 - CPU Div CLK control 0 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);

	/*
	 * RelaX whenever reset is asserted to that channel (good for any case)
	 */
	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);

	reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
	/* Full Integer ratio from PLL-out to ddr-clk */
	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
	/* 0x1870C - CPU Div CLK control 3 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);

	/*
	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
	 * All the rest are kept as is (forced, but could be read-modify-write).
	 * This is done now by RMW above.
	 */

	/* Clock is not shut off gracefully - keep it running */
	reg = 0x000FFF02;
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);

	/* Wait before replacing the clock on the DDR Phy Channel. */
	udelay(1);

	/*
	 * This for triggering the frequency update. Bit[24] is the
	 * central control
	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
	 *                 (smooth transition)
	 * bits [15:8] == mask reset reassertion due to clock modification
	 *                to these channels.
	 * bits [7:0] == not in use
	 */
	reg = 0x0102FDFF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
	udelay(1);		/* Wait 1usec */

	/*
	 * Poll Div CLK status 0 register - indication that the clocks
	 * are active - 0x18718 [8]
	 */
	do {
		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
	} while (reg == 0);

	/*
	 * Clean the CTRL0, to be ready for next resets and next requests of
	 * ratio modifications.
	 */
	reg = 0x000000FF;
	/* 0x18700 - CPU Div CLK control 0 register */
	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
	udelay(5);

	/* Switch HCLK Mux to training clk (100Mhz), keep DFS request bit */
	reg = 0x20050000;
	/* 0x18488 - DRAM Init control status register */
	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);

	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
	/* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */

	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
	/* [31:30] - reset pup data ctrl ADLL */
	/* 0x15EC - DRAM PHY Config register */
	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

	reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK);
	/* [31:30] - normal pup data ctrl ADLL */
	/* 0x15EC - DRAM PHY Config register */
	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);

	udelay(1);		/* Wait 1usec */

	/* 0x1404 - clear bits [4:3] (2T mode) of Dunit Control Low */
	reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7);
	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);

	/* Poll Phy lock status register - APLL lock indication - 0x1674 */
	do {
		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
			REG_PHY_LOCK_STATUS_LOCK_MASK;
	} while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);	/* Wait for '0xFFF' */

	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
	/* [30:29] = 0 - Data Pup R/W path reset */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
	/* [30:29] = '11' - Data Pup R/W path reset release */
	/* 0x1400 - SDRAM Configuration register */
	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);

	udelay(1000);		/* Wait 1msec */

	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			/* Poll - Wait for Refresh operation completion */
			wait_refresh_op_complete();

			/* Config CL and CWL with MR0 and MR2 registers */
			reg = reg_read(REG_DDR3_MR0_ADDR);
			reg &= ~0x74;	/* CL [3:0]; [6:4],[2] */
			reg |= (1 << 5);	/* CL = 4, CAS is 6 */
			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
			reg = REG_SDRAM_OPERATION_CMD_MR0 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/* 0x1418 - SDRAM Operation Register */
			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			/* Poll - Wait for Refresh operation completion */
			wait_refresh_op_complete();

			reg = reg_read(REG_DDR3_MR2_ADDR);
			reg &= ~0x38;	/* CWL [5:3] */
			reg |= (1 << 3);	/* CWL = 1, CWL is 6 */
			dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
			reg = REG_SDRAM_OPERATION_CMD_MR2 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/* 0x1418 - SDRAM Operation Register */
			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			/* Poll - Wait for Refresh operation completion */
			wait_refresh_op_complete();

			/* Set current rd_sample_delay */
			reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
			reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
				 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
			reg |= (5 << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
			dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);

			/* Set current rd_ready_delay */
			reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
			reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
				 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
			reg |= ((6) << (REG_READ_DATA_READY_DELAYS_OFFS * cs));
			dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
		}
	}

	/* [2] - DFS Self refresh disable */
	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/* [1] - DFS Block disable */
	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */

	/*
	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
	 * All DRAM devices on all ranks are NOT in self refresh mode -
	 * normal operation can resume afterwards
	 */
	do {
		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
	} while (reg);	/* Wait for '0' */

	reg = (reg_read(REG_METAL_MASK_ADDR) | (1 << 0));
	/* [0] - Enable Dunit to crossbar retry */
	/* 0x14B0 - Dunit MMask Register */
	dfs_reg_write(REG_METAL_MASK_ADDR, reg);

	/* 0x1600 - ODPG control register */
	reg = reg_read(REG_ODPG_CNTRL_ADDR);
	reg &= ~(1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 0 - auto refresh re-enable */
	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);

	/* 0x1670 - PHY lock mask register */
	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
	reg |= ~REG_PHY_LOCK_MASK_MASK;	/* [11:0] = FFF */
	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);

	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfuly - new Frequency - ",
		    freq, 1);

	return MV_OK;
#endif
}
/*
 * Name:    ddr3_dfs_low_2_high
 * Desc:    Raise the DDR3 interface from the low (training) frequency to the
 *          target operating frequency: enter self refresh, reprogram the
 *          PLL/clock dividers, relock the ADLLs, then restore CL/CWL and the
 *          mode registers before exiting self refresh.
 * Args:    freq - target frequency
 *          ratio_2to1 - non-zero selects a 2:1 Dunit-to-PHY clock ratio
 *          dram_info - DRAM configuration (CL/CWL, enabled CS, 2T mode, ...)
 * Notes:
 * Returns: MV_OK - success, MV_FAIL - fail
 */
  638. int ddr3_dfs_low_2_high(u32 freq, int ratio_2to1, MV_DRAM_INFO *dram_info)
  639. {
  640. #if defined(MV88F78X60) || defined(MV88F672X)
  641. /* This Flow is relevant for ArmadaXP A0 */
  642. u32 reg, freq_par, tmp;
  643. u32 cs = 0;
  644. DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
  645. freq, 1);
  646. /* target frequency - freq */
  647. freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
  648. #if defined(MV88F672X)
  649. u32 hclk;
  650. u32 cpu_freq = ddr3_get_cpu_freq();
  651. get_target_freq(cpu_freq, &tmp, &hclk);
  652. #endif
  653. /* Configure - DRAM DLL final state after DFS is complete - Enable */
  654. reg = reg_read(REG_DFS_ADDR);
  655. /* [0] - DfsDllNextState - Enable */
  656. reg &= ~(1 << REG_DFS_DLLNEXTSTATE_OFFS);
  657. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  658. /*
  659. * Configure - XBAR Retry response during Block to enable
  660. * internal access - Disable
  661. */
  662. reg = reg_read(REG_METAL_MASK_ADDR);
  663. /* [0] - RetryMask - Disable */
  664. reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
  665. /* 0x14B0 - Dunit MMask Register */
  666. dfs_reg_write(REG_METAL_MASK_ADDR, reg);
  667. /* Configure - Block new external transactions - Enable */
  668. reg = reg_read(REG_DFS_ADDR);
  669. reg |= (1 << REG_DFS_BLOCK_OFFS); /* [1] - DfsBlock - Enable */
  670. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  671. /* Configure - Move DRAM into Self Refresh */
  672. reg = reg_read(REG_DFS_ADDR);
  673. reg |= (1 << REG_DFS_SR_OFFS); /* [2] - DfsSR - Enable */
  674. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  675. /* Poll - Wait for Self Refresh indication */
  676. do {
  677. reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
  678. } while (reg == 0x0); /* 0x1528 [3] - DfsAtSR - Wait for '1' */
  679. /* Start of clock change procedure (PLL) */
  680. #if defined(MV88F672X)
  681. /* avantaLP */
  682. /* Configure cpupll_clkdiv_reset_mask */
  683. reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
  684. reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
  685. /* 0xE8264[7:0] 0xff CPU Clock Dividers Reset mask */
  686. dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
  687. /* Configure cpu_clkdiv_reload_smooth */
  688. reg = reg_read(CPU_PLL_CNTRL0);
  689. reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
  690. /* 0xE8260 [15:8] 0x2 CPU Clock Dividers Reload Smooth enable */
  691. dfs_reg_write(CPU_PLL_CNTRL0,
  692. reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS));
  693. /* Configure cpupll_clkdiv_relax_en */
  694. reg = reg_read(CPU_PLL_CNTRL0);
  695. reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
  696. /* 0xE8260 [31:24] 0x2 Relax Enable */
  697. dfs_reg_write(CPU_PLL_CNTRL0,
  698. reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS));
  699. /* Configure cpupll_clkdiv_ddr_clk_ratio */
  700. reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
  701. /*
  702. * 0xE8268 [13:8] N Set Training clock:
  703. * APLL Out Clock (VCO freq) / N = 100 MHz
  704. */
  705. reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
  706. reg |= (freq_par << 8); /* full Integer ratio from PLL-out to ddr-clk */
  707. dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
  708. /* Configure cpupll_clkdiv_reload_ratio */
  709. reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
  710. reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
  711. /* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
  712. dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
  713. reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS));
  714. udelay(1);
  715. /* Configure cpupll_clkdiv_reload_ratio */
  716. reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
  717. reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
  718. /* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
  719. dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
  720. udelay(5);
  721. #else
  722. /*
  723. * Initial Setup - assure that the "load new ratio" is clear (bit 24)
  724. * and in the same chance, block reassertions of reset [15:8]
  725. * and force reserved bits[7:0].
  726. */
  727. reg = 0x0000FFFF;
  728. /* 0x18700 - CPU Div CLK control 0 */
  729. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
  730. /*
  731. * RelaX whenever reset is asserted to that channel (good for any case)
  732. */
  733. reg = 0x0000FF00;
  734. /* 0x18704 - CPU Div CLK control 0 */
  735. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
  736. reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
  737. REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
  738. reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
  739. /* full Integer ratio from PLL-out to ddr-clk */
  740. /* 0x1870C - CPU Div CLK control 3 register */
  741. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
  742. /*
  743. * Shut off clock enable to the DDRPHY clock channel (this is the "D").
  744. * All the rest are kept as is (forced, but could be read-modify-write).
  745. * This is done now by RMW above.
  746. */
  747. reg = 0x000FFF02;
  748. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
  749. /* Wait before replacing the clock on the DDR Phy Channel. */
  750. udelay(1);
  751. reg = 0x0102FDFF;
  752. /*
  753. * This for triggering the frequency update. Bit[24] is the
  754. * central control
  755. * bits [23:16] == which channels to change ==2 ==> only DDR Phy
  756. * (smooth transition)
  757. * bits [15:8] == mask reset reassertion due to clock modification
  758. * to these channels.
  759. * bits [7:0] == not in use
  760. */
  761. /* 0x18700 - CPU Div CLK control 0 register */
  762. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
  763. udelay(1);
  764. /*
  765. * Poll Div CLK status 0 register - indication that the clocks
  766. * are active - 0x18718 [8]
  767. */
  768. do {
  769. reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
  770. (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
  771. } while (reg == 0);
  772. reg = 0x000000FF;
  773. /*
  774. * Clean the CTRL0, to be ready for next resets and next requests
  775. * of ratio modifications.
  776. */
  777. /* 0x18700 - CPU Div CLK control 0 register */
  778. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
  779. #endif
  780. /* End of clock change procedure (PLL) */
  781. if (ratio_2to1) {
  782. /* Configure - Select normal clock for the DDR PHY - Disable */
  783. reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
  784. /* [16] - ddr_phy_trn_clk_sel - Disable */
  785. reg &= ~(1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
  786. /* 0x18488 - DRAM Init control status register */
  787. dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
  788. }
  789. /*
  790. * Configure - Set Correct Ratio - according to target ratio
  791. * parameter - 2:1/1:1
  792. */
  793. if (ratio_2to1) {
  794. /*
  795. * [15] - Phy2UnitClkRatio = 1 - Set 2:1 Ratio between
  796. * Dunit and Phy
  797. */
  798. reg = reg_read(REG_DDR_IO_ADDR) |
  799. (1 << REG_DDR_IO_CLK_RATIO_OFFS);
  800. } else {
  801. /*
  802. * [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between
  803. * Dunit and Phy
  804. */
  805. reg = reg_read(REG_DDR_IO_ADDR) &
  806. ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
  807. }
  808. dfs_reg_write(REG_DDR_IO_ADDR, reg); /* 0x1524 - DDR IO Register */
  809. /* Configure - 2T Mode - Restore original configuration */
  810. reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
  811. /* [3:4] 2T - Restore value */
  812. reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
  813. reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
  814. REG_DUNIT_CTRL_LOW_2T_OFFS);
  815. /* 0x1404 - DDR Controller Control Low Register */
  816. dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
  817. /* Configure - Restore CL and CWL - MRS Commands */
  818. reg = reg_read(REG_DFS_ADDR);
  819. reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
  820. reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
  821. if (freq == DDR_400) {
  822. if (dram_info->target_frequency == 0x8)
  823. tmp = ddr3_cl_to_valid_cl(5);
  824. else
  825. tmp = ddr3_cl_to_valid_cl(6);
  826. } else {
  827. tmp = ddr3_cl_to_valid_cl(dram_info->cl);
  828. }
  829. /* [8] - DfsCLNextState */
  830. reg |= ((tmp & REG_DFS_CL_NEXT_STATE_MASK) << REG_DFS_CL_NEXT_STATE_OFFS);
  831. if (freq == DDR_400) {
  832. /* [12] - DfsCWLNextState */
  833. reg |= (((0) & REG_DFS_CWL_NEXT_STATE_MASK) <<
  834. REG_DFS_CWL_NEXT_STATE_OFFS);
  835. } else {
  836. /* [12] - DfsCWLNextState */
  837. reg |= (((dram_info->cwl) & REG_DFS_CWL_NEXT_STATE_MASK) <<
  838. REG_DFS_CWL_NEXT_STATE_OFFS);
  839. }
  840. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  841. /* Optional - Configure - DDR3_Rtt_nom_CS# */
  842. for (cs = 0; cs < MAX_CS; cs++) {
  843. if (dram_info->cs_ena & (1 << cs)) {
  844. reg = reg_read(REG_DDR3_MR1_CS_ADDR +
  845. (cs << MR_CS_ADDR_OFFS));
  846. reg &= REG_DDR3_MR1_RTT_MASK;
  847. reg |= odt_static[dram_info->cs_ena][cs];
  848. dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
  849. (cs << MR_CS_ADDR_OFFS), reg);
  850. }
  851. }
  852. /* Configure - Reset ADLLs - Set Reset */
  853. reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
  854. /* [31:30]] - reset pup data ctrl ADLL */
  855. /* 0x15EC - DRAM PHY Config Register */
  856. dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
  857. /* Configure - Reset ADLLs - Release Reset */
  858. reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
  859. /* [31:30] - normal pup data ctrl ADLL */
  860. /* 0x15EC - DRAM PHY Config register */
  861. dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
  862. /* Poll - Wait for APLL + ADLLs lock on new frequency */
  863. do {
  864. reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
  865. REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
  866. /* 0x1674 [10:0] - Phy lock status Register */
  867. } while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
  868. /* Configure - Reset the PHY SDR clock divider */
  869. if (ratio_2to1) {
  870. /* Pup Reset Divider B - Set Reset */
  871. /* [28] - DataPupRdRST = 0 */
  872. reg = reg_read(REG_SDRAM_CONFIG_ADDR) &
  873. ~(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
  874. /* [28] - DataPupRdRST = 1 */
  875. tmp = reg_read(REG_SDRAM_CONFIG_ADDR) |
  876. (1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
  877. /* 0x1400 - SDRAM Configuration register */
  878. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
  879. /* Pup Reset Divider B - Release Reset */
  880. /* 0x1400 - SDRAM Configuration register */
  881. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
  882. }
  883. /* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
  884. reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
  885. /* [30:29] = 0 - Data Pup R/W path reset */
  886. /* 0x1400 - SDRAM Configuration register */
  887. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
  888. /*
  889. * Configure - DRAM Data PHY Read [30], Write [29] path reset -
  890. * Release Reset
  891. */
  892. reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
  893. /* [30:29] = '11' - Data Pup R/W path reset */
  894. /* 0x1400 - SDRAM Configuration register */
  895. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
  896. /* Registered DIMM support */
  897. if (dram_info->reg_dimm) {
  898. /*
  899. * Configure - Change register DRAM operating speed
  900. * (DDR3-1333 / DDR3-1600) - CWA_RC
  901. */
  902. reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
  903. REG_SDRAM_OPERATION_CWA_RC_OFFS;
  904. if (freq <= DDR_400) {
  905. /*
  906. * Configure - Change register DRAM operating speed
  907. * (DDR3-800) - CWA_DATA
  908. */
  909. reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
  910. REG_SDRAM_OPERATION_CWA_DATA_OFFS);
  911. } else if ((freq > DDR_400) && (freq <= DDR_533)) {
  912. /*
  913. * Configure - Change register DRAM operating speed
  914. * (DDR3-1066) - CWA_DATA
  915. */
  916. reg |= ((0x1 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
  917. REG_SDRAM_OPERATION_CWA_DATA_OFFS);
  918. } else if ((freq > DDR_533) && (freq <= DDR_666)) {
  919. /*
  920. * Configure - Change register DRAM operating speed
  921. * (DDR3-1333) - CWA_DATA
  922. */
  923. reg |= ((0x2 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
  924. REG_SDRAM_OPERATION_CWA_DATA_OFFS);
  925. } else {
  926. /*
  927. * Configure - Change register DRAM operating speed
  928. * (DDR3-1600) - CWA_DATA
  929. */
  930. reg |= ((0x3 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
  931. REG_SDRAM_OPERATION_CWA_DATA_OFFS);
  932. }
  933. /* Configure - Set Delay - tSTAB */
  934. reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
  935. /* Configure - Issue CWA command with the above parameters */
  936. reg |= (REG_SDRAM_OPERATION_CMD_CWA &
  937. ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
  938. /* 0x1418 - SDRAM Operation Register */
  939. dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
  940. /* Poll - Wait for CWA operation completion */
  941. do {
  942. reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
  943. REG_SDRAM_OPERATION_CMD_MASK;
  944. } while (reg);
  945. }
  946. /* Configure - Exit Self Refresh */
  947. /* [2] - DfsSR */
  948. reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
  949. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  950. /*
  951. * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM
  952. * devices on all ranks are NOT in self refresh mode
  953. */
  954. do {
  955. reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
  956. } while (reg); /* Wait for '0' */
  957. /* Configure - Issue Refresh command */
  958. /* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
  959. reg = REG_SDRAM_OPERATION_CMD_RFRS;
  960. for (cs = 0; cs < MAX_CS; cs++) {
  961. if (dram_info->cs_ena & (1 << cs))
  962. reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
  963. }
  964. /* 0x1418 - SDRAM Operation Register */
  965. dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
  966. /* Poll - Wait for Refresh operation completion */
  967. wait_refresh_op_complete();
  968. /* Configure - Block new external transactions - Disable */
  969. reg = reg_read(REG_DFS_ADDR);
  970. reg &= ~(1 << REG_DFS_BLOCK_OFFS); /* [1] - DfsBlock - Disable */
  971. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  972. /*
  973. * Configure - XBAR Retry response during Block to enable
  974. * internal access - Disable
  975. */
  976. reg = reg_read(REG_METAL_MASK_ADDR);
  977. /* [0] - RetryMask - Enable */
  978. reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
  979. /* 0x14B0 - Dunit MMask Register */
  980. dfs_reg_write(REG_METAL_MASK_ADDR, reg);
  981. for (cs = 0; cs < MAX_CS; cs++) {
  982. if (dram_info->cs_ena & (1 << cs)) {
  983. /* Configure - Set CL */
  984. reg = reg_read(REG_DDR3_MR0_CS_ADDR +
  985. (cs << MR_CS_ADDR_OFFS)) &
  986. ~REG_DDR3_MR0_CL_MASK;
  987. if (freq == DDR_400)
  988. tmp = ddr3_cl_to_valid_cl(6);
  989. else
  990. tmp = ddr3_cl_to_valid_cl(dram_info->cl);
  991. reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
  992. reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
  993. dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
  994. (cs << MR_CS_ADDR_OFFS), reg);
  995. /* Configure - Set CWL */
  996. reg = reg_read(REG_DDR3_MR2_CS_ADDR +
  997. (cs << MR_CS_ADDR_OFFS)) &
  998. ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
  999. if (freq == DDR_400)
  1000. reg |= ((0) << REG_DDR3_MR2_CWL_OFFS);
  1001. else
  1002. reg |= ((dram_info->cwl) << REG_DDR3_MR2_CWL_OFFS);
  1003. dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
  1004. (cs << MR_CS_ADDR_OFFS), reg);
  1005. }
  1006. }
  1007. DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfuly - new Frequency - ",
  1008. freq, 1);
  1009. return MV_OK;
  1010. #else
  1011. /* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
  1012. u32 reg, freq_par, tmp;
  1013. u32 cs = 0;
  1014. DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
  1015. freq, 1);
  1016. /* target frequency - freq */
  1017. freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
  1018. reg = 0x0000FF00;
  1019. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
  1020. /* 0x1600 - PHY lock mask register */
  1021. reg = reg_read(REG_ODPG_CNTRL_ADDR);
  1022. reg |= (1 << REG_ODPG_CNTRL_OFFS); /* [21] = 1 */
  1023. dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
  1024. /* 0x1670 - PHY lock mask register */
  1025. reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
  1026. reg &= REG_PHY_LOCK_MASK_MASK; /* [11:0] = 0 */
  1027. dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
  1028. /* Enable reconfig MR Registers after DFS */
  1029. reg = reg_read(REG_DFS_ADDR); /* 0x1528 - DFS register */
  1030. /* [4] - Disable - reconfig MR registers after DFS_ERG */
  1031. reg &= ~0x11;
  1032. /* [0] - Enable - DRAM DLL after DFS */
  1033. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  1034. /* Disable DRAM Controller to crossbar retry */
  1035. /* [0] - disable */
  1036. reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);
  1037. /* 0x14B0 - Dunit MMask Register */
  1038. dfs_reg_write(REG_METAL_MASK_ADDR, reg);
  1039. /* Enable DRAM Blocking */
  1040. /* [1] - DFS Block enable */
  1041. reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
  1042. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  1043. /* Enable Self refresh */
  1044. /* [2] - DFS Self refresh enable */
  1045. reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
  1046. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  1047. /*
  1048. * Poll DFS Register - All DRAM devices on all ranks are in
  1049. * self refresh mode - DFS can be executed afterwards
  1050. */
  1051. /* 0x1528 [3] - DfsAtSR */
  1052. do {
  1053. reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
  1054. } while (reg == 0x0); /* Wait for '1' */
  1055. /*
  1056. * Set Correct Ratio - if freq>MARGIN_FREQ use 2:1 ratio
  1057. * else use 1:1 ratio
  1058. */
  1059. if (ratio_2to1) {
  1060. /* [15] = 1 - Set 2:1 Ratio between Dunit and Phy */
  1061. reg = reg_read(REG_DDR_IO_ADDR) |
  1062. (1 << REG_DDR_IO_CLK_RATIO_OFFS);
  1063. } else {
  1064. /* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
  1065. reg = reg_read(REG_DDR_IO_ADDR) &
  1066. ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
  1067. }
  1068. dfs_reg_write(REG_DDR_IO_ADDR, reg); /* 0x1524 - DDR IO Register */
  1069. /* Switch HCLK Mux from (100Mhz) [16]=0, keep DFS request bit */
  1070. reg = 0x20040000;
  1071. /*
  1072. * [29] - training logic request DFS, [28:27] -
  1073. * preload patterns frequency [18]
  1074. */
  1075. /* 0x18488 - DRAM Init control status register */
  1076. dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
  1077. /* Add delay between entering SR and start ratio modification */
  1078. udelay(1);
  1079. /*
  1080. * Initial Setup - assure that the "load new ratio" is clear (bit 24)
  1081. * and in the same chance, block reassertions of reset [15:8] and
  1082. * force reserved bits[7:0].
  1083. */
  1084. reg = 0x0000FFFF;
  1085. /* 0x18700 - CPU Div CLK control 0 */
  1086. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
  1087. /*
  1088. * RelaX whenever reset is asserted to that channel (good for any case)
  1089. */
  1090. reg = 0x0000FF00;
  1091. /* 0x18704 - CPU Div CLK control 0 */
  1092. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
  1093. reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
  1094. REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
  1095. reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
  1096. /* Full Integer ratio from PLL-out to ddr-clk */
  1097. /* 0x1870C - CPU Div CLK control 3 register */
  1098. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
  1099. /*
  1100. * Shut off clock enable to the DDRPHY clock channel (this is the "D").
  1101. * All the rest are kept as is (forced, but could be read-modify-write).
  1102. * This is done now by RMW above.
  1103. */
  1104. reg = 0x000FFF02;
  1105. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
  1106. /* Wait before replacing the clock on the DDR Phy Channel. */
  1107. udelay(1);
  1108. reg = 0x0102FDFF;
  1109. /*
  1110. * This for triggering the frequency update. Bit[24] is the
  1111. * central control
  1112. * bits [23:16] == which channels to change ==2 ==> only DDR Phy
  1113. * (smooth transition)
  1114. * bits [15:8] == mask reset reassertion due to clock modification
  1115. * to these channels.
  1116. * bits [7:0] == not in use
  1117. */
  1118. /* 0x18700 - CPU Div CLK control 0 register */
  1119. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
  1120. udelay(1);
  1121. /*
  1122. * Poll Div CLK status 0 register - indication that the clocks are
  1123. * active - 0x18718 [8]
  1124. */
  1125. do {
  1126. reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
  1127. (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
  1128. } while (reg == 0);
  1129. reg = 0x000000FF;
  1130. /*
  1131. * Clean the CTRL0, to be ready for next resets and next requests of
  1132. * ratio modifications.
  1133. */
  1134. /* 0x18700 - CPU Div CLK control 0 register */
  1135. dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
  1136. udelay(5);
  1137. if (ratio_2to1) {
  1138. /* Pup Reset Divider B - Set Reset */
  1139. /* [28] = 0 - Pup Reset Divider B */
  1140. reg = reg_read(REG_SDRAM_CONFIG_ADDR) & ~(1 << 28);
  1141. /* [28] = 1 - Pup Reset Divider B */
  1142. tmp = reg_read(REG_SDRAM_CONFIG_ADDR) | (1 << 28);
  1143. /* 0x1400 - SDRAM Configuration register */
  1144. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
  1145. /* Pup Reset Divider B - Release Reset */
  1146. /* 0x1400 - SDRAM Configuration register */
  1147. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
  1148. }
  1149. /* DRAM Data PHYs ADLL Reset - Set Reset */
  1150. reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK);
  1151. /* [31:30]] - reset pup data ctrl ADLL */
  1152. /* 0x15EC - DRAM PHY Config Register */
  1153. dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
  1154. udelay(25);
  1155. /* APLL lock indication - Poll Phy lock status Register - 0x1674 [9] */
  1156. do {
  1157. reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
  1158. (1 << REG_PHY_LOCK_STATUS_LOCK_OFFS);
  1159. } while (reg == 0);
  1160. /* DRAM Data PHYs ADLL Reset - Release Reset */
  1161. reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
  1162. /* [31:30] - normal pup data ctrl ADLL */
  1163. /* 0x15EC - DRAM PHY Config register */
  1164. dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
  1165. udelay(10000); /* Wait 10msec */
  1166. /*
  1167. * APLL lock indication - Poll Phy lock status Register - 0x1674 [11:0]
  1168. */
  1169. do {
  1170. reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
  1171. REG_PHY_LOCK_STATUS_LOCK_MASK;
  1172. } while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);
  1173. /* DRAM Data PHY Read [30], Write [29] path reset - Set Reset */
  1174. reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
  1175. /* [30:29] = 0 - Data Pup R/W path reset */
  1176. /* 0x1400 - SDRAM Configuration register */
  1177. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
  1178. /* DRAM Data PHY Read [30], Write [29] path reset - Release Reset */
  1179. reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
  1180. /* [30:29] = '11' - Data Pup R/W path reset */
  1181. /* 0x1400 - SDRAM Configuration register */
  1182. dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
  1183. /* Disable DFS Reconfig */
  1184. reg = reg_read(REG_DFS_ADDR) & ~(1 << 4);
  1185. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  1186. /* [2] - DFS Self refresh disable */
  1187. reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
  1188. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  1189. /*
  1190. * Poll DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices on
  1191. * all ranks are NOT in self refresh mode
  1192. */
  1193. do {
  1194. reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
  1195. } while (reg); /* Wait for '0' */
  1196. /* 0x1404 */
  1197. reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7) | 0x2;
  1198. /* Configure - 2T Mode - Restore original configuration */
  1199. /* [3:4] 2T - Restore value */
  1200. reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
  1201. reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
  1202. REG_DUNIT_CTRL_LOW_2T_OFFS);
  1203. dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
  1204. udelay(1); /* Wait 1us */
  1205. for (cs = 0; cs < MAX_CS; cs++) {
  1206. if (dram_info->cs_ena & (1 << cs)) {
  1207. reg = (reg_read(REG_DDR3_MR1_ADDR));
  1208. /* DLL Enable */
  1209. reg &= ~(1 << REG_DDR3_MR1_DLL_ENA_OFFS);
  1210. dfs_reg_write(REG_DDR3_MR1_ADDR, reg);
  1211. /* Issue MRS Command to current cs */
  1212. reg = REG_SDRAM_OPERATION_CMD_MR1 &
  1213. ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
  1214. /*
  1215. * [3-0] = 0x4 - MR1 Command, [11-8] -
  1216. * enable current cs
  1217. */
  1218. /* 0x1418 - SDRAM Operation Register */
  1219. dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
  1220. /* Poll - Wait for Refresh operation completion */
  1221. wait_refresh_op_complete();
  1222. /* DLL Reset - MR0 */
  1223. reg = reg_read(REG_DDR3_MR0_ADDR);
  1224. dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
  1225. /* Issue MRS Command to current cs */
  1226. reg = REG_SDRAM_OPERATION_CMD_MR0 &
  1227. ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
  1228. /*
  1229. * [3-0] = 0x4 - MR1 Command, [11-8] -
  1230. * enable current cs
  1231. */
  1232. /* 0x1418 - SDRAM Operation Register */
  1233. dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
  1234. /* Poll - Wait for Refresh operation completion */
  1235. wait_refresh_op_complete();
  1236. reg = reg_read(REG_DDR3_MR0_ADDR);
  1237. reg &= ~0x74; /* CL [3:0]; [6:4],[2] */
  1238. if (freq == DDR_400)
  1239. tmp = ddr3_cl_to_valid_cl(6) & 0xF;
  1240. else
  1241. tmp = ddr3_cl_to_valid_cl(dram_info->cl) & 0xF;
  1242. reg |= ((tmp & 0x1) << 2);
  1243. reg |= ((tmp >> 1) << 4); /* to bit 4 */
  1244. dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
  1245. reg = REG_SDRAM_OPERATION_CMD_MR0 &
  1246. ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
  1247. /* 0x1418 - SDRAM Operation Register */
  1248. dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
  1249. /* Poll - Wait for Refresh operation completion */
  1250. wait_refresh_op_complete();
  1251. reg = reg_read(REG_DDR3_MR2_ADDR);
  1252. reg &= ~0x38; /* CWL [5:3] */
  1253. /* CWL = 0 ,for 400 MHg is 5 */
  1254. if (freq != DDR_400)
  1255. reg |= dram_info->cwl << REG_DDR3_MR2_CWL_OFFS;
  1256. dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
  1257. reg = REG_SDRAM_OPERATION_CMD_MR2 &
  1258. ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
  1259. /* 0x1418 - SDRAM Operation Register */
  1260. dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
  1261. /* Poll - Wait for Refresh operation completion */
  1262. wait_refresh_op_complete();
  1263. /* Set current rd_sample_delay */
  1264. reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
  1265. reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
  1266. (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
  1267. reg |= (dram_info->cl <<
  1268. (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
  1269. dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
  1270. /* Set current rd_ready_delay */
  1271. reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
  1272. reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
  1273. (REG_READ_DATA_READY_DELAYS_OFFS * cs));
  1274. reg |= ((dram_info->cl + 1) <<
  1275. (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
  1276. dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
  1277. }
  1278. }
  1279. /* Enable ODT on DLL-on mode */
  1280. dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, 0);
  1281. /* [1] - DFS Block disable */
  1282. reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
  1283. dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
  1284. /* Change DDR frequency to 100MHz procedure: */
  1285. /* 0x1600 - PHY lock mask register */
  1286. reg = reg_read(REG_ODPG_CNTRL_ADDR);
  1287. reg &= ~(1 << REG_ODPG_CNTRL_OFFS); /* [21] = 0 */
  1288. dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
  1289. /* Change DDR frequency to 100MHz procedure: */
  1290. /* 0x1670 - PHY lock mask register */
  1291. reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
  1292. reg |= ~REG_PHY_LOCK_MASK_MASK; /* [11:0] = FFF */
  1293. dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
  1294. reg = reg_read(REG_METAL_MASK_ADDR) | (1 << 0); /* [0] - disable */
  1295. /* 0x14B0 - Dunit MMask Register */
  1296. dfs_reg_write(REG_METAL_MASK_ADDR, reg);
  1297. DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfuly - new Frequency - ",
  1298. freq, 1);
  1299. return MV_OK;
  1300. #endif
  1301. }