/* ddr3_hw_training.c */
  1. /*
  2. * Copyright (C) Marvell International Ltd. and its affiliates
  3. *
  4. * SPDX-License-Identifier: GPL-2.0
  5. */
  6. #include <common.h>
  7. #include <i2c.h>
  8. #include <spl.h>
  9. #include <asm/io.h>
  10. #include <asm/arch/cpu.h>
  11. #include <asm/arch/soc.h>
  12. #include "ddr3_init.h"
  13. #include "ddr3_hw_training.h"
  14. #include "xor.h"
  15. #ifdef MV88F78X60
  16. #include "ddr3_patterns_64bit.h"
  17. #else
  18. #include "ddr3_patterns_16bit.h"
  19. #if defined(MV88F672X)
  20. #include "ddr3_patterns_16bit.h"
  21. #endif
  22. #endif
  23. /*
  24. * Debug
  25. */
  26. #define DEBUG_MAIN_C(s, d, l) \
  27. DEBUG_MAIN_S(s); DEBUG_MAIN_D(d, l); DEBUG_MAIN_S("\n")
  28. #define DEBUG_MAIN_FULL_C(s, d, l) \
  29. DEBUG_MAIN_FULL_S(s); DEBUG_MAIN_FULL_D(d, l); DEBUG_MAIN_FULL_S("\n")
  30. #ifdef MV_DEBUG_MAIN
  31. #define DEBUG_MAIN_S(s) puts(s)
  32. #define DEBUG_MAIN_D(d, l) printf("%x", d)
  33. #else
  34. #define DEBUG_MAIN_S(s)
  35. #define DEBUG_MAIN_D(d, l)
  36. #endif
  37. #ifdef MV_DEBUG_MAIN_FULL
  38. #define DEBUG_MAIN_FULL_S(s) puts(s)
  39. #define DEBUG_MAIN_FULL_D(d, l) printf("%x", d)
  40. #else
  41. #define DEBUG_MAIN_FULL_S(s)
  42. #define DEBUG_MAIN_FULL_D(d, l)
  43. #endif
  44. #ifdef MV_DEBUG_SUSPEND_RESUME
  45. #define DEBUG_SUSPEND_RESUME_S(s) puts(s)
  46. #define DEBUG_SUSPEND_RESUME_D(d, l) printf("%x", d)
  47. #else
  48. #define DEBUG_SUSPEND_RESUME_S(s)
  49. #define DEBUG_SUSPEND_RESUME_D(d, l)
  50. #endif
/* When non-zero, a failed HW write/read leveling falls back to the SW flow */
static u32 ddr3_sw_wl_rl_debug;
/* PBS training stages enabled by default; changed via ddr3_set_pbs() */
static u32 ddr3_run_pbs = 1;
  53. void ddr3_print_version(void)
  54. {
  55. puts("DDR3 Training Sequence - Ver 5.7.");
  56. }
/*
 * Enable (non-zero) or disable (0) the SW leveling fallback: when set,
 * a failed HW write/read leveling step in ddr3_hw_training() retries
 * with the SW algorithm instead of aborting immediately.
 */
void ddr3_set_sw_wl_rl_debug(u32 val)
{
	ddr3_sw_wl_rl_debug = val;
}
/*
 * Enable (non-zero, the default) or disable (0) the PBS stages
 * (ddr3_pbs_rx()/ddr3_pbs_tx()) of the training sequence.
 */
void ddr3_set_pbs(u32 val)
{
	ddr3_run_pbs = val;
}
/*
 * Run the complete DDR3 HW training sequence and leave the DRAM
 * controller configured at the target frequency.
 *
 * target_freq      - target DDR frequency index (see ddr3_print_freq())
 * ddr_width        - data bus width in bits, used to derive the PUP counts
 * xor_bypass       - when non-zero, skip the training itself and run only
 *                    the XOR init / ECC scrub parts of the flow
 * scrub_offs       - start offset of the DRAM area scrubbed when ECC is on
 * scrub_size       - size of the scrubbed area
 * dqs_clk_aligned  - when non-zero, write leveling is skipped entirely
 * debug_mode       - emit "DEBUG - n" progress markers on the console
 * reg_dimm_skip_wl - when non-zero, skip the registered-DIMM WL variants
 *
 * Returns MV_OK on success or an MV_DDR3_TRAINING_ERR_* code on failure.
 */
int ddr3_hw_training(u32 target_freq, u32 ddr_width, int xor_bypass,
		     u32 scrub_offs, u32 scrub_size, int dqs_clk_aligned,
		     int debug_mode, int reg_dimm_skip_wl)
{
	/* A370 has no PBS mechanism */
	__maybe_unused u32 first_loop_flag = 0;
	u32 freq, reg;
	MV_DRAM_INFO dram_info;
	int ratio_2to1 = 0;
	int tmp_ratio = 1;
	int status;

	if (debug_mode)
		DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 1\n");

	/* Collect the static topology/config into the training context */
	memset(&dram_info, 0, sizeof(dram_info));
	dram_info.num_cs = ddr3_get_cs_num_from_reg();
	dram_info.cs_ena = ddr3_get_cs_ena_from_reg();
	dram_info.target_frequency = target_freq;
	dram_info.ddr_width = ddr_width;
	dram_info.num_of_std_pups = ddr_width / PUP_SIZE;
	dram_info.rl400_bug = 0;
	dram_info.multi_cs_mr_support = 0;
#ifdef MV88F67XX
	/* A370: forces SW read leveling at DDR_400 (see RL step below) */
	dram_info.rl400_bug = 1;
#endif

	/* Ignore ECC errors - if ECC is enabled */
	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
	if (reg & (1 << REG_SDRAM_CONFIG_ECC_OFFS)) {
		dram_info.ecc_ena = 1;
		reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
		reg_write(REG_SDRAM_CONFIG_ADDR, reg);
	} else {
		dram_info.ecc_ena = 0;
	}

	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
	if (reg & (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS))
		dram_info.reg_dimm = 1;
	else
		dram_info.reg_dimm = 0;

	/* ECC adds one extra PUP on top of the data PUPs */
	dram_info.num_of_total_pups = ddr_width / PUP_SIZE + dram_info.ecc_ena;

	/* Get target 2T value */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	dram_info.mode_2t = (reg >> REG_DUNIT_CTRL_LOW_2T_OFFS) &
		REG_DUNIT_CTRL_LOW_2T_MASK;

	/* Get target CL value */
#ifdef MV88F67XX
	reg = reg_read(REG_DDR3_MR0_ADDR) >> 2;
#else
	reg = reg_read(REG_DDR3_MR0_CS_ADDR) >> 2;
#endif
	/* Reassemble the split MR0 CAS-latency bits into one 4-bit value */
	reg = (((reg >> 1) & 0xE) | (reg & 0x1)) & 0xF;
	dram_info.cl = ddr3_valid_cl_to_cl(reg);

	/* Get target CWL value */
#ifdef MV88F67XX
	reg = reg_read(REG_DDR3_MR2_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
#else
	reg = reg_read(REG_DDR3_MR2_CS_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
#endif
	reg &= REG_DDR3_MR2_CWL_MASK;
	dram_info.cwl = reg;

#if !defined(MV88F67XX)
	/* A370 has no PBS mechanism */
#if defined(MV88F78X60)
	/* PBS runs in an extra first DFS iteration at DDR_400 (see loop) */
	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs))
		first_loop_flag = 1;
#else
	/* first_loop_flag = 1; skip mid freq at ALP/A375 */
	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs) &&
	    (mv_ctrl_revision_get() >= UMC_A0))
		first_loop_flag = 1;
	else
		first_loop_flag = 0;
#endif
#endif

	freq = dram_info.target_frequency;

	/* Set ODT to always on */
	ddr3_odt_activate(1);

	/* Init XOR */
	mv_sys_xor_init(&dram_info);

	/* Get DRAM/HCLK ratio */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		ratio_2to1 = 1;

	/*
	 * Xor Bypass - ECC support in AXP is currently available for 1:1
	 * frequency modes.
	 * Not all frequency modes support the ddr3 training sequence
	 * (Only 1200/300).
	 * Xor Bypass allows using the Xor initializations and scrubbing
	 * inside the ddr3 training sequence without running the training
	 * itself.
	 */
	if (xor_bypass == 0) {
		if (ddr3_run_pbs) {
			DEBUG_MAIN_S("DDR3 Training Sequence - Run with PBS.\n");
		} else {
			DEBUG_MAIN_S("DDR3 Training Sequence - Run without PBS.\n");
		}

		/*
		 * Step 1: drop to a low frequency first so leveling can
		 * start from a known-good slow clock, then step back up
		 * inside the do/while loop below.
		 */
		if (dram_info.target_frequency > DFS_MARGIN) {
			tmp_ratio = 0;
			freq = DDR_100;

			if (dram_info.reg_dimm == 1)
				freq = DDR_300;

			if (MV_OK != ddr3_dfs_high_2_low(freq, &dram_info)) {
				/* Set low - 100Mhz DDR Frequency by HW */
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs High2Low)\n");
				return MV_DDR3_TRAINING_ERR_DFS_H2L;
			}

			if ((dram_info.reg_dimm == 1) &&
			    (reg_dimm_skip_wl == 0)) {
				if (MV_OK !=
				    ddr3_write_leveling_hw_reg_dimm(freq,
								    &dram_info))
					DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM Low WL - SKIP\n");
			}

			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1)
				ddr3_print_freq(freq);

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 2\n");
		} else {
			if (!dqs_clk_aligned) {
#ifdef MV88F67XX
				/*
				 * If running training sequence without DFS,
				 * we must run Write leveling before writing
				 * the patterns
				 */

				/*
				 * ODT - Multi CS system use SW WL,
				 * Single CS System use HW WL
				 */
				if (dram_info.cs_ena > 1) {
					if (MV_OK !=
					    ddr3_write_leveling_sw(
						    freq, tmp_ratio,
						    &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
					}
				} else {
					if (MV_OK !=
					    ddr3_write_leveling_hw(freq,
								   &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#else
				/* HW WL first; SW WL only as debug fallback */
				if (MV_OK != ddr3_write_leveling_hw(
					    freq, &dram_info)) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
					if (ddr3_sw_wl_rl_debug) {
						if (MV_OK !=
						    ddr3_write_leveling_sw(
							    freq, tmp_ratio,
							    &dram_info)) {
							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
						}
					} else {
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#endif
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 3\n");
		}

		/* Step 2: write the training patterns into DRAM */
		if (MV_OK != ddr3_load_patterns(&dram_info, 0)) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
			return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
		}

		/*
		 * TODO:
		 * The mainline U-Boot port of the bin_hdr DDR training code
		 * needs a delay of minimum 20ms here (10ms is a bit too short
		 * and the CPU hangs). The bin_hdr code doesn't have this delay.
		 * To be safe here, lets add a delay of 50ms here.
		 *
		 * Tested on the Marvell DB-MV784MP-GP board
		 */
		mdelay(50);

		/*
		 * Step 3: raise the frequency and level at each step.
		 * Normally a single pass to the target frequency; with
		 * first_loop_flag set, an extra first pass at DDR_400 is
		 * made so PBS can run there, then the loop repeats.
		 */
		do {
			freq = dram_info.target_frequency;
			tmp_ratio = ratio_2to1;
			DEBUG_MAIN_FULL_S("DDR3 Training Sequence - DEBUG - 4\n");

#if defined(MV88F78X60)
			/*
			 * There is a difference on the DFS frequency at the
			 * first iteration of this loop
			 */
			if (first_loop_flag) {
				freq = DDR_400;
				tmp_ratio = 0;
			}
#endif

			if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio,
							 &dram_info)) {
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
				return MV_DDR3_TRAINING_ERR_DFS_H2L;
			}

			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1) {
				ddr3_print_freq(freq);
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 5\n");

			/* Write leveling */
			if (!dqs_clk_aligned) {
#ifdef MV88F67XX
				/*
				 * ODT - Multi CS system that not support Multi
				 * CS MRS commands must use SW WL
				 */
				if (dram_info.cs_ena > 1) {
					if (MV_OK != ddr3_write_leveling_sw(
						    freq, tmp_ratio, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
					}
				} else {
					if (MV_OK != ddr3_write_leveling_hw(
						    freq, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#else
				if ((dram_info.reg_dimm == 1) &&
				    (freq == DDR_400)) {
					if (reg_dimm_skip_wl == 0) {
						if (MV_OK != ddr3_write_leveling_hw_reg_dimm(
							    freq, &dram_info))
							DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM WL - SKIP\n");
					}
				} else {
					if (MV_OK != ddr3_write_leveling_hw(
						    freq, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						if (ddr3_sw_wl_rl_debug) {
							if (MV_OK != ddr3_write_leveling_sw(
								    freq, tmp_ratio, &dram_info)) {
								DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
								return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
							}
						} else {
							return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
						}
					}
				}
#endif
				if (debug_mode)
					DEBUG_MAIN_S
					    ("DDR3 Training Sequence - DEBUG - 6\n");
			}

			/* Read Leveling */
			/*
			 * Armada 370 - Support for HCLK @ 400MHZ - must use
			 * SW read leveling
			 */
			if (freq == DDR_400 && dram_info.rl400_bug) {
				status = ddr3_read_leveling_sw(freq, tmp_ratio,
							       &dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S
					    ("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
					return status;
				}
			} else {
				if (MV_OK != ddr3_read_leveling_hw(
					    freq, &dram_info)) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
					if (ddr3_sw_wl_rl_debug) {
						if (MV_OK != ddr3_read_leveling_sw(
							    freq, tmp_ratio,
							    &dram_info)) {
							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
						}
					} else {
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 7\n");

			if (MV_OK != ddr3_wl_supplement(&dram_info)) {
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hi-Freq Sup)\n");
				return MV_DDR3_TRAINING_ERR_WR_LVL_HI_FREQ;
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 8\n");

#if !defined(MV88F67XX)
			/* A370 has no PBS mechanism */
#if defined(MV88F78X60) || defined(MV88F672X)
			/* PBS runs only once, on the extra first iteration */
			if (first_loop_flag == 1) {
				first_loop_flag = 0;

				status = MV_OK;
				status = ddr3_pbs_rx(&dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS RX)\n");
					return status;
				}

				if (debug_mode)
					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 9\n");

				status = ddr3_pbs_tx(&dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS TX)\n");
					return status;
				}

				if (debug_mode)
					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 10\n");
			}
#endif
#endif
		} while (freq != dram_info.target_frequency);

		/* Step 4: DQS centralization at the final frequency */
		status = ddr3_dqs_centralization_rx(&dram_info);
		if (MV_OK != status) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization RX)\n");
			return status;
		}

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 11\n");

		status = ddr3_dqs_centralization_tx(&dram_info);
		if (MV_OK != status) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization TX)\n");
			return status;
		}

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 12\n");
	}

	ddr3_set_performance_params(&dram_info);

	if (dram_info.ecc_ena) {
		/* Need to SCRUB the DRAM memory area to load U-Boot */
		mv_sys_xor_finish();
		dram_info.num_cs = 1;
		dram_info.cs_ena = 1;
		mv_sys_xor_init(&dram_info);
		mv_xor_mem_init(0, scrub_offs, scrub_size, 0xdeadbeef,
				0xdeadbeef);

		/* Wait for previous transfer completion */
		while (mv_xor_state_get(0) != MV_IDLE)
			;

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 13\n");
	}

	/* Return XOR State */
	mv_sys_xor_finish();

#if defined(MV88F78X60)
	/* Save training results in memory for resume state */
	ddr3_save_training(&dram_info);
#endif

	/* Clear ODT always on */
	ddr3_odt_activate(0);

	/* Configure Dynamic read ODT */
	ddr3_odt_read_dynamic_config(&dram_info);

	return MV_OK;
}
/*
 * Derive the write-to-write, read-to-read and read<->write turnaround
 * timings from the WL/RL phase extremes found during training, and program
 * them into the SDRAM Timing High register.
 *
 * dram_info - training context holding wl/rl min/max phase results.
 *
 * NOTE(review): the phase deltas below are computed in u32; this assumes
 * rl_max_phase >= wl_min_phase and wl_max_phase >= rl_min_phase (otherwise
 * the unsigned subtraction wraps) - confirm against the leveling results.
 */
void ddr3_set_performance_params(MV_DRAM_INFO *dram_info)
{
	u32 twr2wr, trd2rd, trd2wr_wr2rd;
	u32 tmp1, tmp2, reg;

	DEBUG_MAIN_FULL_C("Max WL Phase: ", dram_info->wl_max_phase, 2);
	DEBUG_MAIN_FULL_C("Min WL Phase: ", dram_info->wl_min_phase, 2);
	DEBUG_MAIN_FULL_C("Max RL Phase: ", dram_info->rl_max_phase, 2);
	DEBUG_MAIN_FULL_C("Min RL Phase: ", dram_info->rl_min_phase, 2);

	if (dram_info->wl_max_phase < 2)
		twr2wr = 0x2;
	else
		twr2wr = 0x3;

	/* trd2rd = 1 + ceil((rl_max_phase + 1) / 2) */
	trd2rd = 0x1 + (dram_info->rl_max_phase + 1) / 2 +
		(dram_info->rl_max_phase + 1) % 2;

	/* Worst case of the two phase-window spans, rounded up to cycles */
	tmp1 = (dram_info->rl_max_phase - dram_info->wl_min_phase) / 2 +
		(((dram_info->rl_max_phase - dram_info->wl_min_phase) % 2) >
		 0 ? 1 : 0);
	tmp2 = (dram_info->wl_max_phase - dram_info->rl_min_phase) / 2 +
		((dram_info->wl_max_phase - dram_info->rl_min_phase) % 2 >
		 0 ? 1 : 0);
	trd2wr_wr2rd = (tmp1 >= tmp2) ? tmp1 : tmp2;

	/* Fixed two-cycle margin on every turnaround value */
	trd2wr_wr2rd += 2;
	trd2rd += 2;
	twr2wr += 2;

	DEBUG_MAIN_FULL_C("WR 2 WR: ", twr2wr, 2);
	DEBUG_MAIN_FULL_C("RD 2 RD: ", trd2rd, 2);
	DEBUG_MAIN_FULL_C("RD 2 WR / WR 2 RD: ", trd2wr_wr2rd, 2);

	/*
	 * Read-modify-write the W2W, R2R and R2W/W2R fields; R2R and
	 * R2W/W2R each have a separate high-bits field for bits [..:2].
	 */
	reg = reg_read(REG_SDRAM_TIMING_HIGH_ADDR);

	reg &= ~(REG_SDRAM_TIMING_H_W2W_MASK << REG_SDRAM_TIMING_H_W2W_OFFS);
	reg |= ((twr2wr & REG_SDRAM_TIMING_H_W2W_MASK) <<
		REG_SDRAM_TIMING_H_W2W_OFFS);

	reg &= ~(REG_SDRAM_TIMING_H_R2R_MASK << REG_SDRAM_TIMING_H_R2R_OFFS);
	reg &= ~(REG_SDRAM_TIMING_H_R2R_H_MASK <<
		 REG_SDRAM_TIMING_H_R2R_H_OFFS);
	reg |= ((trd2rd & REG_SDRAM_TIMING_H_R2R_MASK) <<
		REG_SDRAM_TIMING_H_R2R_OFFS);
	reg |= (((trd2rd >> 2) & REG_SDRAM_TIMING_H_R2R_H_MASK) <<
		REG_SDRAM_TIMING_H_R2R_H_OFFS);

	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_MASK <<
		 REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_H_MASK <<
		 REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);
	reg |= ((trd2wr_wr2rd & REG_SDRAM_TIMING_H_R2W_W2R_MASK) <<
		REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
	reg |= (((trd2wr_wr2rd >> 2) & REG_SDRAM_TIMING_H_R2W_W2R_H_MASK) <<
		REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);

	reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
}
  468. /*
  469. * Perform DDR3 PUP Indirect Write
  470. */
  471. void ddr3_write_pup_reg(u32 mode, u32 cs, u32 pup, u32 phase, u32 delay)
  472. {
  473. u32 reg = 0;
  474. if (pup == PUP_BC)
  475. reg |= (1 << REG_PHY_BC_OFFS);
  476. else
  477. reg |= (pup << REG_PHY_PUP_OFFS);
  478. reg |= ((0x4 * cs + mode) << REG_PHY_CS_OFFS);
  479. reg |= (phase << REG_PHY_PHASE_OFFS) | delay;
  480. if (mode == PUP_WL_MODE)
  481. reg |= ((INIT_WL_DELAY + delay) << REG_PHY_DQS_REF_DLY_OFFS);
  482. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  483. reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
  484. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  485. do {
  486. reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
  487. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  488. } while (reg); /* Wait for '0' to mark the end of the transaction */
  489. /* If read Leveling mode - need to write to register 3 separetly */
  490. if (mode == PUP_RL_MODE) {
  491. reg = 0;
  492. if (pup == PUP_BC)
  493. reg |= (1 << REG_PHY_BC_OFFS);
  494. else
  495. reg |= (pup << REG_PHY_PUP_OFFS);
  496. reg |= ((0x4 * cs + mode + 1) << REG_PHY_CS_OFFS);
  497. reg |= (INIT_RL_DELAY);
  498. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  499. reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
  500. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  501. do {
  502. reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
  503. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  504. } while (reg);
  505. }
  506. }
  507. /*
  508. * Perform DDR3 PUP Indirect Read
  509. */
  510. u32 ddr3_read_pup_reg(u32 mode, u32 cs, u32 pup)
  511. {
  512. u32 reg;
  513. reg = (pup << REG_PHY_PUP_OFFS) |
  514. ((0x4 * cs + mode) << REG_PHY_CS_OFFS);
  515. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  516. reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_RD;
  517. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  518. do {
  519. reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
  520. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  521. } while (reg); /* Wait for '0' to mark the end of the transaction */
  522. return reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR); /* 0x16A0 */
  523. }
/*
 * Set training patterns
 *
 * Writes the DQS (and, on AXP/A375, PBS) training patterns to DRAM via
 * the Dunit auto-training machinery.  On resume (resume != 0) the
 * patterns are assumed present and only the base address / CS setup is
 * reprogrammed (CS0 only, base RESUME_RL_PATTERNS_ADDR).
 *
 * Returns MV_OK on success, MV_FAIL otherwise.
 */
int ddr3_load_patterns(MV_DRAM_INFO *dram_info, int resume)
{
	u32 reg;

	/* Enable SW override - Required for the ECC Pup */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);

	/* [0] = 1 - Enable SW override  */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = (1 << REG_DRAM_TRAINING_AUTO_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	if (resume == 0) {
#if defined(MV88F78X60) || defined(MV88F672X)
		ddr3_load_pbs_patterns(dram_info);
#endif
		ddr3_load_dqs_patterns(dram_info);
	}

	/* Disable SW override - Must be in a different stage */
	/* [0]=0 - Enable SW override  */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_1_ADDR) |
		(1 << REG_DRAM_TRAINING_1_TRNBPOINT_OFFS);
	reg_write(REG_DRAM_TRAINING_1_ADDR, reg);

	/* Set Base Addr */
#if defined(MV88F67XX)
	reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
#else
	if (resume == 0)
		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
	else
		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR,
			  RESUME_RL_PATTERNS_ADDR);
#endif

	/* Set Patterns: all enabled CS normally, CS0 only on resume */
	if (resume == 0) {
		reg = (dram_info->cs_ena << REG_DRAM_TRAINING_CS_OFFS) |
			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
	} else {
		reg = (0x1 << REG_DRAM_TRAINING_CS_OFFS) |
			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
	}

	reg |= (1 << REG_DRAM_TRAINING_AUTO_OFFS);

	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	udelay(100);

	/*
	 * Check if Successful
	 * NOTE(review): success is reported when the ..._ERROR_OFFS bit is
	 * SET - presumably the bit's name is misleading and it actually
	 * signals completion; confirm against the Dunit specification.
	 */
	if (reg_read(REG_DRAM_TRAINING_ADDR) &
	    (1 << REG_DRAM_TRAINING_ERROR_OFFS))
		return MV_OK;
	else
		return MV_FAIL;
}
  581. #if !defined(MV88F67XX)
/*
 * Name:     ddr3_save_training(MV_DRAM_INFO *dram_info)
 * Desc:     Saves the training results to memory (RL, WL, PBS, Rx/Tx
 *           centralization) followed by the two Dunit sample/ready delay
 *           registers, then stores the word count and an additive
 *           checksum for validation on resume.
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:    The layout written here must stay in sync with
 *           ddr3_read_training_results().
 * Returns:  None.
 */
void ddr3_save_training(MV_DRAM_INFO *dram_info)
{
	u32 val, pup, tmp_cs, cs, i, dq;
	u32 crc = 0;	/* additive checksum over every word saved */
	u32 regs = 0;	/* number of words written to DRAM */
	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
	u32 mode_config[MAX_TRAINING_MODE];

	/* Map each training-mode index to its PHY register-file selector */
	mode_config[DQS_WR_MODE] = PUP_DQS_WR;
	mode_config[WL_MODE_] = PUP_WL_MODE;
	mode_config[RL_MODE_] = PUP_RL_MODE;
	mode_config[DQS_RD_MODE] = PUP_DQS_RD;
	mode_config[PBS_TX_DM_MODE] = PUP_PBS_TX_DM;
	mode_config[PBS_TX_MODE] = PUP_PBS_TX;
	mode_config[PBS_RX_MODE] = PUP_PBS_RX;

	/* num of training modes */
	for (i = 0; i < MAX_TRAINING_MODE; i++) {
		tmp_cs = dram_info->cs_ena;

		/* num of CS */
		for (cs = 0; cs < MAX_CS; cs++) {
			if (tmp_cs & (1 << cs)) {
				/* num of PUPs */
				for (pup = 0; pup < dram_info->num_of_total_pups;
				     pup++) {
					/* Last index is the ECC PUP when ECC is on */
					if (pup == dram_info->num_of_std_pups &&
					    dram_info->ecc_ena)
						pup = ECC_PUP;
					if (i == PBS_TX_DM_MODE) {
						/*
						 * Change CS bitmask because
						 * PBS works only with CS0
						 */
						tmp_cs = 0x1;
						val = ddr3_read_pup_reg(
							mode_config[i], CS0, pup);
					} else if (i == PBS_TX_MODE ||
						   i == PBS_RX_MODE) {
						/*
						 * Change CS bitmask because
						 * PBS works only with CS0
						 */
						tmp_cs = 0x1;

						/* One word saved per DQ lane */
						for (dq = 0; dq <= DQ_NUM;
						     dq++) {
							val = ddr3_read_pup_reg(
								mode_config[i] + dq,
								CS0,
								pup);
							(*sdram_offset) = val;
							crc += *sdram_offset;
							sdram_offset++;
							regs++;
						}
						/* Already stored above */
						continue;
					} else {
						val = ddr3_read_pup_reg(
							mode_config[i], cs, pup);
					}

					*sdram_offset = val;
					crc += *sdram_offset;
					sdram_offset++;
					regs++;
				}
			}
		}
	}

	/* Append the two Dunit read sample/ready delay registers */
	*sdram_offset = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
	crc += *sdram_offset;
	sdram_offset++;
	regs++;
	*sdram_offset = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
	crc += *sdram_offset;
	sdram_offset++;
	regs++;

	/* Store the word count and the checksum for the resume path */
	sdram_offset = (u32 *)NUM_OF_REGISTER_ADDR;
	*sdram_offset = regs;
	DEBUG_SUSPEND_RESUME_S("Training Results CheckSum write= ");
	DEBUG_SUSPEND_RESUME_D(crc, 8);
	DEBUG_SUSPEND_RESUME_S("\n");
	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
	*sdram_offset = crc;
}
  671. /*
  672. * Name: ddr3_read_training_results()
  673. * Desc: Reads the training results from memeory (RL,WL,PBS,Rx/Tx
  674. * Centeralization)
  675. * and writes them to the relevant registers
  676. * Args: MV_DRAM_INFO *dram_info
  677. * Notes:
  678. * Returns: None.
  679. */
  680. int ddr3_read_training_results(void)
  681. {
  682. u32 val, reg, idx, dqs_wr_idx = 0, crc = 0;
  683. u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
  684. u32 training_val[RESUME_TRAINING_VALUES_MAX] = { 0 };
  685. u32 regs = *((u32 *)NUM_OF_REGISTER_ADDR);
  686. /*
  687. * Read Training results & Dunit registers from memory and write
  688. * it to an array
  689. */
  690. for (idx = 0; idx < regs; idx++) {
  691. training_val[idx] = *sdram_offset;
  692. crc += *sdram_offset;
  693. sdram_offset++;
  694. }
  695. sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
  696. if ((*sdram_offset) == crc) {
  697. DEBUG_SUSPEND_RESUME_S("Training Results CheckSum read PASS= ");
  698. DEBUG_SUSPEND_RESUME_D(crc, 8);
  699. DEBUG_SUSPEND_RESUME_S("\n");
  700. } else {
  701. DEBUG_MAIN_S("Wrong Training Results CheckSum\n");
  702. return MV_FAIL;
  703. }
  704. /*
  705. * We iterate through all the registers except for the last 2 since
  706. * they are Dunit registers (and not PHY registers)
  707. */
  708. for (idx = 0; idx < (regs - 2); idx++) {
  709. val = training_val[idx];
  710. reg = (val >> REG_PHY_CS_OFFS) & 0x3F; /*read the phy address */
  711. /* Check if the values belongs to the DQS WR */
  712. if (reg == PUP_WL_MODE) {
  713. /* bit[5:0] in DQS_WR are delay */
  714. val = (training_val[dqs_wr_idx++] & 0x3F);
  715. /*
  716. * bit[15:10] are DQS_WR delay & bit[9:0] are
  717. * WL phase & delay
  718. */
  719. val = (val << REG_PHY_DQS_REF_DLY_OFFS) |
  720. (training_val[idx] & 0x3C003FF);
  721. /* Add Request pending and write operation bits */
  722. val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
  723. } else if (reg == PUP_DQS_WR) {
  724. /*
  725. * Do nothing since DQS_WR will be done in PUP_WL_MODE
  726. */
  727. continue;
  728. }
  729. val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
  730. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, val);
  731. do {
  732. val = (reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR)) &
  733. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  734. } while (val); /* Wait for '0' to mark the end of the transaction */
  735. }
  736. /* write last 2 Dunit configurations */
  737. val = training_val[idx];
  738. reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, val); /* reg 0x1538 */
  739. val = training_val[idx + 1];
  740. reg_write(REG_READ_DATA_READY_DELAYS_ADDR, val); /* reg 0x153c */
  741. return MV_OK;
  742. }
/*
 * Name:     ddr3_check_if_resume_mode()
 * Desc:     Reads the address (BOOT_INFO_ADDR) of the Resume Magic word
 *           (SUSPEND_MAGIC_WORD) after bringing the PHY to a state where
 *           DRAM reads are stable (WL, pattern reload, RL on CS0).
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:    Temporarily restricts cs_ena to CS0 and restores it before
 *           returning.
 * Returns:  1 if the magic word matches (resume), 0 if not, or a
 *           negative MV_DDR3_TRAINING_ERR_* code on failure.
 */
int ddr3_check_if_resume_mode(MV_DRAM_INFO *dram_info, u32 freq)
{
	u32 magic_word;
	u32 *sdram_offset = (u32 *)BOOT_INFO_ADDR;

	if (dram_info->reg_dimm != 1) {
		/*
		 * Perform write leveling in order to initiate the phy with
		 * low frequency
		 */
		if (MV_OK != ddr3_write_leveling_hw(freq, dram_info)) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
			return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
		}
	}

	/* Reload the training patterns in resume layout */
	if (MV_OK != ddr3_load_patterns(dram_info, 1)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
		return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
	}

	/* Enable CS0 only for RL */
	dram_info->cs_ena = 0x1;

	/* Perform Read leveling in order to get stable memory */
	if (MV_OK != ddr3_read_leveling_hw(freq, dram_info)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
		/*
		 * NOTE(review): a read-leveling failure returns the
		 * WR_LVL_HW error code - confirm whether an RD_LVL code
		 * was intended.
		 */
		return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
	}

	/* Back to relevant CS */
	dram_info->cs_ena = ddr3_get_cs_ena_from_reg();

	magic_word = *sdram_offset;
	return magic_word == SUSPEND_MAGIC_WORD;
}
/*
 * Name:     ddr3_training_suspend_resume()
 * Desc:     Execute the Resume state: restore the saved training results,
 *           pulse the auto read-leveling bit while resetting the read
 *           FIFO, return to the target frequency, and (with ECC) scrub
 *           the RL pattern / training area.
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:
 * Returns:  MV_OK on success, MV_FAIL or a training error code otherwise.
 */
int ddr3_training_suspend_resume(MV_DRAM_INFO *dram_info)
{
	u32 freq, reg;
	int tmp_ratio;

	/* Configure DDR by reading the saved training results */
	if (MV_OK != ddr3_read_training_results())
		return MV_FAIL;

	/* Reset read FIFO */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Start Auto Read Leveling procedure */
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	/* '+' combines the two distinct single-bit masks, same as '|' here */
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));

	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset  */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	udelay(2);

	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	/* Clear Auto Read Leveling procedure */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	/* Return to target frequency */
	freq = dram_info->target_frequency;
	tmp_ratio = 1;
	if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio, dram_info)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
		return MV_DDR3_TRAINING_ERR_DFS_H2L;
	}

	if (dram_info->ecc_ena) {
		/* Scrubbing the RL area pattern and the training area */
		mv_sys_xor_finish();
		dram_info->num_cs = 1;
		dram_info->cs_ena = 1;
		mv_sys_xor_init(dram_info);
		mv_xor_mem_init(0, RESUME_RL_PATTERNS_ADDR,
				RESUME_RL_PATTERNS_SIZE, 0xFFFFFFFF, 0xFFFFFFFF);

		/* Wait for previous transfer completion */
		while (mv_xor_state_get(0) != MV_IDLE)
			;

		/* Return XOR State */
		mv_sys_xor_finish();
	}

	return MV_OK;
}
  833. #endif
  834. void ddr3_print_freq(u32 freq)
  835. {
  836. u32 tmp_freq;
  837. switch (freq) {
  838. case 0:
  839. tmp_freq = 100;
  840. break;
  841. case 1:
  842. tmp_freq = 300;
  843. break;
  844. case 2:
  845. tmp_freq = 360;
  846. break;
  847. case 3:
  848. tmp_freq = 400;
  849. break;
  850. case 4:
  851. tmp_freq = 444;
  852. break;
  853. case 5:
  854. tmp_freq = 500;
  855. break;
  856. case 6:
  857. tmp_freq = 533;
  858. break;
  859. case 7:
  860. tmp_freq = 600;
  861. break;
  862. case 8:
  863. tmp_freq = 666;
  864. break;
  865. case 9:
  866. tmp_freq = 720;
  867. break;
  868. case 10:
  869. tmp_freq = 800;
  870. break;
  871. default:
  872. tmp_freq = 100;
  873. }
  874. printf("Current frequency is: %dMHz\n", tmp_freq);
  875. }
  876. int ddr3_get_min_max_read_sample_delay(u32 cs_enable, u32 reg, u32 *min,
  877. u32 *max, u32 *cs_max)
  878. {
  879. u32 cs, delay;
  880. *min = 0xFFFFFFFF;
  881. *max = 0x0;
  882. for (cs = 0; cs < MAX_CS; cs++) {
  883. if ((cs_enable & (1 << cs)) == 0)
  884. continue;
  885. delay = ((reg >> (cs * 8)) & 0x1F);
  886. if (delay < *min)
  887. *min = delay;
  888. if (delay > *max) {
  889. *max = delay;
  890. *cs_max = cs;
  891. }
  892. }
  893. return MV_OK;
  894. }
  895. int ddr3_get_min_max_rl_phase(MV_DRAM_INFO *dram_info, u32 *min, u32 *max,
  896. u32 cs)
  897. {
  898. u32 pup, reg, phase;
  899. *min = 0xFFFFFFFF;
  900. *max = 0x0;
  901. for (pup = 0; pup < dram_info->num_of_total_pups; pup++) {
  902. reg = ddr3_read_pup_reg(PUP_RL_MODE, cs, pup);
  903. phase = ((reg >> 8) & 0x7);
  904. if (phase < *min)
  905. *min = phase;
  906. if (phase > *max)
  907. *max = phase;
  908. }
  909. return MV_OK;
  910. }
  911. int ddr3_odt_activate(int activate)
  912. {
  913. u32 reg, mask;
  914. mask = (1 << REG_DUNIT_ODT_CTRL_OVRD_OFFS) |
  915. (1 << REG_DUNIT_ODT_CTRL_OVRD_VAL_OFFS);
  916. /* {0x0000149C} - DDR Dunit ODT Control Register */
  917. reg = reg_read(REG_DUNIT_ODT_CTRL_ADDR);
  918. if (activate)
  919. reg |= mask;
  920. else
  921. reg &= ~mask;
  922. reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
  923. return MV_OK;
  924. }
/*
 * Name:     ddr3_odt_read_dynamic_config()
 * Desc:     Derive the dynamic ODT read on/off timing window from the
 *           trained read sample delays and read-leveling phases, and
 *           program it into the DDR ODT Timing (Low) Register.
 * Args:     dram_info - DRAM descriptor, forwarded to the RL phase scan
 * Returns:  MV_OK always.
 */
int ddr3_odt_read_dynamic_config(MV_DRAM_INFO *dram_info)
{
	u32 min_read_sample_delay, max_read_sample_delay, max_rl_phase;
	u32 min, max, cs_max;
	u32 cs_ena, reg;

	reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
	cs_ena = ddr3_get_cs_ena_from_reg();

	/* Get minimum and maximum of read sample delay of all CS */
	ddr3_get_min_max_read_sample_delay(cs_ena, reg, &min_read_sample_delay,
					   &max_read_sample_delay, &cs_max);

	/*
	 * Get minimum and maximum read leveling phase which belongs to the
	 * maximal read sample delay
	 */
	ddr3_get_min_max_rl_phase(dram_info, &min, &max, cs_max);
	max_rl_phase = max;

	/* DDR ODT Timing (Low) Register calculation */
	reg = reg_read(REG_ODT_TIME_LOW_ADDR);
	/*
	 * Clear 9 bits starting at the ODT-on offset — presumably the
	 * contiguous ODT-on (4-bit) + ODT-off (5-bit) read fields set
	 * below; verify against the register layout.
	 */
	reg &= ~(0x1FF << REG_ODT_ON_CTL_RD_OFFS);
	/*
	 * NOTE(review): if min_read_sample_delay is 0 the '- 1' wraps and
	 * the & 0xF yields 0xF — confirm trained delays are always >= 1.
	 */
	reg |= (((min_read_sample_delay - 1) & 0xF) << REG_ODT_ON_CTL_RD_OFFS);
	/* ODT-off point: max delay + fixed margin + half the max RL phase */
	reg |= (((max_read_sample_delay + 4 + (((max_rl_phase + 1) / 2) + 1)) &
		 0x1F) << REG_ODT_OFF_CTL_RD_OFFS);
	reg_write(REG_ODT_TIME_LOW_ADDR, reg);

	return MV_OK;
}