  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) Marvell International Ltd. and its affiliates
  4. */
  5. #include <common.h>
  6. #include <i2c.h>
  7. #include <spl.h>
  8. #include <asm/io.h>
  9. #include <asm/arch/cpu.h>
  10. #include <asm/arch/soc.h>
  11. #include "ddr3_init.h"
  12. #include "ddr3_hw_training.h"
  13. #include "xor.h"
  14. #ifdef MV88F78X60
  15. #include "ddr3_patterns_64bit.h"
  16. #else
  17. #include "ddr3_patterns_16bit.h"
  18. #if defined(MV88F672X)
  19. #include "ddr3_patterns_16bit.h"
  20. #endif
  21. #endif
  22. /*
  23. * Debug
  24. */
  25. #define DEBUG_MAIN_C(s, d, l) \
  26. DEBUG_MAIN_S(s); DEBUG_MAIN_D(d, l); DEBUG_MAIN_S("\n")
  27. #define DEBUG_MAIN_FULL_C(s, d, l) \
  28. DEBUG_MAIN_FULL_S(s); DEBUG_MAIN_FULL_D(d, l); DEBUG_MAIN_FULL_S("\n")
  29. #ifdef MV_DEBUG_MAIN
  30. #define DEBUG_MAIN_S(s) puts(s)
  31. #define DEBUG_MAIN_D(d, l) printf("%x", d)
  32. #else
  33. #define DEBUG_MAIN_S(s)
  34. #define DEBUG_MAIN_D(d, l)
  35. #endif
  36. #ifdef MV_DEBUG_MAIN_FULL
  37. #define DEBUG_MAIN_FULL_S(s) puts(s)
  38. #define DEBUG_MAIN_FULL_D(d, l) printf("%x", d)
  39. #else
  40. #define DEBUG_MAIN_FULL_S(s)
  41. #define DEBUG_MAIN_FULL_D(d, l)
  42. #endif
  43. #ifdef MV_DEBUG_SUSPEND_RESUME
  44. #define DEBUG_SUSPEND_RESUME_S(s) puts(s)
  45. #define DEBUG_SUSPEND_RESUME_D(d, l) printf("%x", d)
  46. #else
  47. #define DEBUG_SUSPEND_RESUME_S(s)
  48. #define DEBUG_SUSPEND_RESUME_D(d, l)
  49. #endif
  50. static u32 ddr3_sw_wl_rl_debug;
  51. static u32 ddr3_run_pbs = 1;
/* Print the DDR3 training code version banner (puts() appends the newline). */
void ddr3_print_version(void)
{
	puts("DDR3 Training Sequence - Ver 5.7.");
}
/*
 * Enable/disable the SW write/read-leveling fallback: when non-zero, a
 * failed HW leveling pass is retried with the SW algorithm instead of
 * aborting the training sequence.
 */
void ddr3_set_sw_wl_rl_debug(u32 val)
{
	ddr3_sw_wl_rl_debug = val;
}
/*
 * Enable/disable the Per-Bit-Skew (PBS) stage of the training sequence
 * (default is enabled; A370 has no PBS mechanism and ignores this).
 */
void ddr3_set_pbs(u32 val)
{
	ddr3_run_pbs = val;
}
/*
 * Run the full DDR3 hardware training sequence.
 *
 * Flow (when xor_bypass == 0): drop to a low DFS frequency, perform write
 * leveling, load the training patterns into DRAM, then loop raising the
 * frequency (one extra DDR_400 iteration for PBS on AXP/A375/ALP) doing
 * write leveling, read leveling, the WL hi-freq supplement and PBS, until
 * the target frequency is reached; finish with RX/TX DQS centralization.
 * Afterwards program turnaround timing, optionally scrub memory via XOR
 * for ECC, save results for suspend/resume (AXP) and restore ODT config.
 *
 * @target_freq:      target frequency index (see ddr3_print_freq mapping)
 * @ddr_width:        bus width in bits; PUP count = ddr_width / PUP_SIZE
 * @xor_bypass:       non-zero = skip training itself, keep XOR init/scrub
 * @scrub_offs:       ECC scrub start offset
 * @scrub_size:       ECC scrub size
 * @dqs_clk_aligned:  non-zero = DQS already aligned, skip write leveling
 * @debug_mode:       non-zero = emit "DEBUG - n" progress markers
 * @reg_dimm_skip_wl: non-zero = skip WL on registered DIMMs
 *
 * Returns MV_OK on success or an MV_DDR3_TRAINING_ERR_* code on failure.
 */
int ddr3_hw_training(u32 target_freq, u32 ddr_width, int xor_bypass,
		     u32 scrub_offs, u32 scrub_size, int dqs_clk_aligned,
		     int debug_mode, int reg_dimm_skip_wl)
{
	/* A370 has no PBS mechanism */
	__maybe_unused u32 first_loop_flag = 0;
	u32 freq, reg;
	MV_DRAM_INFO dram_info;
	int ratio_2to1 = 0;
	int tmp_ratio = 1;
	int status;

	if (debug_mode)
		DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 1\n");

	/* Gather the DRAM topology from the already-programmed Dunit */
	memset(&dram_info, 0, sizeof(dram_info));
	dram_info.num_cs = ddr3_get_cs_num_from_reg();
	dram_info.cs_ena = ddr3_get_cs_ena_from_reg();
	dram_info.target_frequency = target_freq;
	dram_info.ddr_width = ddr_width;
	dram_info.num_of_std_pups = ddr_width / PUP_SIZE;
	dram_info.rl400_bug = 0;
	dram_info.multi_cs_mr_support = 0;
#ifdef MV88F67XX
	/* A370: read leveling at DDR_400 needs the SW workaround */
	dram_info.rl400_bug = 1;
#endif

	/* Ignore ECC errors - if ECC is enabled */
	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
	if (reg & (1 << REG_SDRAM_CONFIG_ECC_OFFS)) {
		dram_info.ecc_ena = 1;
		reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
		reg_write(REG_SDRAM_CONFIG_ADDR, reg);
	} else {
		dram_info.ecc_ena = 0;
	}

	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
	if (reg & (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS))
		dram_info.reg_dimm = 1;
	else
		dram_info.reg_dimm = 0;

	/* ECC adds one extra PUP on top of the standard data PUPs */
	dram_info.num_of_total_pups = ddr_width / PUP_SIZE + dram_info.ecc_ena;

	/* Get target 2T value */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	dram_info.mode_2t = (reg >> REG_DUNIT_CTRL_LOW_2T_OFFS) &
		REG_DUNIT_CTRL_LOW_2T_MASK;

	/* Get target CL value */
#ifdef MV88F67XX
	reg = reg_read(REG_DDR3_MR0_ADDR) >> 2;
#else
	reg = reg_read(REG_DDR3_MR0_CS_ADDR) >> 2;
#endif
	/* Re-pack the split MR0 CAS-latency field bits into a 4-bit value */
	reg = (((reg >> 1) & 0xE) | (reg & 0x1)) & 0xF;
	dram_info.cl = ddr3_valid_cl_to_cl(reg);

	/* Get target CWL value */
#ifdef MV88F67XX
	reg = reg_read(REG_DDR3_MR2_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
#else
	reg = reg_read(REG_DDR3_MR2_CS_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
#endif
	reg &= REG_DDR3_MR2_CWL_MASK;
	dram_info.cwl = reg;

#if !defined(MV88F67XX)
	/* A370 has no PBS mechanism */
#if defined(MV88F78X60)
	/* AXP: PBS needs an extra first loop iteration at DDR_400 */
	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs))
		first_loop_flag = 1;
#else
	/* first_loop_flag = 1; skip mid freq at ALP/A375 */
	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs) &&
	    (mv_ctrl_revision_get() >= UMC_A0))
		first_loop_flag = 1;
	else
		first_loop_flag = 0;
#endif
#endif

	freq = dram_info.target_frequency;

	/* Set ODT to always on */
	ddr3_odt_activate(1);

	/* Init XOR */
	mv_sys_xor_init(&dram_info);

	/* Get DRAM/HCLK ratio */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		ratio_2to1 = 1;

	/*
	 * Xor Bypass - ECC support in AXP is currently available for 1:1
	 * modes frequency modes.
	 * Not all frequency modes support the ddr3 training sequence
	 * (Only 1200/300).
	 * Xor Bypass allows using the Xor initializations and scrubbing
	 * inside the ddr3 training sequence without running the training
	 * itself.
	 */
	if (xor_bypass == 0) {
		if (ddr3_run_pbs) {
			DEBUG_MAIN_S("DDR3 Training Sequence - Run with PBS.\n");
		} else {
			DEBUG_MAIN_S("DDR3 Training Sequence - Run without PBS.\n");
		}

		if (dram_info.target_frequency > DFS_MARGIN) {
			/* Training is done climbing up from a low frequency */
			tmp_ratio = 0;
			freq = DDR_100;

			if (dram_info.reg_dimm == 1)
				freq = DDR_300;

			if (MV_OK != ddr3_dfs_high_2_low(freq, &dram_info)) {
				/* Set low - 100Mhz DDR Frequency by HW */
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs High2Low)\n");
				return MV_DDR3_TRAINING_ERR_DFS_H2L;
			}

			/* Registered DIMM: WL at low freq is best-effort */
			if ((dram_info.reg_dimm == 1) &&
			    (reg_dimm_skip_wl == 0)) {
				if (MV_OK !=
				    ddr3_write_leveling_hw_reg_dimm(freq,
								    &dram_info))
					DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM Low WL - SKIP\n");
			}

			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1)
				ddr3_print_freq(freq);

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 2\n");
		} else {
			if (!dqs_clk_aligned) {
#ifdef MV88F67XX
				/*
				 * If running training sequence without DFS,
				 * we must run Write leveling before writing
				 * the patterns
				 */
				/*
				 * ODT - Multi CS system use SW WL,
				 * Single CS System use HW WL
				 */
				if (dram_info.cs_ena > 1) {
					if (MV_OK !=
					    ddr3_write_leveling_sw(
						    freq, tmp_ratio,
						    &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
					}
				} else {
					if (MV_OK !=
					    ddr3_write_leveling_hw(freq,
								   &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#else
				/* HW WL first; optional SW WL fallback */
				if (MV_OK != ddr3_write_leveling_hw(
					    freq, &dram_info)) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
					if (ddr3_sw_wl_rl_debug) {
						if (MV_OK !=
						    ddr3_write_leveling_sw(
							    freq, tmp_ratio,
							    &dram_info)) {
							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
						}
					} else {
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#endif
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 3\n");
		}

		if (MV_OK != ddr3_load_patterns(&dram_info, 0)) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
			return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
		}

		/*
		 * TODO:
		 * The mainline U-Boot port of the bin_hdr DDR training code
		 * needs a delay of minimum 20ms here (10ms is a bit too short
		 * and the CPU hangs). The bin_hdr code doesn't have this delay.
		 * To be save here, lets add a delay of 50ms here.
		 *
		 * Tested on the Marvell DB-MV784MP-GP board
		 */
		mdelay(50);

		/* Frequency-climb loop: runs once more for PBS (AXP) */
		do {
			freq = dram_info.target_frequency;
			tmp_ratio = ratio_2to1;
			DEBUG_MAIN_FULL_S("DDR3 Training Sequence - DEBUG - 4\n");

#if defined(MV88F78X60)
			/*
			 * There is a difference on the DFS frequency at the
			 * first iteration of this loop
			 */
			if (first_loop_flag) {
				freq = DDR_400;
				tmp_ratio = 0;
			}
#endif

			if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio,
							 &dram_info)) {
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
				return MV_DDR3_TRAINING_ERR_DFS_H2L;
			}

			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1) {
				ddr3_print_freq(freq);
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 5\n");

			/* Write leveling */
			if (!dqs_clk_aligned) {
#ifdef MV88F67XX
				/*
				 * ODT - Multi CS system that not support Multi
				 * CS MRS commands must use SW WL
				 */
				if (dram_info.cs_ena > 1) {
					if (MV_OK != ddr3_write_leveling_sw(
						    freq, tmp_ratio, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
					}
				} else {
					if (MV_OK != ddr3_write_leveling_hw(
						    freq, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#else
				if ((dram_info.reg_dimm == 1) &&
				    (freq == DDR_400)) {
					if (reg_dimm_skip_wl == 0) {
						if (MV_OK != ddr3_write_leveling_hw_reg_dimm(
							    freq, &dram_info))
							DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM WL - SKIP\n");
					}
				} else {
					if (MV_OK != ddr3_write_leveling_hw(
						    freq, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						if (ddr3_sw_wl_rl_debug) {
							if (MV_OK != ddr3_write_leveling_sw(
								    freq, tmp_ratio, &dram_info)) {
								DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
								return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
							}
						} else {
							return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
						}
					}
				}
#endif
				if (debug_mode)
					DEBUG_MAIN_S
					    ("DDR3 Training Sequence - DEBUG - 6\n");
			}

			/* Read Leveling */
			/*
			 * Armada 370 - Support for HCLK @ 400MHZ - must use
			 * SW read leveling
			 */
			if (freq == DDR_400 && dram_info.rl400_bug) {
				status = ddr3_read_leveling_sw(freq, tmp_ratio,
							       &dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S
					    ("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
					return status;
				}
			} else {
				if (MV_OK != ddr3_read_leveling_hw(
					    freq, &dram_info)) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
					if (ddr3_sw_wl_rl_debug) {
						if (MV_OK != ddr3_read_leveling_sw(
							    freq, tmp_ratio,
							    &dram_info)) {
							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
							/*
							 * NOTE(review): WR_LVL
							 * error codes returned
							 * on RL failure — as in
							 * the original; confirm
							 * intended.
							 */
							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
						}
					} else {
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 7\n");

			if (MV_OK != ddr3_wl_supplement(&dram_info)) {
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hi-Freq Sup)\n");
				return MV_DDR3_TRAINING_ERR_WR_LVL_HI_FREQ;
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 8\n");

#if !defined(MV88F67XX)
			/* A370 has no PBS mechanism */
#if defined(MV88F78X60) || defined(MV88F672X)
			/* PBS runs only on the extra first iteration */
			if (first_loop_flag == 1) {
				first_loop_flag = 0;

				status = MV_OK;
				status = ddr3_pbs_rx(&dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS RX)\n");
					return status;
				}

				if (debug_mode)
					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 9\n");

				status = ddr3_pbs_tx(&dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS TX)\n");
					return status;
				}

				if (debug_mode)
					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 10\n");
			}
#endif
#endif
		} while (freq != dram_info.target_frequency);

		status = ddr3_dqs_centralization_rx(&dram_info);
		if (MV_OK != status) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization RX)\n");
			return status;
		}

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 11\n");

		status = ddr3_dqs_centralization_tx(&dram_info);
		if (MV_OK != status) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization TX)\n");
			return status;
		}

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 12\n");
	}

	ddr3_set_performance_params(&dram_info);

	if (dram_info.ecc_ena) {
		/* Need to SCRUB the DRAM memory area to load U-Boot */
		mv_sys_xor_finish();
		dram_info.num_cs = 1;
		dram_info.cs_ena = 1;
		mv_sys_xor_init(&dram_info);
		mv_xor_mem_init(0, scrub_offs, scrub_size, 0xdeadbeef,
				0xdeadbeef);

		/* Wait for previous transfer completion */
		while (mv_xor_state_get(0) != MV_IDLE)
			;

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 13\n");
	}

	/* Return XOR State */
	mv_sys_xor_finish();

#if defined(MV88F78X60)
	/* Save training results in memory for resume state */
	ddr3_save_training(&dram_info);
#endif

	/* Clear ODT always on */
	ddr3_odt_activate(0);

	/* Configure Dynamic read ODT */
	ddr3_odt_read_dynamic_config(&dram_info);

	return MV_OK;
}
/*
 * Program the Dunit turnaround timing parameters (write-to-write,
 * read-to-read and read-to-write / write-to-read) derived from the
 * min/max write- and read-leveling phases measured during training.
 * Each value is split across a low field and a "high" field (bits above
 * bit 1) of REG_SDRAM_TIMING_HIGH_ADDR.
 */
void ddr3_set_performance_params(MV_DRAM_INFO *dram_info)
{
	u32 twr2wr, trd2rd, trd2wr_wr2rd;
	u32 tmp1, tmp2, reg;

	DEBUG_MAIN_FULL_C("Max WL Phase: ", dram_info->wl_max_phase, 2);
	DEBUG_MAIN_FULL_C("Min WL Phase: ", dram_info->wl_min_phase, 2);
	DEBUG_MAIN_FULL_C("Max RL Phase: ", dram_info->rl_max_phase, 2);
	DEBUG_MAIN_FULL_C("Min RL Phase: ", dram_info->rl_min_phase, 2);

	if (dram_info->wl_max_phase < 2)
		twr2wr = 0x2;
	else
		twr2wr = 0x3;

	/* 1 + ceil((rl_max_phase + 1) / 2) */
	trd2rd = 0x1 + (dram_info->rl_max_phase + 1) / 2 +
	    (dram_info->rl_max_phase + 1) % 2;

	/* Worst-case RL/WL phase distance in both directions, rounded up */
	tmp1 = (dram_info->rl_max_phase - dram_info->wl_min_phase) / 2 +
	    (((dram_info->rl_max_phase - dram_info->wl_min_phase) % 2) >
	     0 ? 1 : 0);
	tmp2 = (dram_info->wl_max_phase - dram_info->rl_min_phase) / 2 +
	    ((dram_info->wl_max_phase - dram_info->rl_min_phase) % 2 >
	     0 ? 1 : 0);
	trd2wr_wr2rd = (tmp1 >= tmp2) ? tmp1 : tmp2;

	/* +2 margin on each turnaround value */
	trd2wr_wr2rd += 2;
	trd2rd += 2;
	twr2wr += 2;

	DEBUG_MAIN_FULL_C("WR 2 WR: ", twr2wr, 2);
	DEBUG_MAIN_FULL_C("RD 2 RD: ", trd2rd, 2);
	DEBUG_MAIN_FULL_C("RD 2 WR / WR 2 RD: ", trd2wr_wr2rd, 2);

	/* Read-modify-write: clear each field (low + high part), then set */
	reg = reg_read(REG_SDRAM_TIMING_HIGH_ADDR);

	reg &= ~(REG_SDRAM_TIMING_H_W2W_MASK << REG_SDRAM_TIMING_H_W2W_OFFS);
	reg |= ((twr2wr & REG_SDRAM_TIMING_H_W2W_MASK) <<
		REG_SDRAM_TIMING_H_W2W_OFFS);

	reg &= ~(REG_SDRAM_TIMING_H_R2R_MASK << REG_SDRAM_TIMING_H_R2R_OFFS);
	reg &= ~(REG_SDRAM_TIMING_H_R2R_H_MASK <<
		 REG_SDRAM_TIMING_H_R2R_H_OFFS);
	reg |= ((trd2rd & REG_SDRAM_TIMING_H_R2R_MASK) <<
		REG_SDRAM_TIMING_H_R2R_OFFS);
	reg |= (((trd2rd >> 2) & REG_SDRAM_TIMING_H_R2R_H_MASK) <<
		REG_SDRAM_TIMING_H_R2R_H_OFFS);

	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_MASK <<
		 REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_H_MASK <<
		 REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);
	reg |= ((trd2wr_wr2rd & REG_SDRAM_TIMING_H_R2W_W2R_MASK) <<
		REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
	reg |= (((trd2wr_wr2rd >> 2) & REG_SDRAM_TIMING_H_R2W_W2R_H_MASK) <<
		REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);

	reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
}
  467. /*
  468. * Perform DDR3 PUP Indirect Write
  469. */
  470. void ddr3_write_pup_reg(u32 mode, u32 cs, u32 pup, u32 phase, u32 delay)
  471. {
  472. u32 reg = 0;
  473. if (pup == PUP_BC)
  474. reg |= (1 << REG_PHY_BC_OFFS);
  475. else
  476. reg |= (pup << REG_PHY_PUP_OFFS);
  477. reg |= ((0x4 * cs + mode) << REG_PHY_CS_OFFS);
  478. reg |= (phase << REG_PHY_PHASE_OFFS) | delay;
  479. if (mode == PUP_WL_MODE)
  480. reg |= ((INIT_WL_DELAY + delay) << REG_PHY_DQS_REF_DLY_OFFS);
  481. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  482. reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
  483. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  484. do {
  485. reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
  486. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  487. } while (reg); /* Wait for '0' to mark the end of the transaction */
  488. /* If read Leveling mode - need to write to register 3 separetly */
  489. if (mode == PUP_RL_MODE) {
  490. reg = 0;
  491. if (pup == PUP_BC)
  492. reg |= (1 << REG_PHY_BC_OFFS);
  493. else
  494. reg |= (pup << REG_PHY_PUP_OFFS);
  495. reg |= ((0x4 * cs + mode + 1) << REG_PHY_CS_OFFS);
  496. reg |= (INIT_RL_DELAY);
  497. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  498. reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
  499. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  500. do {
  501. reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
  502. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  503. } while (reg);
  504. }
  505. }
  506. /*
  507. * Perform DDR3 PUP Indirect Read
  508. */
  509. u32 ddr3_read_pup_reg(u32 mode, u32 cs, u32 pup)
  510. {
  511. u32 reg;
  512. reg = (pup << REG_PHY_PUP_OFFS) |
  513. ((0x4 * cs + mode) << REG_PHY_CS_OFFS);
  514. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  515. reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_RD;
  516. reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg); /* 0x16A0 */
  517. do {
  518. reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
  519. REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
  520. } while (reg); /* Wait for '0' to mark the end of the transaction */
  521. return reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR); /* 0x16A0 */
  522. }
/*
 * Set training patterns
 *
 * Loads the PBS and DQS training patterns into DRAM (skipped on resume,
 * where the patterns already live at RESUME_RL_PATTERNS_ADDR) and kicks
 * the auto-training pattern write, polling its completion after 100us.
 *
 * @dram_info: DRAM topology/state
 * @resume:    non-zero when restoring from suspend (patterns not reloaded)
 *
 * Returns MV_OK on success, MV_FAIL otherwise.
 */
int ddr3_load_patterns(MV_DRAM_INFO *dram_info, int resume)
{
	u32 reg;

	/* Enable SW override - Required for the ECC Pup */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);

	/* [0] = 1 - Enable SW override */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = (1 << REG_DRAM_TRAINING_AUTO_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	if (resume == 0) {
#if defined(MV88F78X60) || defined(MV88F672X)
		ddr3_load_pbs_patterns(dram_info);
#endif
		ddr3_load_dqs_patterns(dram_info);
	}

	/* Disable SW override - Must be in a different stage */
	/* [0]=0 - Enable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_1_ADDR) |
		(1 << REG_DRAM_TRAINING_1_TRNBPOINT_OFFS);
	reg_write(REG_DRAM_TRAINING_1_ADDR, reg);

	/* Set Base Addr */
#if defined(MV88F67XX)
	reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
#else
	if (resume == 0)
		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
	else
		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR,
			  RESUME_RL_PATTERNS_ADDR);
#endif

	/* Set Patterns: on resume, restrict to CS0 only */
	if (resume == 0) {
		reg = (dram_info->cs_ena << REG_DRAM_TRAINING_CS_OFFS) |
			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
	} else {
		reg = (0x1 << REG_DRAM_TRAINING_CS_OFFS) |
			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
	}

	reg |= (1 << REG_DRAM_TRAINING_AUTO_OFFS);

	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	udelay(100);

	/*
	 * Check if Successful
	 * NOTE(review): success is reported when the "ERROR" bit reads 1 —
	 * this matches the original logic; presumably the bit polarity is
	 * inverted (1 = done/no error). Confirm against the Dunit spec.
	 */
	if (reg_read(REG_DRAM_TRAINING_ADDR) &
	    (1 << REG_DRAM_TRAINING_ERROR_OFFS))
		return MV_OK;
	else
		return MV_FAIL;
}
  580. #if !defined(MV88F67XX)
/*
 * Name:     ddr3_save_training(MV_DRAM_INFO *dram_info)
 * Desc:     saves the training results to memory (RL, WL, PBS, Rx/Tx
 *           centralization) so they can be restored on resume:
 *           one u32 per PUP register, followed by the two Dunit delay
 *           registers; the register count and a simple additive checksum
 *           are stored at NUM_OF_REGISTER_ADDR / CHECKSUM_RESULT_ADDR.
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:    PBS modes are read from CS0 only (tmp_cs is forced to 0x1
 *           mid-loop to stop further CS iterations for those modes).
 * Returns:  None.
 */
void ddr3_save_training(MV_DRAM_INFO *dram_info)
{
	u32 val, pup, tmp_cs, cs, i, dq;
	u32 crc = 0;
	u32 regs = 0;
	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
	u32 mode_config[MAX_TRAINING_MODE];

	/* Map each training mode to its PHY register-file index */
	mode_config[DQS_WR_MODE] = PUP_DQS_WR;
	mode_config[WL_MODE_] = PUP_WL_MODE;
	mode_config[RL_MODE_] = PUP_RL_MODE;
	mode_config[DQS_RD_MODE] = PUP_DQS_RD;
	mode_config[PBS_TX_DM_MODE] = PUP_PBS_TX_DM;
	mode_config[PBS_TX_MODE] = PUP_PBS_TX;
	mode_config[PBS_RX_MODE] = PUP_PBS_RX;

	/* num of training modes */
	for (i = 0; i < MAX_TRAINING_MODE; i++) {
		tmp_cs = dram_info->cs_ena;
		/* num of CS */
		for (cs = 0; cs < MAX_CS; cs++) {
			if (tmp_cs & (1 << cs)) {
				/* num of PUPs */
				for (pup = 0; pup < dram_info->num_of_total_pups;
				     pup++) {
					/* Last slot is the ECC PUP, if enabled */
					if (pup == dram_info->num_of_std_pups &&
					    dram_info->ecc_ena)
						pup = ECC_PUP;
					if (i == PBS_TX_DM_MODE) {
						/*
						 * Change CS bitmask because
						 * PBS works only with CS0
						 */
						tmp_cs = 0x1;
						val = ddr3_read_pup_reg(
							mode_config[i], CS0, pup);
					} else if (i == PBS_TX_MODE ||
						   i == PBS_RX_MODE) {
						/*
						 * Change CS bitmask because
						 * PBS works only with CS0
						 */
						tmp_cs = 0x1;
						/* One register per DQ line */
						for (dq = 0; dq <= DQ_NUM;
						     dq++) {
							val = ddr3_read_pup_reg(
								mode_config[i] + dq,
								CS0,
								pup);
							(*sdram_offset) = val;
							crc += *sdram_offset;
							sdram_offset++;
							regs++;
						}
						continue;
					} else {
						val = ddr3_read_pup_reg(
							mode_config[i], cs, pup);
					}

					*sdram_offset = val;
					crc += *sdram_offset;
					sdram_offset++;
					regs++;
				}
			}
		}
	}

	/* Append the two Dunit delay registers to the saved set */
	*sdram_offset = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
	crc += *sdram_offset;
	sdram_offset++;
	regs++;
	*sdram_offset = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
	crc += *sdram_offset;
	sdram_offset++;
	regs++;

	/* Store count and checksum for validation on resume */
	sdram_offset = (u32 *)NUM_OF_REGISTER_ADDR;
	*sdram_offset = regs;
	DEBUG_SUSPEND_RESUME_S("Training Results CheckSum write= ");
	DEBUG_SUSPEND_RESUME_D(crc, 8);
	DEBUG_SUSPEND_RESUME_S("\n");
	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
	*sdram_offset = crc;
}
/*
 * Name:     ddr3_read_training_results()
 * Desc:     Reads the training results from memory (RL, WL, PBS, Rx/Tx
 *           centralization) saved by ddr3_save_training(), validates the
 *           additive checksum, and writes the values back to the PHY
 *           registry file and the two Dunit delay registers.
 * Args:     None.
 * Notes:    NOTE(review): assumes the saved register count fits in
 *           RESUME_TRAINING_VALUES_MAX (no bounds check here) and that
 *           each DQS_WR entry immediately precedes its WL entry so that
 *           dqs_wr_idx pairs them correctly — confirm against the save
 *           order in ddr3_save_training().
 * Returns:  MV_OK on checksum match, MV_FAIL otherwise.
 */
int ddr3_read_training_results(void)
{
	u32 val, reg, idx, dqs_wr_idx = 0, crc = 0;
	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
	u32 training_val[RESUME_TRAINING_VALUES_MAX] = { 0 };
	u32 regs = *((u32 *)NUM_OF_REGISTER_ADDR);

	/*
	 * Read Training results & Dunit registers from memory and write
	 * it to an array
	 */
	for (idx = 0; idx < regs; idx++) {
		training_val[idx] = *sdram_offset;
		crc += *sdram_offset;
		sdram_offset++;
	}

	/* Validate the additive checksum stored at save time */
	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
	if ((*sdram_offset) == crc) {
		DEBUG_SUSPEND_RESUME_S("Training Results CheckSum read PASS= ");
		DEBUG_SUSPEND_RESUME_D(crc, 8);
		DEBUG_SUSPEND_RESUME_S("\n");
	} else {
		DEBUG_MAIN_S("Wrong Training Results CheckSum\n");
		return MV_FAIL;
	}

	/*
	 * We iterate through all the registers except for the last 2 since
	 * they are Dunit registers (and not PHY registers)
	 */
	for (idx = 0; idx < (regs - 2); idx++) {
		val = training_val[idx];
		reg = (val >> REG_PHY_CS_OFFS) & 0x3F; /*read the phy address */

		/* Check if the values belongs to the DQS WR */
		if (reg == PUP_WL_MODE) {
			/* bit[5:0] in DQS_WR are delay */
			val = (training_val[dqs_wr_idx++] & 0x3F);
			/*
			 * bit[15:10] are DQS_WR delay & bit[9:0] are
			 * WL phase & delay
			 */
			val = (val << REG_PHY_DQS_REF_DLY_OFFS) |
				(training_val[idx] & 0x3C003FF);
			/* Add Request pending and write operation bits */
			val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
		} else if (reg == PUP_DQS_WR) {
			/*
			 * Do nothing since DQS_WR will be done in PUP_WL_MODE
			 */
			continue;
		}

		val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, val);
		do {
			val = (reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR)) &
				REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
		} while (val);	/* Wait for '0' to mark the end of the transaction */
	}

	/* write last 2 Dunit configurations */
	val = training_val[idx];
	reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, val);	/* reg 0x1538 */
	val = training_val[idx + 1];
	reg_write(REG_READ_DATA_READY_DELAYS_ADDR, val);	/* reg 0x153c */

	return MV_OK;
}
/*
 * Name:     ddr3_check_if_resume_mode()
 * Desc:     Reads the address (BOOT_INFO_ADDR) of the Resume Magic word
 *           (SUSPEND_MAGIC_WORD). Before reading, performs a low-frequency
 *           HW write leveling (non-registered DIMMs), reloads the training
 *           patterns in resume mode, and runs HW read leveling on CS0 so
 *           the memory can be read back reliably.
 * Args:     MV_DRAM_INFO *dram_info, u32 freq
 * Notes:    dram_info->cs_ena is temporarily forced to CS0 for the read
 *           leveling and restored from the register afterwards.
 * Returns:  1 if the magic word matches (resume), 0 if not, or a
 *           negative MV_DDR3_TRAINING_ERR_* code on failure.
 */
int ddr3_check_if_resume_mode(MV_DRAM_INFO *dram_info, u32 freq)
{
	u32 magic_word;
	u32 *sdram_offset = (u32 *)BOOT_INFO_ADDR;

	if (dram_info->reg_dimm != 1) {
		/*
		 * Perform write leveling in order to initiate the phy with
		 * low frequency
		 */
		if (MV_OK != ddr3_write_leveling_hw(freq, dram_info)) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
			return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
		}
	}

	if (MV_OK != ddr3_load_patterns(dram_info, 1)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
		return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
	}

	/* Enable CS0 only for RL */
	dram_info->cs_ena = 0x1;

	/* Perform Read leveling in order to get stable memory */
	if (MV_OK != ddr3_read_leveling_hw(freq, dram_info)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
		/*
		 * NOTE(review): the WR_LVL_HW error code is returned for a
		 * read-leveling failure — matches the original; confirm
		 * whether a dedicated RD_LVL code was intended.
		 */
		return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
	}

	/* Back to relevant CS */
	dram_info->cs_ena = ddr3_get_cs_ena_from_reg();

	magic_word = *sdram_offset;
	return magic_word == SUSPEND_MAGIC_WORD;
}
/*
 * Name:     ddr3_training_suspend_resume()
 * Desc:     Execute the Resume state: restore the saved training results
 *           to the PHY/Dunit, pulse the auto read-leveling trigger while
 *           resetting the read FIFO, raise the frequency back to the
 *           target, and (with ECC) scrub the RL-pattern/training area.
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:    Register write ordering and the 2us delay between FIFO reset
 *           and clearing the RL trigger are part of the HW sequence.
 * Returns:  MV_OK on success, MV_FAIL or MV_DDR3_TRAINING_ERR_DFS_H2L
 *           on failure.
 */
int ddr3_training_suspend_resume(MV_DRAM_INFO *dram_info)
{
	u32 freq, reg;
	int tmp_ratio;

	/* Configure DDR */
	if (MV_OK != ddr3_read_training_results())
		return MV_FAIL;

	/* Reset read FIFO */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Start Auto Read Leveling procedure */
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));

	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset  */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	udelay(2);

	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	/* Clear Auto Read Leveling procedure */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	/* Return to target frequency */
	freq = dram_info->target_frequency;
	tmp_ratio = 1;
	if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio, dram_info)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
		return MV_DDR3_TRAINING_ERR_DFS_H2L;
	}

	if (dram_info->ecc_ena) {
		/* Scrubbing the RL area pattern and the training area */
		mv_sys_xor_finish();
		dram_info->num_cs = 1;
		dram_info->cs_ena = 1;
		mv_sys_xor_init(dram_info);
		mv_xor_mem_init(0, RESUME_RL_PATTERNS_ADDR,
				RESUME_RL_PATTERNS_SIZE, 0xFFFFFFFF, 0xFFFFFFFF);

		/* Wait for previous transfer completion */
		while (mv_xor_state_get(0) != MV_IDLE)
			;

		/* Return XOR State */
		mv_sys_xor_finish();
	}

	return MV_OK;
}
  832. #endif
  833. void ddr3_print_freq(u32 freq)
  834. {
  835. u32 tmp_freq;
  836. switch (freq) {
  837. case 0:
  838. tmp_freq = 100;
  839. break;
  840. case 1:
  841. tmp_freq = 300;
  842. break;
  843. case 2:
  844. tmp_freq = 360;
  845. break;
  846. case 3:
  847. tmp_freq = 400;
  848. break;
  849. case 4:
  850. tmp_freq = 444;
  851. break;
  852. case 5:
  853. tmp_freq = 500;
  854. break;
  855. case 6:
  856. tmp_freq = 533;
  857. break;
  858. case 7:
  859. tmp_freq = 600;
  860. break;
  861. case 8:
  862. tmp_freq = 666;
  863. break;
  864. case 9:
  865. tmp_freq = 720;
  866. break;
  867. case 10:
  868. tmp_freq = 800;
  869. break;
  870. default:
  871. tmp_freq = 100;
  872. }
  873. printf("Current frequency is: %dMHz\n", tmp_freq);
  874. }
  875. int ddr3_get_min_max_read_sample_delay(u32 cs_enable, u32 reg, u32 *min,
  876. u32 *max, u32 *cs_max)
  877. {
  878. u32 cs, delay;
  879. *min = 0xFFFFFFFF;
  880. *max = 0x0;
  881. for (cs = 0; cs < MAX_CS; cs++) {
  882. if ((cs_enable & (1 << cs)) == 0)
  883. continue;
  884. delay = ((reg >> (cs * 8)) & 0x1F);
  885. if (delay < *min)
  886. *min = delay;
  887. if (delay > *max) {
  888. *max = delay;
  889. *cs_max = cs;
  890. }
  891. }
  892. return MV_OK;
  893. }
  894. int ddr3_get_min_max_rl_phase(MV_DRAM_INFO *dram_info, u32 *min, u32 *max,
  895. u32 cs)
  896. {
  897. u32 pup, reg, phase;
  898. *min = 0xFFFFFFFF;
  899. *max = 0x0;
  900. for (pup = 0; pup < dram_info->num_of_total_pups; pup++) {
  901. reg = ddr3_read_pup_reg(PUP_RL_MODE, cs, pup);
  902. phase = ((reg >> 8) & 0x7);
  903. if (phase < *min)
  904. *min = phase;
  905. if (phase > *max)
  906. *max = phase;
  907. }
  908. return MV_OK;
  909. }
  910. int ddr3_odt_activate(int activate)
  911. {
  912. u32 reg, mask;
  913. mask = (1 << REG_DUNIT_ODT_CTRL_OVRD_OFFS) |
  914. (1 << REG_DUNIT_ODT_CTRL_OVRD_VAL_OFFS);
  915. /* {0x0000149C} - DDR Dunit ODT Control Register */
  916. reg = reg_read(REG_DUNIT_ODT_CTRL_ADDR);
  917. if (activate)
  918. reg |= mask;
  919. else
  920. reg &= ~mask;
  921. reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
  922. return MV_OK;
  923. }
/*
 * Derive the read ODT on/off timing from the trained read sample delays
 * and read-leveling phases of the enabled chip selects, and program the
 * result into the ODT Timing (Low) register.
 */
int ddr3_odt_read_dynamic_config(MV_DRAM_INFO *dram_info)
{
	u32 min_read_sample_delay, max_read_sample_delay, max_rl_phase;
	u32 min, max, cs_max;
	u32 cs_ena, reg;

	reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
	cs_ena = ddr3_get_cs_ena_from_reg();

	/* Get minimum and maximum of read sample delay of all CS */
	ddr3_get_min_max_read_sample_delay(cs_ena, reg, &min_read_sample_delay,
					   &max_read_sample_delay, &cs_max);

	/*
	 * Get minimum and maximum read leveling phase which belongs to the
	 * maximal read sample delay
	 */
	ddr3_get_min_max_rl_phase(dram_info, &min, &max, cs_max);
	max_rl_phase = max;	/* only the maximal phase feeds the ODT-off time */

	/* DDR ODT Timing (Low) Register calculation */
	reg = reg_read(REG_ODT_TIME_LOW_ADDR);
	/*
	 * Clear 9 bits starting at the ODT-on read offset; this presumably
	 * spans both the 4-bit on field and the adjacent 5-bit off field
	 * written below - TODO confirm against the register spec.
	 */
	reg &= ~(0x1FF << REG_ODT_ON_CTL_RD_OFFS);
	/* ODT-on: one cycle before the earliest read sample (4-bit field) */
	reg |= (((min_read_sample_delay - 1) & 0xF) << REG_ODT_ON_CTL_RD_OFFS);
	/* ODT-off: latest sample + 4 + half the max RL phase + 1 (5-bit field) */
	reg |= (((max_read_sample_delay + 4 + (((max_rl_phase + 1) / 2) + 1)) &
		 0x1F) << REG_ODT_OFF_CTL_RD_OFFS);
	reg_write(REG_ODT_TIME_LOW_ADDR, reg);

	return MV_OK;
}