ddr3_write_leveling.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_hw_training.h"

/*
 * Debug
 */
#define DEBUG_WL_C(s, d, l) \
	DEBUG_WL_S(s); DEBUG_WL_D(d, l); DEBUG_WL_S("\n")
#define DEBUG_WL_FULL_C(s, d, l) \
	DEBUG_WL_FULL_S(s); DEBUG_WL_FULL_D(d, l); DEBUG_WL_FULL_S("\n")

#ifdef MV_DEBUG_WL
#define DEBUG_WL_S(s)		puts(s)
#define DEBUG_WL_D(d, l)	printf("%x", d)
#define DEBUG_RL_S(s) \
	debug_cond(ddr3_get_log_level() >= MV_LOG_LEVEL_2, "%s", s)
#define DEBUG_RL_D(d, l) \
	debug_cond(ddr3_get_log_level() >= MV_LOG_LEVEL_2, "%x", d)
#else
#define DEBUG_WL_S(s)
#define DEBUG_WL_D(d, l)
#endif

#ifdef MV_DEBUG_WL_FULL
#define DEBUG_WL_FULL_S(s)	puts(s)
#define DEBUG_WL_FULL_D(d, l)	printf("%x", d)
#else
#define DEBUG_WL_FULL_S(s)
#define DEBUG_WL_FULL_D(d, l)
#endif

#define WL_SUP_EXPECTED_DATA	0x21
#define WL_SUP_READ_DRAM_ENTRY	0x8
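/*
 * Added note: the hi-freq supplement reads its burst pattern back starting at
 * entry WL_SUP_READ_DRAM_ENTRY of the local buffer and expects byte lane
 * 'pup' to contain WL_SUP_EXPECTED_DATA + pup. The comparison against these
 * values is what classifies a lane as correct, one clock off, or aligned to
 * DQS (see ddr3_wl_supplement() below).
 */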
static int ddr3_write_leveling_single_cs(u32 cs, u32 freq, int ratio_2to1,
					 u32 *result, MV_DRAM_INFO *dram_info);
static void ddr3_write_ctrl_pup_reg(int bc_acc, u32 pup, u32 reg_addr,
				    u32 data);

extern u16 odt_static[ODT_OPT][MAX_CS];
extern u16 odt_dynamic[ODT_OPT][MAX_CS];
extern u32 wl_sup_pattern[LEN_WL_SUP_PATTERN];

/*
 * Name:     ddr3_write_leveling_hw
 * Desc:     Execute Write leveling phase by HW
 * Args:     freq      - current sequence frequency
 *           dram_info - main struct
 * Notes:
 * Returns:  MV_OK if success, MV_FAIL if fail.
 */
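/*
 * Added summary of the HW pass below: the write-leveling trigger, retest
 * count and enabled chip selects are programmed into the Training register
 * (0x15B0), auto-training is kicked via the Training Shadow register, and the
 * auto bit is polled until it clears. If the status bit in the Training
 * register reports success, the per-PUP phase, delay and DQS values are read
 * back into dram_info->wl_val[][][] for later use by the hi-freq supplement
 * and DQS centralization stages.
 */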
int ddr3_write_leveling_hw(u32 freq, MV_DRAM_INFO *dram_info)
{
	u32 reg, phase, delay, cs, pup;
#ifdef MV88F67XX
	int dpde_flag = 0;
#endif

	/* Debug message - Start HW Write leveling procedure */
	DEBUG_WL_S("DDR3 - Write Leveling - Starting HW WL procedure\n");

#ifdef MV88F67XX
	/* Dynamic pad issue (BTS669) during WL */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	if (reg & (1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS)) {
		dpde_flag = 1;
		reg_write(REG_DUNIT_CTRL_LOW_ADDR,
			  reg & ~(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS));
	}
#endif

	reg = 1 << REG_DRAM_TRAINING_WL_OFFS;
	/* Config the retest number */
	reg |= (COUNT_HW_WL << REG_DRAM_TRAINING_RETEST_OFFS);
	reg |= (dram_info->cs_ena << REG_DRAM_TRAINING_CS_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	reg = reg_read(REG_DRAM_TRAINING_SHADOW_ADDR) |
		(1 << REG_DRAM_TRAINING_AUTO_OFFS);
	reg_write(REG_DRAM_TRAINING_SHADOW_ADDR, reg);

	/* Wait */
	do {
		reg = reg_read(REG_DRAM_TRAINING_SHADOW_ADDR) &
			(1 << REG_DRAM_TRAINING_AUTO_OFFS);
	} while (reg);		/* Wait for '0' */

	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Check if Successful */
	if (reg & (1 << REG_DRAM_TRAINING_ERROR_OFFS)) {
		/*
		 * Read results to arrays - Results are required for WL
		 * High freq Supplement and DQS Centralization
		 */
		for (cs = 0; cs < MAX_CS; cs++) {
			if (dram_info->cs_ena & (1 << cs)) {
				for (pup = 0;
				     pup < dram_info->num_of_total_pups;
				     pup++) {
					if (pup == dram_info->num_of_std_pups &&
					    dram_info->ecc_ena)
						pup = ECC_PUP;
					reg = ddr3_read_pup_reg(PUP_WL_MODE, cs,
								pup);
					phase = (reg >> REG_PHY_PHASE_OFFS) &
						PUP_PHASE_MASK;
					delay = reg & PUP_DELAY_MASK;
					dram_info->wl_val[cs][pup][P] = phase;
					dram_info->wl_val[cs][pup][D] = delay;
					dram_info->wl_val[cs][pup][S] =
						WL_HI_FREQ_STATE - 1;
					reg = ddr3_read_pup_reg(PUP_WL_MODE + 0x1,
								cs, pup);
					dram_info->wl_val[cs][pup][DQS] =
						(reg & 0x3F);
				}
#ifdef MV_DEBUG_WL
				/*
				 * Debug message - Print res for cs[i]:
				 * cs, PUP, Phase, Delay
				 */
				DEBUG_WL_S("DDR3 - Write Leveling - Write Leveling Cs - ");
				DEBUG_WL_D((u32)cs, 1);
				DEBUG_WL_S(" Results:\n");
				for (pup = 0;
				     pup < dram_info->num_of_total_pups;
				     pup++) {
					if (pup == dram_info->num_of_std_pups &&
					    dram_info->ecc_ena)
						pup = ECC_PUP;
					DEBUG_WL_S("DDR3 - Write Leveling - PUP: ");
					DEBUG_WL_D((u32)pup, 1);
					DEBUG_WL_S(", Phase: ");
					DEBUG_WL_D((u32)dram_info->wl_val[cs][pup][P], 1);
					DEBUG_WL_S(", Delay: ");
					DEBUG_WL_D((u32)dram_info->wl_val[cs][pup][D], 2);
					DEBUG_WL_S("\n");
				}
#endif
			}
		}

		/* Dynamic pad issue (BTS669) during WL */
#ifdef MV88F67XX
		if (dpde_flag) {
			reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR) |
				(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS);
			reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
		}
#endif

		DEBUG_WL_S("DDR3 - Write Leveling - HW WL Ended Successfully\n");
		return MV_OK;
	} else {
		DEBUG_WL_S("DDR3 - Write Leveling - HW WL Error\n");
		return MV_FAIL;
	}
}

/*
 * Name:     ddr3_wl_supplement
 * Desc:     Write Leveling Supplement
 * Args:     dram_info - main struct
 * Notes:
 * Returns:  MV_OK if success, MV_FAIL if fail.
 */
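/*
 * Added summary of the supplement pass below: for every enabled CS (and the
 * ECC PUP when present) a known pattern (wl_sup_pattern) is burst-written to
 * DRAM and read back, and each byte lane is compared against
 * WL_SUP_EXPECTED_DATA + pup. A match leaves the lane as-is; a mismatch of
 * one_clk_err in the "clock longer than DQS" direction shifts the WL phase by
 * WL_HI_FREQ_SHIFT; a mismatch of align_err (clock aligned to DQS) resets
 * phase and delay to zero. A second iteration then re-checks the corrected
 * values, and the pass fails if any PUP is still not locked.
 */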
int ddr3_wl_supplement(MV_DRAM_INFO *dram_info)
{
	u32 cs, cnt, pup_num, sum, phase, delay, max_pup_num, pup, sdram_offset;
	u32 tmp_count, ecc, reg;
	u32 ddr_width, tmp_pup, idx;
	u32 sdram_pup_val, uj;
	u32 one_clk_err = 0, align_err = 0, no_err = 0, err = 0, err_n = 0;
	u32 sdram_data[LEN_WL_SUP_PATTERN] __aligned(32) = { 0 };

	ddr_width = dram_info->ddr_width;
	no_err = 0;

	DEBUG_WL_S("DDR3 - Write Leveling Hi-Freq Supplement - Starting\n");

	switch (ddr_width) {
	/* Data error from pos-edge to pos-edge */
	case 16:
		one_clk_err = 4;
		align_err = 4;
		break;
	case 32:
		one_clk_err = 8;
		align_err = 8;
		break;
	case 64:
		one_clk_err = 0x10;
		align_err = 0x10;
		break;
	default:
		DEBUG_WL_S("Error - bus width!!!\n");
		return MV_FAIL;
	}

	/* Enable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* [0] = 1 - Enable SW override */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
	DEBUG_WL_S("DDR3 - Write Leveling Hi-Freq Supplement - SW Override Enabled\n");

	reg = (1 << REG_DRAM_TRAINING_AUTO_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	tmp_count = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			sum = 0;

			/*
			 * 2 iterations loop: 1) actual WL results,
			 * 2) fix WL if needed
			 */
			for (cnt = 0; cnt < COUNT_WL_HI_FREQ; cnt++) {
				DEBUG_WL_C("COUNT = ", cnt, 1);
				for (ecc = 0; ecc < (dram_info->ecc_ena + 1); ecc++) {
					if (ecc) {
						DEBUG_WL_S("ECC PUP:\n");
					} else {
						DEBUG_WL_S("DATA PUP:\n");
					}

					max_pup_num =
						dram_info->num_of_std_pups *
						(1 - ecc) + ecc;
					/* ECC Support - Switch ECC Mux on ecc=1 */
					reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
						~(1 << REG_DRAM_TRAINING_2_ECC_MUX_OFFS);
					reg |= (dram_info->ecc_ena * ecc <<
						REG_DRAM_TRAINING_2_ECC_MUX_OFFS);
					reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

					ddr3_reset_phy_read_fifo();

					/* Write to memory */
					sdram_offset = tmp_count *
						(SDRAM_CS_SIZE + 1) + 0x200;
					if (MV_OK != ddr3_dram_sram_burst(
						    (u32)wl_sup_pattern,
						    sdram_offset,
						    LEN_WL_SUP_PATTERN))
						return MV_FAIL;

					/* Read from memory */
					if (MV_OK != ddr3_dram_sram_burst(
						    sdram_offset,
						    (u32)sdram_data,
						    LEN_WL_SUP_PATTERN))
						return MV_FAIL;

					/* Print the buffer */
					for (uj = 0; uj < LEN_WL_SUP_PATTERN; uj++) {
						if ((uj % 4 == 0) && (uj != 0))
							DEBUG_WL_S("\n");
						DEBUG_WL_D(sdram_data[uj], 8);
						DEBUG_WL_S(" ");
					}

					/* Check which PUP's DQS/DATA is in error */
					for (pup = 0; pup < max_pup_num; pup++) {
						/* ECC support - bit 8 */
						pup_num = (ecc) ? ECC_PUP : pup;
						if (pup < 4) {
							/* lower 32 bit */
							tmp_pup = pup;
							idx = WL_SUP_READ_DRAM_ENTRY;
						} else {
							/* higher 32 bit */
							tmp_pup = pup - 4;
							idx = WL_SUP_READ_DRAM_ENTRY + 1;
						}

						DEBUG_WL_S("\nCS: ");
						DEBUG_WL_D((u32)cs, 1);
						DEBUG_WL_S(" PUP: ");
						DEBUG_WL_D((u32)pup_num, 1);
						DEBUG_WL_S("\n");

						sdram_pup_val =
							(sdram_data[idx] >>
							 (tmp_pup * 8)) & 0xFF;
						DEBUG_WL_C("Actual Data = ",
							   sdram_pup_val, 2);
						DEBUG_WL_C("Expected Data = ",
							   (WL_SUP_EXPECTED_DATA + pup), 2);

						/*
						 * ALIGNMENT: expected data
						 * minus actual data
						 */
						err = (WL_SUP_EXPECTED_DATA + pup) -
							sdram_pup_val;
						/*
						 * CLOCK LONG: actual data
						 * minus expected data
						 */
						err_n = sdram_pup_val -
							(WL_SUP_EXPECTED_DATA + pup);

						DEBUG_WL_C("err = ", err, 2);
						DEBUG_WL_C("err_n = ", err_n, 2);

						if (err == no_err) {
							/* PUP is correct - increment State */
							dram_info->wl_val[cs][pup_num][S] = 1;
						} else if (err_n == one_clk_err) {
							/* Clock is longer than DQS */
							phase = (dram_info->wl_val[cs][pup_num][P] +
								 WL_HI_FREQ_SHIFT) %
								MAX_PHASE_2TO1;
							dram_info->wl_val[cs][pup_num][P] = phase;
							delay = dram_info->wl_val[cs][pup_num][D];
							DEBUG_WL_S("#### Clock is longer than DQS more than one clk cycle ####\n");
							ddr3_write_pup_reg(PUP_WL_MODE, cs,
									   pup * (1 - ecc) +
									   ECC_PUP * ecc,
									   phase, delay);
						} else if (err == align_err) {
							/* Clock is aligned to DQS */
							phase = dram_info->wl_val[cs][pup_num][P];
							delay = dram_info->wl_val[cs][pup_num][D];
							DEBUG_WL_S("#### Alignment PUPS problem ####\n");
							if ((phase == 0) ||
							    ((phase == 1) && (delay <= 0x10))) {
								DEBUG_WL_S("#### Warning - Possible Layout Violation (DQS is longer than CLK) ####\n");
							}

							phase = 0x0;
							delay = 0x0;
							dram_info->wl_val[cs][pup_num][P] = phase;
							dram_info->wl_val[cs][pup_num][D] = delay;
							ddr3_write_pup_reg(PUP_WL_MODE, cs,
									   pup * (1 - ecc) +
									   ECC_PUP * ecc,
									   phase, delay);
						}

						/* Stop condition for ECC phase */
						pup = (ecc) ? max_pup_num : pup;
					}

					/* ECC Support - Disable ECC MUX */
					reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
						~(1 << REG_DRAM_TRAINING_2_ECC_MUX_OFFS);
					reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
				}
			}

			for (pup = 0; pup < dram_info->num_of_std_pups; pup++)
				sum += dram_info->wl_val[cs][pup][S];
			if (dram_info->ecc_ena)
				sum += dram_info->wl_val[cs][ECC_PUP][S];

			/* Check if any PUP is not locked after the change */
			if (sum < (WL_HI_FREQ_STATE *
				   dram_info->num_of_total_pups)) {
				DEBUG_WL_C("DDR3 - Write Leveling Hi-Freq Supplement - didn't work for Cs - ",
					   (u32)cs, 1);
				return MV_FAIL;
			}
			tmp_count++;
		}
	}

	dram_info->wl_max_phase = 0;
	dram_info->wl_min_phase = 10;

	/*
	 * Read results to arrays - Results are required for DQS Centralization
	 */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			for (pup = 0; pup < dram_info->num_of_total_pups; pup++) {
				if (pup == dram_info->num_of_std_pups &&
				    dram_info->ecc_ena)
					pup = ECC_PUP;
				reg = ddr3_read_pup_reg(PUP_WL_MODE, cs, pup);
				phase = (reg >> REG_PHY_PHASE_OFFS) &
					PUP_PHASE_MASK;
				if (phase > dram_info->wl_max_phase)
					dram_info->wl_max_phase = phase;
				if (phase < dram_info->wl_min_phase)
					dram_info->wl_min_phase = phase;
			}
		}
	}

	/* Disable SW override - Must be in a different stage */
	/* [0] = 0 - Disable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_1_ADDR) |
		(1 << REG_DRAM_TRAINING_1_TRNBPOINT_OFFS);
	reg_write(REG_DRAM_TRAINING_1_ADDR, reg);

	DEBUG_WL_S("DDR3 - Write Leveling Hi-Freq Supplement - Ended Successfully\n");

	return MV_OK;
}

/*
 * Name:     ddr3_write_leveling_hw_reg_dimm
 * Desc:     Execute Write leveling phase by HW for registered DIMM
 * Args:     freq      - current sequence frequency
 *           dram_info - main struct
 * Notes:
 * Returns:  MV_OK if success, MV_FAIL if fail.
 */
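/*
 * Added note: this registered-DIMM variant differs from
 * ddr3_write_leveling_hw() in three ways visible below: it bails out with
 * MV_NO_CHANGE when more than two chip selects are populated, it temporarily
 * shifts the clock start point through the control PUP deskew registers when
 * the target frequency is DDR_400 or lower (and moves it back afterwards),
 * and on a failed HW pass it programs zeroed per-PUP leveling values instead
 * of returning an error.
 */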
int ddr3_write_leveling_hw_reg_dimm(u32 freq, MV_DRAM_INFO *dram_info)
{
	u32 reg, phase, delay, cs, pup, pup_num;
	__maybe_unused int dpde_flag = 0;

	/* Debug message - Start HW Write leveling procedure */
	DEBUG_WL_S("DDR3 - Write Leveling - Starting HW WL procedure\n");

	if (dram_info->num_cs > 2) {
		DEBUG_WL_S("DDR3 - Write Leveling - HW WL Ended Successfully\n");
		return MV_NO_CHANGE;
	}

	/* If target freq = 400 move clock start point */
	/* Write to control PUP to Control Deskew Regs */
	if (freq <= DDR_400) {
		for (pup = 0; pup <= dram_info->num_of_total_pups; pup++) {
			/* PUP_DELAY_MASK 0x1F */
			/* reg = 0x0C10001F + (uj << 16); */
			ddr3_write_ctrl_pup_reg(1, pup, CNTRL_PUP_DESKEW + pup,
						0x1F);
		}
	}

#ifdef MV88F67XX
	/* Dynamic pad issue (BTS669) during WL */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	if (reg & (1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS)) {
		dpde_flag = 1;
		reg_write(REG_DUNIT_CTRL_LOW_ADDR,
			  reg & ~(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS));
	}
#endif

	reg = (1 << REG_DRAM_TRAINING_WL_OFFS);
	/* Config the retest number */
	reg |= (COUNT_HW_WL << REG_DRAM_TRAINING_RETEST_OFFS);
	reg |= (dram_info->cs_ena << REG_DRAM_TRAINING_CS_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	reg = reg_read(REG_DRAM_TRAINING_SHADOW_ADDR) |
		(1 << REG_DRAM_TRAINING_AUTO_OFFS);
	reg_write(REG_DRAM_TRAINING_SHADOW_ADDR, reg);

	/* Wait */
	do {
		reg = reg_read(REG_DRAM_TRAINING_SHADOW_ADDR) &
			(1 << REG_DRAM_TRAINING_AUTO_OFFS);
	} while (reg);		/* Wait for '0' */

	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Check if Successful */
	if (reg & (1 << REG_DRAM_TRAINING_ERROR_OFFS)) {
		/*
		 * Read results to arrays - Results are required for WL High
		 * freq Supplement and DQS Centralization
		 */
		for (cs = 0; cs < MAX_CS; cs++) {
			if (dram_info->cs_ena & (1 << cs)) {
				for (pup = 0;
				     pup < dram_info->num_of_total_pups;
				     pup++) {
					if (pup == dram_info->num_of_std_pups &&
					    dram_info->ecc_ena)
						pup = ECC_BIT;
					reg = ddr3_read_pup_reg(PUP_WL_MODE, cs,
								pup);
					phase = (reg >> REG_PHY_PHASE_OFFS) &
						PUP_PHASE_MASK;
					delay = reg & PUP_DELAY_MASK;
					dram_info->wl_val[cs][pup][P] = phase;
					dram_info->wl_val[cs][pup][D] = delay;
					if ((phase == 1) && (delay >= 0x1D)) {
						/*
						 * Need to do it here for
						 * incorrect WL values
						 */
						ddr3_write_pup_reg(PUP_WL_MODE,
								   cs, pup, 0, 0);
						dram_info->wl_val[cs][pup][P] = 0;
						dram_info->wl_val[cs][pup][D] = 0;
					}
					dram_info->wl_val[cs][pup][S] =
						WL_HI_FREQ_STATE - 1;
					reg = ddr3_read_pup_reg(PUP_WL_MODE + 0x1,
								cs, pup);
					dram_info->wl_val[cs][pup][DQS] =
						(reg & 0x3F);
				}
#ifdef MV_DEBUG_WL
				/*
				 * Debug message - Print res for cs[i]:
				 * cs, PUP, Phase, Delay
				 */
				DEBUG_WL_S("DDR3 - Write Leveling - Write Leveling Cs - ");
				DEBUG_WL_D((u32)cs, 1);
				DEBUG_WL_S(" Results:\n");
				for (pup = 0;
				     pup < dram_info->num_of_total_pups;
				     pup++) {
					DEBUG_WL_S("DDR3 - Write Leveling - PUP: ");
					DEBUG_WL_D((u32)pup, 1);
					DEBUG_WL_S(", Phase: ");
					DEBUG_WL_D((u32)dram_info->wl_val[cs][pup][P], 1);
					DEBUG_WL_S(", Delay: ");
					DEBUG_WL_D((u32)dram_info->wl_val[cs][pup][D], 2);
					DEBUG_WL_S("\n");
				}
#endif
			}
		}

#ifdef MV88F67XX
		/* Dynamic pad issue (BTS669) during WL */
		if (dpde_flag) {
			reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR) |
				(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS);
			reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
		}
#endif

		DEBUG_WL_S("DDR3 - Write Leveling - HW WL Ended Successfully\n");

		/* If target freq = 400 move clock back */
		/* Write to control PUP to Control Deskew Regs */
		if (freq <= DDR_400) {
			for (pup = 0; pup <= dram_info->num_of_total_pups;
			     pup++) {
				ddr3_write_ctrl_pup_reg(1, pup,
							CNTRL_PUP_DESKEW + pup, 0);
			}
		}

		return MV_OK;
	} else {
		/* Configure each PUP with default (zero) leveling settings */
		for (cs = 0; cs < MAX_CS; cs++) {
			if (dram_info->cs_ena & (1 << cs)) {
				for (pup = 0;
				     pup < dram_info->num_of_total_pups;
				     pup++) {
					/* ECC support - bit 8 */
					pup_num = (pup == dram_info->num_of_std_pups) ?
						ECC_BIT : pup;
					ddr3_write_pup_reg(PUP_WL_MODE, cs,
							   pup_num, 0, 0);
				}
			}
		}

		reg_write(REG_DRAM_TRAINING_ADDR, 0);

		/* If target freq = 400 move clock back */
		/* Write to control PUP to Control Deskew Regs */
		if (freq <= DDR_400) {
			for (pup = 0; pup <= dram_info->num_of_total_pups;
			     pup++) {
				ddr3_write_ctrl_pup_reg(1, pup,
							CNTRL_PUP_DESKEW + pup, 0);
			}
		}

		DEBUG_WL_S("DDR3 - Write Leveling - HW WL Ended Successfully\n");
		return MV_NO_CHANGE;
	}
}

/*
 * Name:     ddr3_write_leveling_sw
 * Desc:     Execute Write leveling phase by SW
 * Args:     freq      - current sequence frequency
 *           dram_info - main struct
 * Notes:
 * Returns:  MV_OK if success, MV_FAIL if fail.
 */
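/*
 * Added summary of the SW pass below: all enabled chip selects are first put
 * into output-buffer-off mode with the proper ODT via MR1, then SW override
 * and the PHY write-leveling mode are enabled. For each enabled CS the code
 * issues a burst of refresh commands, enables write leveling in MR1, and
 * calls ddr3_write_leveling_single_cs() to sweep phase/delay until every PUP
 * reports a lock. Finally MR1, the training registers and the output buffers
 * are restored for all chip selects.
 */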
int ddr3_write_leveling_sw(u32 freq, int ratio_2to1, MV_DRAM_INFO *dram_info)
{
	u32 reg, cs, cnt, pup, max_pup_num;
	u32 res[MAX_CS];
	__maybe_unused int dpde_flag = 0;

	max_pup_num = dram_info->num_of_total_pups;

	/* Debug message - Start SW Write leveling procedure */
	DEBUG_WL_S("DDR3 - Write Leveling - Starting SW WL procedure\n");

#ifdef MV88F67XX
	/* Dynamic pad issue (BTS669) during WL */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	if (reg & (1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS)) {
		dpde_flag = 1;
		reg_write(REG_DUNIT_CTRL_LOW_ADDR,
			  reg & ~(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS));
	}
#endif

	/* Set Output buffer-off to all CS and correct ODT values */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_ODT_MASK;
			reg |= odt_static[dram_info->cs_ena][cs];
			reg |= (1 << REG_DDR3_MR1_OUTBUF_DIS_OFFS);
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);
		}
	}

	DEBUG_WL_FULL_S("DDR3 - Write Leveling - Qoff and RTT Values are set for all Cs\n");

	/* Enable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* [0] = 1 - Enable SW override */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
	DEBUG_WL_FULL_S("DDR3 - Write Leveling - SW Override Enabled\n");

	/* Enable PHY write leveling mode */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
		~(1 << REG_DRAM_TRAINING_2_WL_MODE_OFFS);
	/* [2] = 0 - TrnWLMode - Enable */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Reset WL results array */
	memset(dram_info->wl_val, 0, sizeof(u32) * MAX_CS * MAX_PUP_NUM * 7);

	/* Loop for each cs */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			DEBUG_WL_FULL_C("DDR3 - Write Leveling - Starting working with Cs - ",
					(u32)cs, 1);

			/* Refresh X9 current cs */
			DEBUG_WL_FULL_S("DDR3 - Write Leveling - Refresh X9\n");
			for (cnt = 0; cnt < COUNT_WL_RFRS; cnt++) {
				reg = REG_SDRAM_OPERATION_CMD_RFRS &
					~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
				/* [3-0] = 0x2 - refresh, [11-8] - enable current cs */
				/* 0x1418 - SDRAM Operation Register */
				reg_write(REG_SDRAM_OPERATION_ADDR, reg);

				do {
					reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
						REG_SDRAM_OPERATION_CMD_RFRS_DONE;
				} while (reg);	/* Wait for '0' */
			}

			/*
			 * Configure MR1 in Cs[CsNum] - write leveling on,
			 * output buffer on
			 */
			DEBUG_WL_FULL_S("DDR3 - Write Leveling - Configure MR1 for current Cs: WL-on,OB-on\n");
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_OUTBUF_WL_MASK;
			/* Set ODT Values */
			reg &= REG_DDR3_MR1_ODT_MASK;
			reg |= odt_static[dram_info->cs_ena][cs];
			/* Enable WL MODE */
			reg |= (1 << REG_DDR3_MR1_WL_ENA_OFFS);
			/* [7]=1, [12]=0 - Output Buffer and write leveling enabled */
			reg_write(REG_DDR3_MR1_ADDR, reg);	/* 0x15D4 - DDR3 MR1 Register */

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);

			/* Write leveling cs[cs] */
			if (MV_OK != ddr3_write_leveling_single_cs(
				    cs, freq, ratio_2to1,
				    (u32 *)(res + cs), dram_info)) {
				DEBUG_WL_FULL_C("DDR3 - Write Leveling single Cs - FAILED - Cs - ",
						(u32)cs, 1);
				for (pup = 0; pup < max_pup_num; pup++) {
					if (((res[cs] >> pup) & 0x1) == 0) {
						DEBUG_WL_C("Failed Byte : ",
							   pup, 1);
					}
				}
				return MV_FAIL;
			}

			/* Set TrnWLDeUpd - After each CS is done */
			reg = reg_read(REG_TRAINING_WL_ADDR) |
				(1 << REG_TRAINING_WL_CS_DONE_OFFS);
			/* 0x16AC - Training Write leveling register */
			reg_write(REG_TRAINING_WL_ADDR, reg);

			/*
			 * Debug message - Finished Write leveling cs[cs] -
			 * each PUP Fail/Success
			 */
			DEBUG_WL_FULL_C("DDR3 - Write Leveling - Finished Cs - ",
					(u32)cs, 1);
			DEBUG_WL_FULL_C("DDR3 - Write Leveling - The Results: 1-PUP locked, 0-PUP failed -",
					(u32)res[cs], 3);

			/*
			 * Configure MR1 in cs[cs] - write leveling off (0),
			 * output buffer off (1)
			 */
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_OUTBUF_WL_MASK;
			reg |= (1 << REG_DDR3_MR1_OUTBUF_DIS_OFFS);
			/* No need to sort ODT since it is same CS */
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);
		}
	}

	/* Disable WL Mode */
	/* [2]=1 - TrnWLMode - Disable */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= (1 << REG_DRAM_TRAINING_2_WL_MODE_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Disable SW override - Must be in a different stage */
	/* [0]=0 - Disable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Set Output buffer-on to all CS and correct ODT values */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_ODT_MASK;
			reg &= REG_DDR3_MR1_OUTBUF_WL_MASK;
			reg |= odt_static[dram_info->cs_ena][cs];
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);
		}
	}

#ifdef MV88F67XX
	/* Dynamic pad issue (BTS669) during WL */
	if (dpde_flag) {
		reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR) |
			(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS);
		reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
	}
#endif

	DEBUG_WL_FULL_S("DDR3 - Write Leveling - Finished WL procedure for all Cs\n");

	return MV_OK;
}

#if !defined(MV88F672X)
/*
 * Name:     ddr3_write_leveling_sw_reg_dimm
 * Desc:     Execute Write leveling phase by SW for registered DIMM
 * Args:     freq      - current sequence frequency
 *           dram_info - main struct
 * Notes:
 * Returns:  MV_OK if success, MV_FAIL if fail.
 */
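/*
 * Added note: this variant mirrors ddr3_write_leveling_sw() but, like the HW
 * registered-DIMM path, shifts the clock start point through the control PUP
 * deskew registers for target frequencies of DDR_400 and below before
 * leveling, and restores it at the end. Unlike the plain SW path it does not
 * clear dram_info->wl_val before running.
 */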
int ddr3_write_leveling_sw_reg_dimm(u32 freq, int ratio_2to1,
				    MV_DRAM_INFO *dram_info)
{
	u32 reg, cs, cnt, pup;
	u32 res[MAX_CS];
	__maybe_unused int dpde_flag = 0;

	/* Debug message - Start SW Write leveling procedure */
	DEBUG_WL_S("DDR3 - Write Leveling - Starting SW WL procedure\n");

#ifdef MV88F67XX
	/* Dynamic pad issue (BTS669) during WL */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	if (reg & (1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS)) {
		dpde_flag = 1;
		reg_write(REG_DUNIT_CTRL_LOW_ADDR,
			  reg & ~(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS));
	}
#endif

	/* If target freq = 400 move clock start point */
	/* Write to control PUP to Control Deskew Regs */
	if (freq <= DDR_400) {
		for (pup = 0; pup <= dram_info->num_of_total_pups; pup++) {
			/* PUP_DELAY_MASK 0x1F */
			/* reg = 0x0C10001F + (uj << 16); */
			ddr3_write_ctrl_pup_reg(1, pup, CNTRL_PUP_DESKEW + pup,
						0x1F);
		}
	}

	/* Set Output buffer-off to all CS and correct ODT values */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_ODT_MASK;
			reg |= odt_static[dram_info->cs_ena][cs];
			reg |= (1 << REG_DDR3_MR1_OUTBUF_DIS_OFFS);
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);
		}
	}

	DEBUG_WL_FULL_S("DDR3 - Write Leveling - Qoff and RTT Values are set for all Cs\n");

	/* Enable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* [0] = 1 - Enable SW override */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);
	DEBUG_WL_FULL_S("DDR3 - Write Leveling - SW Override Enabled\n");

	/* Enable PHY write leveling mode */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
		~(1 << REG_DRAM_TRAINING_2_WL_MODE_OFFS);
	/* [2] = 0 - TrnWLMode - Enable */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Loop for each cs */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			DEBUG_WL_FULL_C("DDR3 - Write Leveling - Starting working with Cs - ",
					(u32)cs, 1);

			/* Refresh X9 current cs */
			DEBUG_WL_FULL_S("DDR3 - Write Leveling - Refresh X9\n");
			for (cnt = 0; cnt < COUNT_WL_RFRS; cnt++) {
				reg = REG_SDRAM_OPERATION_CMD_RFRS &
					~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
				/* [3-0] = 0x2 - refresh, [11-8] - enable current cs */
				/* 0x1418 - SDRAM Operation Register */
				reg_write(REG_SDRAM_OPERATION_ADDR, reg);

				do {
					reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
						REG_SDRAM_OPERATION_CMD_RFRS_DONE;
				} while (reg);	/* Wait for '0' */
			}

			/*
			 * Configure MR1 in Cs[CsNum] - write leveling on,
			 * output buffer on
			 */
			DEBUG_WL_FULL_S("DDR3 - Write Leveling - Configure MR1 for current Cs: WL-on,OB-on\n");
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_OUTBUF_WL_MASK;
			/* Set ODT Values */
			reg &= REG_DDR3_MR1_ODT_MASK;
			reg |= odt_static[dram_info->cs_ena][cs];
			/* Enable WL MODE */
			reg |= (1 << REG_DDR3_MR1_WL_ENA_OFFS);
			/*
			 * [7]=1, [12]=0 - Output Buffer and write leveling
			 * enabled
			 */
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);

			/* Write leveling cs[cs] */
			if (MV_OK != ddr3_write_leveling_single_cs(
				    cs, freq, ratio_2to1,
				    (u32 *)(res + cs), dram_info)) {
				DEBUG_WL_FULL_C("DDR3 - Write Leveling single Cs - FAILED - Cs - ",
						(u32)cs, 1);
				return MV_FAIL;
			}

			/* Set TrnWLDeUpd - After each CS is done */
			reg = reg_read(REG_TRAINING_WL_ADDR) |
				(1 << REG_TRAINING_WL_CS_DONE_OFFS);
			/* 0x16AC - Training Write leveling register */
			reg_write(REG_TRAINING_WL_ADDR, reg);

			/*
			 * Debug message - Finished Write leveling cs[cs] -
			 * each PUP Fail/Success
			 */
			DEBUG_WL_FULL_C("DDR3 - Write Leveling - Finished Cs - ",
					(u32)cs, 1);
			DEBUG_WL_FULL_C("DDR3 - Write Leveling - The Results: 1-PUP locked, 0-PUP failed -",
					(u32)res[cs], 3);

			/*
			 * Configure MR1 in cs[cs] - write leveling off (0),
			 * output buffer off (1)
			 */
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_OUTBUF_WL_MASK;
			reg |= (1 << REG_DDR3_MR1_OUTBUF_DIS_OFFS);
			/* No need to sort ODT since it is same CS */
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);
		}
	}

	/* Disable WL Mode */
	/* [2]=1 - TrnWLMode - Disable */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= (1 << REG_DRAM_TRAINING_2_WL_MODE_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Disable SW override - Must be in a different stage */
	/* [0]=0 - Disable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Set Output buffer-on to all CS and correct ODT values */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (dram_info->cs_ena & (1 << cs)) {
			reg = reg_read(REG_DDR3_MR1_ADDR) &
				REG_DDR3_MR1_ODT_MASK;
			reg &= REG_DDR3_MR1_OUTBUF_WL_MASK;
			reg |= odt_static[dram_info->cs_ena][cs];
			/* 0x15D4 - DDR3 MR1 Register */
			reg_write(REG_DDR3_MR1_ADDR, reg);

			/* Issue MRS Command to current cs */
			reg = REG_SDRAM_OPERATION_CMD_MR1 &
				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
			/*
			 * [3-0] = 0x4 - MR1 Command, [11-8] -
			 * enable current cs
			 */
			/* 0x1418 - SDRAM Operation Register */
			reg_write(REG_SDRAM_OPERATION_ADDR, reg);

			udelay(MRS_DELAY);
		}
	}

#ifdef MV88F67XX
	/* Dynamic pad issue (BTS669) during WL */
	if (dpde_flag) {
		reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR) |
			(1 << REG_DUNIT_CTRL_LOW_DPDE_OFFS);
		reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
	}
#endif

	/* If target freq = 400 move clock back */
	/* Write to control PUP to Control Deskew Regs */
	if (freq <= DDR_400) {
		for (pup = 0; pup <= dram_info->num_of_total_pups; pup++) {
			ddr3_write_ctrl_pup_reg(1, pup, CNTRL_PUP_DESKEW + pup,
						0);
		}
	}

	DEBUG_WL_FULL_S("DDR3 - Write Leveling - Finished WL procedure for all Cs\n");

	return MV_OK;
}
#endif

/*
 * Name:     ddr3_write_leveling_single_cs
 * Desc:     Execute Write leveling for a single chip select
 * Args:     cs        - current chip select
 *           freq      - current sequence frequency
 *           result    - res array
 *           dram_info - main struct
 * Notes:
 * Returns:  MV_OK if success, MV_FAIL if fail.
 */
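/*
 * Added summary of the per-CS leveling loop below: ODT is forced onto the
 * current CS, all PUPs are reset to phase 0 / delay 0, and the training logic
 * is asked to drive DQS for one cycle at every (phase, delay) point. After
 * each step the TrnWLResult field (bits [28:20] of 0x16AC) is sampled; a PUP
 * is considered locked on the first 0 -> 1 transition of its result bit, at
 * which point its current phase/delay is kept. The sweep stops early once
 * every PUP in max_pup_mask has locked; any PUP still unlocked at the end
 * fails the procedure.
 */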
static int ddr3_write_leveling_single_cs(u32 cs, u32 freq, int ratio_2to1,
					 u32 *result, MV_DRAM_INFO *dram_info)
{
	u32 reg, pup_num, delay, phase, phaseMax, max_pup_num, pup,
		max_pup_mask;
	u32 flag[MAX_PUP_NUM] = { 0 };

	max_pup_num = dram_info->num_of_total_pups;
	*result = 0;

	DEBUG_WL_FULL_C("DDR3 - Write Leveling Single Cs - WL for Cs - ",
			(u32)cs, 1);

	switch (max_pup_num) {
	case 2:
		max_pup_mask = 0x3;
		break;
	case 4:
		max_pup_mask = 0xf;
		DEBUG_WL_C("max_pup_mask = ", max_pup_mask, 3);
		break;
	case 5:
		max_pup_mask = 0x1f;
		DEBUG_WL_C("max_pup_mask = ", max_pup_mask, 3);
		break;
	case 8:
		max_pup_mask = 0xff;
		DEBUG_WL_C("max_pup_mask = ", max_pup_mask, 3);
		break;
	case 9:
		max_pup_mask = 0x1ff;
		DEBUG_WL_C("max_pup_mask = ", max_pup_mask, 3);
		break;
	default:
		DEBUG_WL_C("ddr3_write_leveling_single_cs wrong max_pup_num = ",
			   max_pup_num, 3);
		return MV_FAIL;
	}

	/* CS ODT Override */
	reg = reg_read(REG_SDRAM_ODT_CTRL_HIGH_ADDR) &
		REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK;
	reg |= (REG_SDRAM_ODT_CTRL_HIGH_OVRD_ENA << (2 * cs));
	/* Set 0x3 - Enable ODT on the current cs and disable on other cs */
	/* 0x1498 - SDRAM ODT Control high */
	reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);

	DEBUG_WL_FULL_S("DDR3 - Write Leveling Single Cs - ODT Asserted for current Cs\n");

	/* tWLMRD Delay */
	/* Delay of minimum 40 DRAM clock cycles - 20 Tclk cycles */
	udelay(1);

	/* [1:0] - current cs number */
	reg = (reg_read(REG_TRAINING_WL_ADDR) & REG_TRAINING_WL_CS_MASK) | cs;
	reg |= (1 << REG_TRAINING_WL_UPD_OFFS);	/* [2] - trnWLCsUpd */
	/* 0x16AC - Training Write leveling register */
	reg_write(REG_TRAINING_WL_ADDR, reg);

	/* Broadcast to all PUPs: Reset DQS phase, reset leveling delay */
	ddr3_write_pup_reg(PUP_WL_MODE, cs, PUP_BC, 0, 0);

	/* Seek Edge */
	DEBUG_WL_FULL_S("DDR3 - Write Leveling Single Cs - Seek Edge - Current Cs\n");

	/* Drive DQS high for one cycle - All data PUPs */
	DEBUG_WL_FULL_S("DDR3 - Write Leveling Single Cs - Seek Edge - Driving DQS high for one cycle\n");
	if (!ratio_2to1) {
		reg = (reg_read(REG_TRAINING_WL_ADDR) &
		       REG_TRAINING_WL_RATIO_MASK) | REG_TRAINING_WL_1TO1;
	} else {
		reg = (reg_read(REG_TRAINING_WL_ADDR) &
		       REG_TRAINING_WL_RATIO_MASK) | REG_TRAINING_WL_2TO1;
	}
	/* 0x16AC - Training Write leveling register */
	reg_write(REG_TRAINING_WL_ADDR, reg);

	/* Wait tWLdelay */
	do {
		/* [29] - trnWLDelayExp */
		reg = reg_read(REG_TRAINING_WL_ADDR) &
			REG_TRAINING_WL_DELAYEXP_MASK;
	} while (reg == 0x0);	/* Wait for '1' */

	/* Read WL res */
	reg = (reg_read(REG_TRAINING_WL_ADDR) >> REG_TRAINING_WL_RESULTS_OFFS) &
		REG_TRAINING_WL_RESULTS_MASK;
	/* [28:20] - TrnWLResult */

	/* Different phase options for 2:1 or 1:1 modes */
	if (!ratio_2to1)
		phaseMax = MAX_PHASE_1TO1;
	else
		phaseMax = MAX_PHASE_2TO1;

	DEBUG_WL_FULL_S("DDR3 - Write Leveling Single Cs - Seek Edge - Shift DQS + Octet Leveling\n");

	/* Shift DQS + Octet leveling */
	for (phase = 0; phase < phaseMax; phase++) {
		for (delay = 0; delay < MAX_DELAY; delay++) {
			/* Broadcast to all PUPs: DQS phase, leveling delay */
			ddr3_write_pup_reg(PUP_WL_MODE, cs, PUP_BC, phase,
					   delay);

			udelay(1);	/* Delay of 3 Tclk cycles */

			DEBUG_WL_FULL_S("DDR3 - Write Leveling Single Cs - Seek Edge: Phase = ");
			DEBUG_WL_FULL_D((u32)phase, 1);
			DEBUG_WL_FULL_S(", Delay = ");
			DEBUG_WL_FULL_D((u32)delay, 1);
			DEBUG_WL_FULL_S("\n");

			/* Drive DQS high for one cycle - All data PUPs */
			if (!ratio_2to1) {
				reg = (reg_read(REG_TRAINING_WL_ADDR) &
				       REG_TRAINING_WL_RATIO_MASK) |
					REG_TRAINING_WL_1TO1;
			} else {
				reg = (reg_read(REG_TRAINING_WL_ADDR) &
				       REG_TRAINING_WL_RATIO_MASK) |
					REG_TRAINING_WL_2TO1;
			}
			reg_write(REG_TRAINING_WL_ADDR, reg);	/* 0x16AC */

			/* Wait tWLdelay */
			do {
				reg = reg_read(REG_TRAINING_WL_ADDR) &
					REG_TRAINING_WL_DELAYEXP_MASK;
			} while (reg == 0x0);	/* [29] Wait for '1' */

			/* Read WL res */
			reg = reg_read(REG_TRAINING_WL_ADDR);
			reg = (reg >> REG_TRAINING_WL_RESULTS_OFFS) &
				REG_TRAINING_WL_RESULTS_MASK;	/* [28:20] */

			DEBUG_WL_FULL_C("DDR3 - Write Leveling Single Cs - Seek Edge: Results = ",
					(u32)reg, 3);

			/* Update state machine */
			for (pup = 0; pup < max_pup_num; pup++) {
				/* ECC support - bit 8 */
				pup_num = (pup == dram_info->num_of_std_pups) ?
					ECC_BIT : pup;
				if (dram_info->wl_val[cs][pup][S] == 0) {
					/* Update phase to PUP */
					dram_info->wl_val[cs][pup][P] = phase;
					/* Update delay to PUP */
					dram_info->wl_val[cs][pup][D] = delay;
				}

				if (((reg >> pup_num) & 0x1) == 0)
					flag[pup_num] = 1;

				if (((reg >> pup_num) & 0x1) &&
				    (flag[pup_num] == 1) &&
				    (dram_info->wl_val[cs][pup][S] == 0)) {
					/*
					 * The PUP is locked now and was still
					 * unlocked in the previous steps
					 */
					/* Go to next state */
					dram_info->wl_val[cs][pup][S] = 1;
					/* Set res */
					*result = *result | (1 << pup_num);
				}
			}

			/* If all locked - Break the loops - Finished */
			if (*result == max_pup_mask) {
				phase = phaseMax;
				delay = MAX_DELAY;
				DEBUG_WL_S("DDR3 - Write Leveling Single Cs - Seek Edge: All Locked\n");
			}
		}
	}

	/* Debug message - Print res for cs[i]: cs, PUP, Phase, Delay */
	DEBUG_WL_C("DDR3 - Write Leveling - Results for CS - ", (u32)cs, 1);
	for (pup = 0; pup < max_pup_num; pup++) {
		DEBUG_WL_S("DDR3 - Write Leveling - PUP: ");
		DEBUG_WL_D((u32)pup, 1);
		DEBUG_WL_S(", Phase: ");
		DEBUG_WL_D((u32)dram_info->wl_val[cs][pup][P], 1);
		DEBUG_WL_S(", Delay: ");
		DEBUG_WL_D((u32)dram_info->wl_val[cs][pup][D], 2);
		DEBUG_WL_S("\n");
	}

	/* If some PUPs are not locked, return an error */
	if (*result != max_pup_mask) {
		DEBUG_WL_S("DDR3 - Write Leveling - ERROR - not all PUPS were locked\n");
		return MV_FAIL;
	}

	/* Configure each PUP with its locked leveling settings */
	for (pup = 0; pup < max_pup_num; pup++) {
		/* ECC support - bit 8 */
		pup_num = (pup == dram_info->num_of_std_pups) ? ECC_BIT : pup;
		phase = dram_info->wl_val[cs][pup][P];
		delay = dram_info->wl_val[cs][pup][D];
		ddr3_write_pup_reg(PUP_WL_MODE, cs, pup_num, phase, delay);
	}

	/* CS ODT Override */
	reg = reg_read(REG_SDRAM_ODT_CTRL_HIGH_ADDR) &
		REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK;
	/* 0x1498 - SDRAM ODT Control high */
	reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);

	return MV_OK;
}

/*
 * Perform DDR3 Control PUP Indirect Write
 */
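/*
 * Added note on the access protocol below: the 16-bit data, the control-PHY
 * flag, the broadcast/unicast PUP selection and the target PHY register
 * address are packed into the PHY Register File Access register (0x16A0),
 * written once to latch the fields, written again with the write opcode set,
 * and then the op-done bit is polled until the transaction completes.
 */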
static void ddr3_write_ctrl_pup_reg(int bc_acc, u32 pup, u32 reg_addr, u32 data)
{
	u32 reg = 0;

	/* Store value for write */
	reg = (data & 0xFFFF);

	/* Set bit 26 for control PHY access */
	reg |= (1 << REG_PHY_CNTRL_OFFS);

	/* Configure BC or UC access to PHYs */
	if (bc_acc == 1)
		reg |= (1 << REG_PHY_BC_OFFS);
	else
		reg |= (pup << REG_PHY_PUP_OFFS);

	/* Set PHY register address to write to */
	reg |= (reg_addr << REG_PHY_CS_OFFS);

	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
	reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */

	do {
		reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
			REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
	} while (reg);	/* Wait for '0' to mark the end of the transaction */
}