ddr3_training_hw_algo.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"

#define VREF_INITIAL_STEP	3
#define VREF_SECOND_STEP	1
#define VREF_MAX_INDEX		7
#define MAX_VALUE		(1024 - 1)
#define MIN_VALUE		(-MAX_VALUE)

#define GET_RD_SAMPLE_DELAY(data, cs)	((data >> rd_sample_mask[cs]) & 0xf)

u32 ca_delay;
int ddr3_tip_centr_skip_min_win_check = 0;
u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 interface_state[MAX_INTERFACE_NUM];
u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 vref_window_size_th = 12;

static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];

static u32 rd_sample_mask[] = {
	0,
	8,
	16,
	24
};

#define VREF_STEP_1	0
#define VREF_STEP_2	1
#define VREF_CONVERGE	2
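
/*
 * Per-pup Vref search states, as driven by ddr3_tip_vref() below:
 * VREF_STEP_1   - coarse upward sweep in VREF_INITIAL_STEP increments;
 * VREF_STEP_2   - fine backward search in VREF_SECOND_STEP decrements;
 * VREF_CONVERGE - search finished for this pup/interface pair.
 */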

/*
 * ODT additional timing
 */
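/*
 * A reading of the routine below: it collects the per-chip-select read-sample
 * delays and the read-leveling phase, derives minimum/maximum sample points
 * with fixed margins, and programs what appear to be the read-ODT start/stop
 * fields of DDR_ODT_TIMING_LOW_REG (bits [15:12] and [20:16]).
 */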
int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
{
	u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
	u32 data_read[MAX_INTERFACE_NUM] = { 0 };
	u32 read_sample[MAX_CS_NUM];
	u32 val;
	u32 pup_index;
	int max_phase = MIN_VALUE, current_phase;
	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DUNIT_ODT_CTRL_REG,
				       0 << 8, 0x3 << 8));
	CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
				      RD_DATA_SMPL_DLYS_REG,
				      data_read, MASK_ALL_BITS));
	val = data_read[if_id];

	for (cs_num = 0; cs_num < MAX_CS_NUM; cs_num++) {
		read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);

		/* find maximum of read_samples */
		if (read_sample[cs_num] >= max_read_sample) {
			if (read_sample[cs_num] == max_read_sample)
				max_phase = MIN_VALUE;
			else
				max_read_sample = read_sample[cs_num];

			for (pup_index = 0;
			     pup_index < octets_per_if_num;
			     pup_index++) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup_index,
					      DDR_PHY_DATA,
					      RL_PHY_REG(cs_num),
					      &val));

				current_phase = ((int)val & 0xe0) >> 6;
				if (current_phase >= max_phase)
					max_phase = current_phase;
			}
		}

		/* find minimum */
		if (read_sample[cs_num] < min_read_sample)
			min_read_sample = read_sample[cs_num];
	}

	min_read_sample = min_read_sample - 1;
	max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
	if (min_read_sample >= 0xf)
		min_read_sample = 0xf;
	if (max_read_sample >= 0x1f)
		max_read_sample = 0x1f;

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DDR_ODT_TIMING_LOW_REG,
				       ((min_read_sample - 1) << 12),
				       0xf << 12));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DDR_ODT_TIMING_LOW_REG,
				       (max_read_sample << 16),
				       0x1f << 16));

	return MV_OK;
}
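
/*
 * Read back the RX valid-window result for each of the four data pups of the
 * given interface: the 5-bit field at RESULT_PHY_RX_OFFS of RESULT_PHY_REG.
 * Only chip select 0 is handled here (see the TBD note below).
 */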
int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
{
	u32 reg_pup = RESULT_PHY_REG;
	u32 reg_data;
	u32 cs_num;
	int i;

	cs_num = 0;

	/* TBD */
	reg_pup += cs_num;

	for (i = 0; i < 4; i++) {
		CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
					       ACCESS_TYPE_UNICAST, i,
					       DDR_PHY_DATA, reg_pup,
					       &reg_data));
		res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
	}

	return 0;
}

/*
 * This algorithm searches for the vertical (voltage) optimum of the sampling
 * point. Tuning the voltage sampling point can improve the eye/window size of
 * each bit and pup. However, Vref is tuned the same way for all DQs, so there
 * is no per-bit (PBS-like) handling; it is closer to centralization. Because
 * there is no training state-machine support for it, a somewhat smarter
 * search is used to save time.
 */
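/*
 * Search outline, as read from the code below: each non-converged pup starts
 * at vref_map[0] and is swept upward in VREF_INITIAL_STEP increments
 * (VREF_STEP_1). The RX valid window is re-measured by running
 * ddr3_tip_centralization_rx() three times and averaging the result (scaled
 * by 1000). While the window does not shrink by more than 200 (the code's
 * +/- 1 ADLL-tap resolution allowance), the sweep continues; once it shrinks,
 * the search reverses in VREF_SECOND_STEP decrements (VREF_STEP_2) until the
 * recorded limit is reached, then the best Vref is written back and the pup
 * is marked VREF_CONVERGE.
 */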
int ddr3_tip_vref(u32 dev_num)
{
	/*
	 * The Vref register has a non-linear order. Need to check what it
	 * will be in future projects.
	 */
	u32 vref_map[8] = {
		1, 2, 3, 4, 5, 6, 7, 0
	};
	/* State and parameter definitions */
	u32 initial_step = VREF_INITIAL_STEP;
	/* may need to be assigned with a minus sign (?) */
	u32 second_step = VREF_SECOND_STEP;
	u32 algo_run_flag = 0, currrent_vref = 0;
	u32 while_count = 0;
	u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
	u32 val = 0;
	u32 reg_addr = 0xa8;
	u32 copy_start_pattern, copy_end_pattern;
	enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
	u8 res[4];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_special_rx(dev_num));

	/* save start/end pattern */
	copy_start_pattern = start_pattern;
	copy_end_pattern = end_pattern;

	/* set vref as centralization pattern */
	start_pattern = PATTERN_VREF;
	end_pattern = PATTERN_VREF;

	/* init params */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			current_vref[pup][if_id] = 0;
			last_vref[pup][if_id] = 0;
			lim_vref[pup][if_id] = 0;
			current_valid_window[pup][if_id] = 0;
			last_valid_window[pup][if_id] = 0;
			if (vref_window_size[if_id][pup] >
			    vref_window_size_th) {
				pup_st[pup][if_id] = VREF_CONVERGE;
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref tune not required (%d)\n",
					 if_id, pup, __LINE__));
			} else {
				pup_st[pup][if_id] = VREF_STEP_1;
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, &val));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      pup, DDR_PHY_DATA, reg_addr,
					      (val & (~0xf)) | vref_map[0]));
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
					 if_id, pup,
					 (val & (~0xf)) | vref_map[0],
					 __LINE__));
			}
		}
		interface_state[if_id] = 0;
	}

	/* TODO: Set number of active interfaces */
	num_pup = octets_per_if_num * MAX_INTERFACE_NUM;
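
	/*
	 * Main loop, as read from the code: each iteration re-runs RX
	 * centralization three times against the Vref pattern, averages the
	 * resulting valid windows, and advances the Vref state machine of
	 * every non-converged pup. It stops once algo_run_flag exceeds
	 * num_pup or after ten iterations as a safety bound.
	 */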
	while ((algo_run_flag <= num_pup) & (while_count < 10)) {
		while_count++;
		for (rep = 1; rep < 4; rep++) {
			ddr3_tip_centr_skip_min_win_check = 1;
			ddr3_tip_centralization_rx(dev_num);
			ddr3_tip_centr_skip_min_win_check = 0;

			/* Read valid-window results only for non-converged pups */
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				if (interface_state[if_id] != 4) {
					get_valid_win_rx(dev_num, if_id, res);
					for (pup = 0;
					     pup < octets_per_if_num; pup++) {
						VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
						if (pup_st[pup][if_id] ==
						    VREF_CONVERGE)
							continue;

						current_valid_window[pup][if_id] =
							(current_valid_window[pup][if_id] *
							 (rep - 1) +
							 1000 * res[pup]) / rep;
					}
				}
			}
		}

		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_TRACE,
				("current_valid_window: IF[ %d ] - ", if_id));
			for (pup = 0; pup < octets_per_if_num; pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("%d ",
						       current_valid_window[pup][if_id]));
			}
			DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
		}

		/* Compare results and respond as a function of state */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (pup = 0; pup < octets_per_if_num; pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
						       if_id, pup,
						       pup_st[pup][if_id], __LINE__));
				if (pup_st[pup][if_id] == VREF_CONVERGE)
					continue;

				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
						       if_id, pup,
						       current_valid_window[pup][if_id],
						       last_valid_window[pup][if_id],
						       lim_vref[pup][if_id], __LINE__));

				/*
				 * The -1 is for solution resolution of
				 * +/- 1 ADLL tap
				 */
				if (current_valid_window[pup][if_id] + 200 >=
				    (last_valid_window[pup][if_id])) {
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						/*
						 * We stay in the same state and
						 * step; just update the window
						 * size (take the max) and Vref
						 */
						if (current_vref[pup][if_id] ==
						    VREF_MAX_INDEX) {
							/*
							 * If we stepped to the
							 * end and didn't
							 * converge to some
							 * particular better
							 * Vref value, define
							 * the pup as converged
							 * and step back to
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else {
							/* continue to update the Vref index */
							current_vref[pup][if_id] =
								((current_vref[pup][if_id] +
								  initial_step) > VREF_MAX_INDEX) ?
								VREF_MAX_INDEX :
								(current_vref[pup][if_id] +
								 initial_step);
							if (current_vref[pup][if_id] ==
							    VREF_MAX_INDEX) {
								pup_st[pup][if_id] =
									VREF_STEP_2;
							}
							lim_vref[pup][if_id] =
								last_vref[pup][if_id] =
								current_vref[pup][if_id];
						}

						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);

						/* update the Vref for next stage */
						currrent_vref =
							current_vref[pup][if_id];
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[currrent_vref],
							  __LINE__));
					} else if (pup_st[pup][if_id] ==
						   VREF_STEP_2) {
						/*
						 * We keep searching back with
						 * the same step size.
						 */
						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);
						last_vref[pup][if_id] =
							current_vref[pup][if_id];

						/* we finished the whole search space */
						if ((current_vref[pup][if_id] -
						     second_step) == lim_vref[pup][if_id]) {
							/*
							 * If we stepped to the
							 * end and didn't
							 * converge to some
							 * particular better
							 * Vref value, define
							 * the pup as converged
							 * and step back to
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							current_vref[pup][if_id] =
								(current_vref[pup][if_id] -
								 second_step);
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else if (current_vref[pup][if_id] ==
							   lim_vref[pup][if_id]) {
							/* we finished the whole search space */
							/*
							 * If we stepped to the
							 * end and didn't
							 * converge to some
							 * particular better
							 * Vref value, define
							 * the pup as converged
							 * and step back to
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else {
							current_vref[pup][if_id] =
								current_vref[pup][if_id] -
								second_step;
						}

						/* Update the Vref for next stage */
						currrent_vref =
							current_vref[pup][if_id];
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[currrent_vref],
							  __LINE__));
					}
				} else {
					/* we change state and change step */
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						pup_st[pup][if_id] = VREF_STEP_2;
						lim_vref[pup][if_id] =
							current_vref[pup][if_id] -
							initial_step;
						last_valid_window[pup][if_id] =
							current_valid_window[pup][if_id];
						last_vref[pup][if_id] =
							current_vref[pup][if_id];
						current_vref[pup][if_id] =
							last_vref[pup][if_id] -
							second_step;

						/* Update the Vref for next stage */
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup][if_id]],
							  __LINE__));
					} else if (pup_st[pup][if_id] == VREF_STEP_2) {
						/*
						 * The last search found the max
						 * point; set the value and exit.
						 */
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup][if_id]],
							  __LINE__));
						pup_st[pup][if_id] = VREF_CONVERGE;
						algo_run_flag++;
						interface_state[if_id]++;
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
							  if_id, pup,
							  current_vref[pup][if_id],
							  __LINE__));
					}
				}
			}
		}
	}

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, pup,
				      DDR_PHY_DATA, reg_addr, &val));
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_INFO,
				("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
				 if_id, pup, val, __LINE__));
		}
	}
	flow_result[if_id] = TEST_SUCCESS;

	/* restore start/end pattern */
	start_pattern = copy_start_pattern;
	end_pattern = copy_end_pattern;

	return 0;
}

/*
 * CK/CA Delay
 */
int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
{
	u32 if_id = 0;
	u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * ck_delay delays the clock signal only
	 * (to overcome timing issues between CK and the command/address
	 * signals).
	 */
	/*
	 * ca_delay delays the entire command & address bus
	 * (including the clock signal, to overcome a DGL error on the clock
	 * versus the DQS).
	 */
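	/*
	 * A reading of the write below: the CK delay (in ADLL taps) is placed
	 * in bits [5:0] and the CA delay in bits [15:10] of PHY-control
	 * register 0x0, written via multicast to all pups of the interface.
	 */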

	/* Calc ADLL Tap */
	if (ck_delay == PARAM_UNDEFINED)
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_ERROR,
			("ERROR: ck_delay is not initialized!\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		/* Calc delay in ps as ADLL taps */
		ck_num_adll_tap = ck_delay / adll_tap;
		ca_num_adll_tap = ca_delay / adll_tap;

		data = (ck_num_adll_tap & 0x3f) +
		       ((ca_num_adll_tap & 0x3f) << 10);

		/*
		 * Set the ADLL number to the CK ADLL for the interface, for
		 * all pups
		 */
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_TRACE,
			("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
			 ck_num_adll_tap, ca_num_adll_tap, adll_tap));

		CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
						if_id, ACCESS_TYPE_MULTICAST,
						PARAM_NOT_CARE, DDR_PHY_CONTROL,
						0x0, data));
	}

	return MV_OK;
}