ddr3_training_pbs.c 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) Marvell International Ltd. and its affiliates
  4. */
  5. #include "ddr3_init.h"
/* Fallback PBS tap delay when no skew spread was measured (see "[psec]" debug print in ddr3_tip_pbs) */
#define TYPICAL_PBS_VALUE 12

/* Centralization (ADLL) values saved before training and restored at the end */
u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
/* Per-interface status returned by the training IP runs */
enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
/* Final per-bit PBS result, normalized to the pup minimum, written to the PHY */
u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
/* RX DQS centering value stored during the RX stage, written back during TX stage */
u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
/* 4-EEWA, 3-EWA, 2-SWA, 1-Fail, 0-Pass */
/* NOTE(review): the encoding above matches the values used for pup_state
 * (3 = EBA, 4 = EEBA, 2 = SBA, 1 = fail), not result_all_bit — confirm.
 */
u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
/* Per-pup min/max of the per-bit skew search results */
u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Per-pup min/max of the ADLL shift search results */
u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Computed PBS tap delay per mode/interface/pup/cs (reported by the viewer tool) */
u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
/* 1 when the pup's ADLL shift search locked, 0 otherwise */
u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* ADLL shift value chosen for each pup */
u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* Pattern used for all PBS training runs */
enum hws_pattern pbs_pattern = PATTERN_VREF;
/* Search state per pup; values per the encoding comment above */
static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/*
 * Name:	ddr3_tip_pbs
 * Desc:	Per-Bit Skew (PBS) training engine shared by the rx/tx
 *		wrappers below.  Runs the ADLL shift searches (EBA, then
 *		EEBA for pups that failed EBA, then SBA if not all pups
 *		locked), runs the per-bit DQ skew search, normalizes the
 *		results and programs them into the data PHY.
 * Args:	dev_num - device number
 *		pbs_mode - PBS_RX_MODE or PBS_TX_MODE
 * Notes:
 * Returns:	MV_OK if success, other error code if fail.
 */
int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 res0[MAX_INTERFACE_NUM];
	/* one ADLL tap in psec: cycle time at medium freq divided by 64 taps */
	int adll_tap = MEGA / freq_val[medium_freq] / 64;
	int pad_num = 0;
	enum hws_search_dir search_dir =
		(pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
	enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
	/* rx ADLL range is 5 bits (31), tx range is 6 bits (63) */
	int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
	u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
	int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
	enum hws_edge_compare search_edge = EDGE_FP;
	u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
	u32 reg_addr = 0;
	u32 validation_val = 0;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u8 temp = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* save current cs enable reg val */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* save current cs enable reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val, MASK_ALL_BITS));
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
	}

	/* save the nominal centralization (ADLL) values; restored at the end */
	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		CRX_PHY_REG(effective_cs) :
		CTX_PHY_REG(effective_cs);
	ddr3_tip_read_adll_value(dev_num, nominal_adll, reg_addr, MASK_ALL_BITS);

	/* stage 1 shift ADLL */
	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
			     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
			     PARAM_NOT_CARE, RESULT_PER_BIT,
			     HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
			     tm->if_act_mask, init_val, iterations,
			     pbs_pattern, search_edge, CS_SINGLE, cs_num,
			     train_status);

	/* result equal to this value means the search hit the range end */
	validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;

	/* init per-pup search state: assume EBA (state 3) and locked */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			pup_state[if_id][pup] = 0x3;
			adll_shift_lock[if_id][pup] = 1;
			max_adll_per_pup[if_id][pup] = 0x0;
		}
	}

	/* EBA */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE,
				      mask_results_dq_reg_map[
					      bit + pup * BUS_WIDTH_IN_BITS],
				      res0, MASK_ALL_BITS));
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup,
						  res0[if_id]));
				if (pup_state[if_id][pup] != 3)
					continue;
				/* if not EBA state than move to next pup */
				/*
				 * NOTE(review): bit 25 of the result register
				 * appears to indicate a successful training
				 * machine run (0 = failed) — confirm against
				 * the training IP result register spec.
				 */
				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- Fail Training IP\n"));
					/* training machine failed */
					pup_state[if_id][pup] = 1;
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else if ((res0[if_id] & res_valid_mask) ==
					   validation_val) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EBA %d %d %d %d\n",
							  if_id, bit, pup,
							  res0[if_id]));
					pup_state[if_id][pup] = 4;
					/* this pup move to EEBA */
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else {
					/*
					 * The search ended in Pass we need
					 * Fail
					 */
					res0[if_id] =
						(pbs_mode == PBS_RX_MODE) ?
						((res0[if_id] &
						  res_valid_mask) + 1) :
						((res0[if_id] &
						  res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] <
						 res0[if_id]) ?
						(u8)res0[if_id] :
						max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] >
						 min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] :
						(u8)
						res0[if_id];
					/*
					 * vs the Rx we are searching for the
					 * smallest value of DQ shift so all
					 * Bus would fail
					 */
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

	/* EEBA: retry pups that hit the range end, with extra DQS delay */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (pup_state[if_id][pup] != 4)
				continue;
			/*
			 * if pup state different from EEBA than move to
			 * next pup
			 */
			/* set DQS PBS delay (rx 0x54/0x55, tx 0x14/0x15) to max */
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x54 + effective_cs * 0x10) :
				(0x14 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x55 + effective_cs * 0x10) :
				(0x15 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			/* initialize the Edge2 Max. */
			adll_shift_val[if_id][pup] = 0;
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			max_adll_per_pup[if_id][pup] = 0x0;

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, RESULT_PER_BIT,
					     HWS_CONTROL_ELEMENT_ADLL,
					     search_dir, dir,
					     tm->if_act_mask, init_val,
					     iterations, pbs_pattern,
					     search_edge, CS_SINGLE, cs_num,
					     train_status);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 ("ADLL shift results:\n"));

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE,
					      mask_results_dq_reg_map[
						      bit + pup *
						      BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup,
						  res0[if_id]));

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 (" -- EEBA Fail\n"));
					/* force exit from the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					/* exit bit loop */
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- EEBA Fail Training IP\n"));
					/*
					 * training machine failed but pass
					 * before in the EBA so maybe the DQS
					 * shift change env.
					 */
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					/* undo the DQS PBS delay set above */
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x54 + effective_cs * 0x10) :
						(0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x55 + effective_cs * 0x10) :
						(0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					continue;
				} else if ((res0[if_id] & res_valid_mask) ==
					   validation_val) {
					/* exit bit loop */
					bit = BUS_WIDTH_IN_BITS;
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EEBA\n"));
					/* this pup move to SBA */
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					/* undo the DQS PBS delay set above */
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x54 + effective_cs * 0x10) :
						(0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x55 + effective_cs * 0x10) :
						(0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					continue;
				} else {
					adll_shift_lock[if_id][pup] = 1;
					/*
					 * The search ended in Pass we need
					 * Fail
					 */
					res0[if_id] =
						(pbs_mode == PBS_RX_MODE) ?
						((res0[if_id] &
						  res_valid_mask) + 1) :
						((res0[if_id] &
						  res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] <
						 res0[if_id]) ?
						(u8)res0[if_id] :
						max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] >
						 min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] :
						(u8)res0[if_id];
					/*
					 * vs the Rx we are searching for the
					 * smallest value of DQ shift so all Bus
					 * would fail
					 */
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

	/* Print Stage result */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
					  if_id, pup,
					  adll_shift_lock[if_id][pup],
					  max_adll_per_pup[if_id][pup],
					  min_adll_per_pup[if_id][pup]));
		}
	}
	DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
			 ("Update ADLL Shift of all pups:\n"));

	/* write the chosen ADLL shift value for every locked pup */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (adll_shift_lock[if_id][pup] != 1)
				continue;
			/* if pup not locked continue to next pup */
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x3 + effective_cs * 4) :
				(0x1 + effective_cs * 4);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, adll_shift_val[if_id][pup]));
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, Pup[%d] = %d\n", if_id,
					  pup, adll_shift_val[if_id][pup]));
		}
	}

	/* PBS EEBA&EBA */
	/* Start the Per Bit Skew search */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			max_pbs_per_pup[if_id][pup] = 0x0;
			min_pbs_per_pup[if_id][pup] = 0x1f;
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				/* reset result for PBS */
				result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
					       if_id * MAX_BUS_NUM *
					       BUS_WIDTH_IN_BITS] = 0;
			}
		}
	}

	/* per-bit DQ skew search: 5-bit range, always low-to-high */
	iterations = 31;
	search_dir = HWS_LOW2HIGH;
	/* !!!!! ran sh (search_dir == HWS_LOW2HIGH)?0:iterations; */
	init_val = 0;

	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
			     search_dir, dir, tm->if_act_mask, init_val,
			     iterations, pbs_pattern, search_edge,
			     CS_SINGLE, cs_num, train_status);

	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (adll_shift_lock[if_id][pup] != 1) {
				/* if pup not lock continue to next pup */
				continue;
			}
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE,
					      mask_results_dq_reg_map[
						      bit +
						      pup * BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup,
						  res0[if_id]));
				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("--EBA PBS Fail - Training IP machine\n"));
					/* exit the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					/*
					 * ADLL is no long in lock need new
					 * search
					 */
					adll_shift_lock[if_id][pup] = 0;
					/* Move to SBA */
					pup_state[if_id][pup] = 2;
					max_pbs_per_pup[if_id][pup] = 0x0;
					min_pbs_per_pup[if_id][pup] = 0x1f;
					continue;
				} else {
					temp = (u8)(res0[if_id] &
						    res_valid_mask);
					max_pbs_per_pup[if_id][pup] =
						(temp >
						 max_pbs_per_pup[if_id][pup]) ?
						temp :
						max_pbs_per_pup[if_id][pup];
					min_pbs_per_pup[if_id][pup] =
						(temp <
						 min_pbs_per_pup[if_id][pup]) ?
						temp :
						min_pbs_per_pup[if_id][pup];
					result_all_bit[bit +
						       pup * BUS_WIDTH_IN_BITS +
						       if_id * MAX_BUS_NUM *
						       BUS_WIDTH_IN_BITS] =
						temp;
				}
			}
		}
	}

	/* Check all Pup lock */
	all_lock = 1;
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* product of 0/1 flags: any unlocked pup clears it */
			all_lock = all_lock * adll_shift_lock[if_id][pup];
		}
	}

	/* Only if not all Pups Lock */
	if (all_lock == 0) {
		DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
				 ("##########ADLL shift for SBA###########\n"));
		/* ADLL shift for SBA */
		search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
			HWS_HIGH2LOW;
		init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				if (adll_shift_lock[if_id][pup] == 1) {
					/*if pup lock continue to next pup */
					continue;
				}
				/*init the var altogth init before */
				adll_shift_lock[if_id][pup] = 0;
				/* clear DQS and alignment PBS delays */
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x54 + effective_cs * 0x10) :
					(0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x55 + effective_cs * 0x10) :
					(0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x5f + effective_cs * 0x10) :
					(0x1f + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				/* initilaze the Edge2 Max. */
				adll_shift_val[if_id][pup] = 0;
				min_adll_per_pup[if_id][pup] = 0x1f;
				max_adll_per_pup[if_id][pup] = 0x0;

				ddr3_tip_ip_training(dev_num,
						     ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE,
						     ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE,
						     RESULT_PER_BIT,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, dir,
						     tm->if_act_mask,
						     init_val, iterations,
						     pbs_pattern,
						     search_edge, CS_SINGLE,
						     cs_num, train_status);

				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_MULTICAST,
						      PARAM_NOT_CARE,
						      mask_results_dq_reg_map
						      [bit +
						       pup *
						       BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_INFO,
						("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						 if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						/* exit the bit loop */
						bit = BUS_WIDTH_IN_BITS;
						/* Fail SBA --> Fail PBS */
						pup_state[if_id][pup] = 1;
						DEBUG_PBS_ENGINE
							(DEBUG_LEVEL_INFO,
							 (" SBA Fail\n"));
						continue;
					} else {
						/*
						 * - increment to get all
						 * 8 bit lock.
						 */
						adll_shift_lock[if_id][pup]++;
						/*
						 * The search ended in Pass
						 * we need Fail
						 */
						res0[if_id] =
							(pbs_mode == PBS_RX_MODE) ?
							((res0[if_id] & res_valid_mask) + 1) :
							((res0[if_id] & res_valid_mask) - 1);
						max_adll_per_pup[if_id][pup] =
							(max_adll_per_pup[if_id]
							 [pup] < res0[if_id]) ?
							(u8)res0[if_id] :
							max_adll_per_pup[if_id][pup];
						min_adll_per_pup[if_id][pup] =
							(res0[if_id] >
							 min_adll_per_pup[if_id]
							 [pup]) ?
							min_adll_per_pup[if_id][pup] :
							(u8)res0[if_id];
						/*
						 * vs the Rx we are searching for
						 * the smallest value of DQ shift
						 * so all Bus would fail
						 */
						adll_shift_val[if_id][pup] =
							(pbs_mode == PBS_RX_MODE) ?
							max_adll_per_pup[if_id][pup] :
							min_adll_per_pup[if_id][pup];
					}
				}
				/* 1 is lock */
				adll_shift_lock[if_id][pup] =
					(adll_shift_lock[if_id][pup] == 8) ?
					1 : 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x3 + effective_cs * 4) :
					(0x1 + effective_cs * 4);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      adll_shift_val[if_id][pup]));
				DEBUG_PBS_ENGINE(
					DEBUG_LEVEL_INFO,
					("adll_shift_lock[%x][%x] = %x\n",
					 if_id, pup,
					 adll_shift_lock[if_id][pup]));
			}
		}
		/* End ADLL Shift for SBA */

		/* Start the Per Bit Skew search */
		/* The ADLL shift finished with a Pass */
		search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
		search_dir = (pbs_mode == PBS_RX_MODE) ?
			HWS_LOW2HIGH : HWS_HIGH2LOW;
		iterations = 0x1f;
		/* - The initial value is different in Rx and Tx mode */
		init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;

		ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
				     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
				     PARAM_NOT_CARE, RESULT_PER_BIT,
				     HWS_CONTROL_ELEMENT_DQ_SKEW,
				     search_dir, dir, tm->if_act_mask,
				     init_val, iterations, pbs_pattern,
				     search_edge, CS_SINGLE, cs_num,
				     train_status);

		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_MULTICAST,
						      PARAM_NOT_CARE,
						      mask_results_dq_reg_map
						      [bit +
						       pup *
						       BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					if (pup_state[if_id][pup] != 2) {
						/*
						 * if pup is not SBA continue
						 * to next pup
						 */
						bit = BUS_WIDTH_IN_BITS;
						continue;
					}
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_INFO,
						("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
						 if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						DEBUG_PBS_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("SBA Fail\n"));
						max_pbs_per_pup[if_id][pup] =
							0x1f;
						result_all_bit[
							bit + pup *
							BUS_WIDTH_IN_BITS +
							if_id * MAX_BUS_NUM *
							BUS_WIDTH_IN_BITS] =
							0x1f;
					} else {
						temp = (u8)(res0[if_id] &
							    res_valid_mask);
						max_pbs_per_pup[if_id][pup] =
							(temp >
							 max_pbs_per_pup[if_id]
							 [pup]) ? temp :
							max_pbs_per_pup
							[if_id][pup];
						min_pbs_per_pup[if_id][pup] =
							(temp <
							 min_pbs_per_pup[if_id]
							 [pup]) ? temp :
							min_pbs_per_pup
							[if_id][pup];
						result_all_bit[
							bit + pup *
							BUS_WIDTH_IN_BITS +
							if_id * MAX_BUS_NUM *
							BUS_WIDTH_IN_BITS] =
							temp;
						adll_shift_lock[if_id][pup] = 1;
					}
				}
			}
		}

		/* Check all Pup state */
		all_lock = 1;
		/* NOTE(review): loop body is fully commented out — vestigial */
		for (pup = 0; pup < octets_per_if_num; pup++) {
			/*
			 * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
			 * ("pup_state[%d][%d] = %d\n",if_id,pup,pup_state
			 * [if_id][pup]));
			 */
		}
	}

	/* END OF SBA */
	/* Norm: normalize every bit's skew to the pup minimum */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				/* if pup not lock continue to next pup */
				if (adll_shift_lock[if_id][pup] != 1) {
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_ERROR,
						("PBS failed for IF #%d\n",
						 if_id));
					training_result[training_stage][if_id]
						= TEST_FAILED;

					result_mat[if_id][pup][bit] = 0;
					max_pbs_per_pup[if_id][pup] = 0;
					min_pbs_per_pup[if_id][pup] = 0;
				} else {
					training_result[
						training_stage][if_id] =
						(training_result[training_stage]
						 [if_id] == TEST_FAILED) ?
						TEST_FAILED : TEST_SUCCESS;
					result_mat[if_id][pup][bit] =
						result_all_bit[
							bit + pup *
							BUS_WIDTH_IN_BITS +
							if_id * MAX_BUS_NUM *
							BUS_WIDTH_IN_BITS] -
						min_pbs_per_pup[if_id][pup];
				}
				DEBUG_PBS_ENGINE(
					DEBUG_LEVEL_INFO,
					("The abs min_pbs[%d][%d] = %d\n",
					 if_id, pup,
					 min_pbs_per_pup[if_id][pup]));
			}
		}
	}

	/* Clean all results */
	ddr3_tip_clean_pbs_result(dev_num, pbs_mode);

	/* DQ PBS register update with the final result */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);

			DEBUG_PBS_ENGINE(
				DEBUG_LEVEL_INFO,
				("Final Results: if_id %d, pup %d, Pup State: %d\n",
				 if_id, pup, pup_state[if_id][pup]));
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				if (dq_map_table == NULL) {
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_ERROR,
						("dq_map_table not initialized\n"));
					return MV_FAIL;
				}
				/* map the logical DQ to its physical pad */
				pad_num = dq_map_table[
					bit + pup * BUS_WIDTH_IN_BITS +
					if_id * BUS_WIDTH_IN_BITS *
					MAX_BUS_NUM];
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("result_mat: %d ",
						  result_mat[if_id][pup]
						  [bit]));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					PBS_RX_PHY_REG(effective_cs, 0) :
					PBS_TX_PHY_REG(effective_cs, 0);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + pad_num,
					      result_mat[if_id][pup][bit]));
			}

			/* avoid divide-by-zero when no skew spread was seen */
			if (max_pbs_per_pup[if_id][pup] == min_pbs_per_pup[if_id][pup]) {
				temp = TYPICAL_PBS_VALUE;
			} else {
				/* PBS tap size in psec from the ADLL span */
				temp = ((max_adll_per_pup[if_id][pup] -
					 min_adll_per_pup[if_id][pup]) *
					adll_tap /
					(max_pbs_per_pup[if_id][pup] -
					 min_pbs_per_pup[if_id][pup]));
			}
			pbsdelay_per_pup[pbs_mode]
				[if_id][pup][effective_cs] = temp;

			/* RX results ready, write RX also */
			if (pbs_mode == PBS_TX_MODE) {
				/* Write TX results */
				reg_addr = (0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) /
					      2));
				reg_addr = (0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) /
					      2));

				/* Write previously stored RX results */
				reg_addr = (0x54 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup]
					      [effective_cs]));
				reg_addr = (0x55 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup]
					      [effective_cs]));
			} else {
				/*
				 * RX results may affect RL results correctess,
				 * so just store the results that will written
				 * in TX stage
				 */
				result_mat_rx_dqs[if_id][pup][effective_cs] =
					(max_pbs_per_pup[if_id][pup] -
					 min_pbs_per_pup[if_id][pup]) / 2;
			}
			DEBUG_PBS_ENGINE(
				DEBUG_LEVEL_INFO,
				(", PBS tap=%d [psec] ==> skew observed = %d\n",
				 temp,
				 ((max_pbs_per_pup[if_id][pup] -
				   min_pbs_per_pup[if_id][pup]) *
				  temp)));
		}
	}

	/* Write back to the phy the default values */
	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		CRX_PHY_REG(effective_cs) :
		CTX_PHY_REG(effective_cs);
	ddr3_tip_write_adll_value(dev_num, nominal_adll, reg_addr);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		reg_addr = (pbs_mode == PBS_RX_MODE) ?
			(0x5a + effective_cs * 0x10) :
			(0x1a + effective_cs * 0x10);
		/*
		 * NOTE(review): `pup` here retains its value from the
		 * previous loop (== octets_per_if_num after exhaustion);
		 * this matches the original code but looks suspicious —
		 * confirm the intended target pup for this write.
		 */
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
			      0));

		/* restore cs enable value */
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
	}

	/* exit test mode */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WR_RD_MODE_ENA_REG, 0xffff, MASK_ALL_BITS));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			/*
			 * no valid window found
			 * (no lock at EBA ADLL shift at EBS)
			 */
			if (pup_state[if_id][pup] == 1)
				return MV_FAIL;
		}
	}

	return MV_OK;
}
  843. /*
  844. * Name: ddr3_tip_pbs_rx.
  845. * Desc: PBS TX
  846. * Args: TBD
  847. * Notes:
  848. * Returns: OK if success, other error code if fail.
  849. */
  850. int ddr3_tip_pbs_rx(u32 uidev_num)
  851. {
  852. return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
  853. }
  854. /*
  855. * Name: ddr3_tip_pbs_tx.
  856. * Desc: PBS TX
  857. * Args: TBD
  858. * Notes:
  859. * Returns: OK if success, other error code if fail.
  860. */
  861. int ddr3_tip_pbs_tx(u32 uidev_num)
  862. {
  863. return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
  864. }
  865. #ifdef DDR_VIEWER_TOOL
  866. /*
  867. * Print PBS Result
  868. */
  869. int ddr3_tip_print_all_pbs_result(u32 dev_num)
  870. {
  871. u32 curr_cs;
  872. u32 max_cs = ddr3_tip_max_cs_get(dev_num);
  873. for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
  874. ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
  875. ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
  876. }
  877. return MV_OK;
  878. }
/*
 * Print PBS Result
 * Dumps, in CSV-ish form, the per-pup PBS/ADLL ratio and the raw per-DQ
 * PBS values read back from the data PHY for one chip select and direction.
 */
int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
{
	u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		PBS_RX_PHY_REG(cs_num, 0) :
		PBS_TX_PHY_REG(cs_num, 0);
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("%s,CS%d,PBS,ADLLRATIO,,,",
	       (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx", cs_num);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			printf("%d,",
			       pbsdelay_per_pup[pbs_mode][if_id][pup][cs_num]);
		}
	}
	printf("CS%d, %s ,PBS\n", cs_num,
	       (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");

	for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
		printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			printf("%d ,PBS,,, ", bit);
			/*
			 * NOTE(review): `<=` iterates one past the last pup,
			 * unlike the `<` loop above — looks like an off-by-one
			 * (likely masked by VALIDATE_BUS_ACTIVE); confirm
			 * before changing.
			 */
			for (pup = 0; pup <= octets_per_if_num;
			     pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + bit,
					      &data_value));
				printf("%d , ", data_value);
			}
		}
		printf("\n");
	}
	printf("\n");

	return MV_OK;
}
  923. #endif /* DDR_VIEWER_TOOL */
/*
 * Fixup PBS Result
 * Zeroes the per-pad PBS registers for the current effective_cs and the
 * given direction before the final results are programmed.
 */
int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 if_id, pup, bit;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		PBS_RX_PHY_REG(effective_cs, 0) :
		PBS_TX_PHY_REG(effective_cs, 0);
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/*
		 * NOTE(review): `pup <= octets_per_if_num` writes one pup
		 * past the bus count, and `bit <= BUS_WIDTH_IN_BITS + 3`
		 * covers BUS_WIDTH_IN_BITS + 4 registers — presumably to
		 * also clear the DM/DQS pad delays beyond the 8 DQ pads.
		 * Both bounds match the original code; confirm against the
		 * PHY register map before tightening.
		 */
		for (pup = 0; pup <= octets_per_if_num; pup++) {
			for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + bit, 0));
			}
		}
	}

	return MV_OK;
}