ddr3_training_pbs.c 30 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) Marvell International Ltd. and its affiliates
  4. */
  5. #include <common.h>
  6. #include <spl.h>
  7. #include <asm/io.h>
  8. #include <asm/arch/cpu.h>
  9. #include <asm/arch/soc.h>
  10. #include "ddr3_init.h"
  11. #define TYPICAL_PBS_VALUE 12
  12. u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
  13. enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
  14. u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
  15. u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
  16. /* 4-EEWA, 3-EWA, 2-SWA, 1-Fail, 0-Pass */
  17. u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
  18. u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  19. u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  20. u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  21. u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  22. u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM];
  23. u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  24. u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  25. enum hws_pattern pbs_pattern = PATTERN_VREF;
  26. static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  27. /*
  28. * Name: ddr3_tip_pbs
  29. * Desc: PBS
  30. * Args: TBD
  31. * Notes:
  32. * Returns: OK if success, other error code if fail.
  33. */
  34. int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
  35. {
  36. u32 res0[MAX_INTERFACE_NUM];
  37. int adll_tap = MEGA / freq_val[medium_freq] / 64;
  38. int pad_num = 0;
  39. enum hws_search_dir search_dir =
  40. (pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
  41. enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
  42. int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
  43. u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
  44. int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
  45. enum hws_edge_compare search_edge = EDGE_FP;
  46. u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
  47. int reg_addr = 0;
  48. u32 validation_val = 0;
  49. u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
  50. u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
  51. u8 temp = 0;
  52. struct hws_topology_map *tm = ddr3_get_topology_map();
  53. /* save current cs enable reg val */
  54. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  55. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  56. /* save current cs enable reg val */
  57. CHECK_STATUS(ddr3_tip_if_read
  58. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  59. CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
  60. /* enable single cs */
  61. CHECK_STATUS(ddr3_tip_if_write
  62. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  63. CS_ENABLE_REG, (1 << 3), (1 << 3)));
  64. }
  65. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  66. (READ_CENTRALIZATION_PHY_REG +
  67. (effective_cs * CS_REGISTER_ADDR_OFFSET)) :
  68. (WRITE_CENTRALIZATION_PHY_REG +
  69. (effective_cs * CS_REGISTER_ADDR_OFFSET));
  70. read_adll_value(nominal_adll, reg_addr, MASK_ALL_BITS);
  71. /* stage 1 shift ADLL */
  72. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
  73. PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
  74. PARAM_NOT_CARE, RESULT_PER_BIT,
  75. HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
  76. tm->if_act_mask, init_val, iterations,
  77. pbs_pattern, search_edge, CS_SINGLE, cs_num,
  78. train_status);
  79. validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
  80. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  81. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  82. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  83. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  84. min_adll_per_pup[if_id][pup] =
  85. (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
  86. pup_state[if_id][pup] = 0x3;
  87. adll_shift_lock[if_id][pup] = 1;
  88. max_adll_per_pup[if_id][pup] = 0x0;
  89. }
  90. }
  91. /* EBA */
  92. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  93. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  94. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  95. CHECK_STATUS(ddr3_tip_if_read
  96. (dev_num, ACCESS_TYPE_MULTICAST,
  97. PARAM_NOT_CARE,
  98. mask_results_dq_reg_map[
  99. bit + pup * BUS_WIDTH_IN_BITS],
  100. res0, MASK_ALL_BITS));
  101. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  102. if_id++) {
  103. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  104. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  105. ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  106. if_id, bit, pup,
  107. res0[if_id]));
  108. if (pup_state[if_id][pup] != 3)
  109. continue;
  110. /* if not EBA state than move to next pup */
  111. if ((res0[if_id] & 0x2000000) == 0) {
  112. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  113. ("-- Fail Training IP\n"));
  114. /* training machine failed */
  115. pup_state[if_id][pup] = 1;
  116. adll_shift_lock[if_id][pup] = 0;
  117. continue;
  118. }
  119. else if ((res0[if_id] & res_valid_mask) ==
  120. validation_val) {
  121. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  122. ("-- FAIL EBA %d %d %d %d\n",
  123. if_id, bit, pup,
  124. res0[if_id]));
  125. pup_state[if_id][pup] = 4;
  126. /* this pup move to EEBA */
  127. adll_shift_lock[if_id][pup] = 0;
  128. continue;
  129. } else {
  130. /*
  131. * The search ended in Pass we need
  132. * Fail
  133. */
  134. res0[if_id] =
  135. (pbs_mode == PBS_RX_MODE) ?
  136. ((res0[if_id] &
  137. res_valid_mask) + 1) :
  138. ((res0[if_id] &
  139. res_valid_mask) - 1);
  140. max_adll_per_pup[if_id][pup] =
  141. (max_adll_per_pup[if_id][pup] <
  142. res0[if_id]) ?
  143. (u8)res0[if_id] :
  144. max_adll_per_pup[if_id][pup];
  145. min_adll_per_pup[if_id][pup] =
  146. (res0[if_id] >
  147. min_adll_per_pup[if_id][pup]) ?
  148. min_adll_per_pup[if_id][pup] :
  149. (u8)
  150. res0[if_id];
  151. /*
  152. * vs the Rx we are searching for the
  153. * smallest value of DQ shift so all
  154. * Bus would fail
  155. */
  156. adll_shift_val[if_id][pup] =
  157. (pbs_mode == PBS_RX_MODE) ?
  158. max_adll_per_pup[if_id][pup] :
  159. min_adll_per_pup[if_id][pup];
  160. }
  161. }
  162. }
  163. }
  164. /* EEBA */
  165. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  166. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  167. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  168. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  169. if (pup_state[if_id][pup] != 4)
  170. continue;
  171. /*
  172. * if pup state different from EEBA than move to
  173. * next pup
  174. */
  175. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  176. (0x54 + effective_cs * 0x10) :
  177. (0x14 + effective_cs * 0x10);
  178. CHECK_STATUS(ddr3_tip_bus_write
  179. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  180. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
  181. reg_addr, 0x1f));
  182. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  183. (0x55 + effective_cs * 0x10) :
  184. (0x15 + effective_cs * 0x10);
  185. CHECK_STATUS(ddr3_tip_bus_write
  186. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  187. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
  188. reg_addr, 0x1f));
  189. /* initialize the Edge2 Max. */
  190. adll_shift_val[if_id][pup] = 0;
  191. min_adll_per_pup[if_id][pup] =
  192. (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
  193. max_adll_per_pup[if_id][pup] = 0x0;
  194. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
  195. PARAM_NOT_CARE,
  196. ACCESS_TYPE_MULTICAST,
  197. PARAM_NOT_CARE, RESULT_PER_BIT,
  198. HWS_CONTROL_ELEMENT_ADLL,
  199. search_dir, dir,
  200. tm->if_act_mask, init_val,
  201. iterations, pbs_pattern,
  202. search_edge, CS_SINGLE, cs_num,
  203. train_status);
  204. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  205. ("ADLL shift results:\n"));
  206. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  207. CHECK_STATUS(ddr3_tip_if_read
  208. (dev_num, ACCESS_TYPE_MULTICAST,
  209. PARAM_NOT_CARE,
  210. mask_results_dq_reg_map[
  211. bit + pup *
  212. BUS_WIDTH_IN_BITS],
  213. res0, MASK_ALL_BITS));
  214. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  215. ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  216. if_id, bit, pup,
  217. res0[if_id]));
  218. if ((res0[if_id] & 0x2000000) == 0) {
  219. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  220. (" -- EEBA Fail\n"));
  221. bit = BUS_WIDTH_IN_BITS;
  222. /* exit bit loop */
  223. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  224. ("-- EEBA Fail Training IP\n"));
  225. /*
  226. * training machine failed but pass
  227. * before in the EBA so maybe the DQS
  228. * shift change env.
  229. */
  230. pup_state[if_id][pup] = 2;
  231. adll_shift_lock[if_id][pup] = 0;
  232. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  233. (0x54 + effective_cs * 0x10) :
  234. (0x14 + effective_cs * 0x10);
  235. CHECK_STATUS(ddr3_tip_bus_write
  236. (dev_num,
  237. ACCESS_TYPE_UNICAST,
  238. if_id,
  239. ACCESS_TYPE_UNICAST, pup,
  240. DDR_PHY_DATA, reg_addr,
  241. 0x0));
  242. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  243. (0x55 + effective_cs * 0x10) :
  244. (0x15 + effective_cs * 0x10);
  245. CHECK_STATUS(ddr3_tip_bus_write
  246. (dev_num,
  247. ACCESS_TYPE_UNICAST,
  248. if_id,
  249. ACCESS_TYPE_UNICAST, pup,
  250. DDR_PHY_DATA, reg_addr,
  251. 0x0));
  252. continue;
  253. } else if ((res0[if_id] & res_valid_mask) ==
  254. validation_val) {
  255. /* exit bit loop */
  256. bit = BUS_WIDTH_IN_BITS;
  257. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  258. ("-- FAIL EEBA\n"));
  259. /* this pup move to SBA */
  260. pup_state[if_id][pup] = 2;
  261. adll_shift_lock[if_id][pup] = 0;
  262. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  263. (0x54 + effective_cs * 0x10) :
  264. (0x14 + effective_cs * 0x10);
  265. CHECK_STATUS(ddr3_tip_bus_write
  266. (dev_num,
  267. ACCESS_TYPE_UNICAST,
  268. if_id,
  269. ACCESS_TYPE_UNICAST, pup,
  270. DDR_PHY_DATA, reg_addr,
  271. 0x0));
  272. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  273. (0x55 + effective_cs * 0x10) :
  274. (0x15 + effective_cs * 0x10);
  275. CHECK_STATUS(ddr3_tip_bus_write
  276. (dev_num,
  277. ACCESS_TYPE_UNICAST,
  278. if_id,
  279. ACCESS_TYPE_UNICAST, pup,
  280. DDR_PHY_DATA, reg_addr,
  281. 0x0));
  282. continue;
  283. } else {
  284. adll_shift_lock[if_id][pup] = 1;
  285. /*
  286. * The search ended in Pass we need
  287. * Fail
  288. */
  289. res0[if_id] =
  290. (pbs_mode == PBS_RX_MODE) ?
  291. ((res0[if_id] &
  292. res_valid_mask) + 1) :
  293. ((res0[if_id] &
  294. res_valid_mask) - 1);
  295. max_adll_per_pup[if_id][pup] =
  296. (max_adll_per_pup[if_id][pup] <
  297. res0[if_id]) ?
  298. (u8)res0[if_id] :
  299. max_adll_per_pup[if_id][pup];
  300. min_adll_per_pup[if_id][pup] =
  301. (res0[if_id] >
  302. min_adll_per_pup[if_id][pup]) ?
  303. min_adll_per_pup[if_id][pup] :
  304. (u8)res0[if_id];
  305. /*
  306. * vs the Rx we are searching for the
  307. * smallest value of DQ shift so all Bus
  308. * would fail
  309. */
  310. adll_shift_val[if_id][pup] =
  311. (pbs_mode == PBS_RX_MODE) ?
  312. max_adll_per_pup[if_id][pup] :
  313. min_adll_per_pup[if_id][pup];
  314. }
  315. }
  316. }
  317. }
  318. /* Print Stage result */
  319. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  320. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  321. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  322. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  323. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  324. ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
  325. if_id, pup,
  326. adll_shift_lock[if_id][pup],
  327. max_adll_per_pup[if_id][pup],
  328. min_adll_per_pup[if_id][pup]));
  329. }
  330. }
  331. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  332. ("Update ADLL Shift of all pups:\n"));
  333. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  334. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  335. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  336. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  337. if (adll_shift_lock[if_id][pup] != 1)
  338. continue;
  339. /* if pup not locked continue to next pup */
  340. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  341. (0x3 + effective_cs * 4) :
  342. (0x1 + effective_cs * 4);
  343. CHECK_STATUS(ddr3_tip_bus_write
  344. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  345. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
  346. reg_addr, adll_shift_val[if_id][pup]));
  347. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  348. ("FP I/F %d, Pup[%d] = %d\n", if_id,
  349. pup, adll_shift_val[if_id][pup]));
  350. }
  351. }
  352. /* PBS EEBA&EBA */
  353. /* Start the Per Bit Skew search */
  354. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  355. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  356. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  357. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  358. max_pbs_per_pup[if_id][pup] = 0x0;
  359. min_pbs_per_pup[if_id][pup] = 0x1f;
  360. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  361. /* reset result for PBS */
  362. result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
  363. if_id * MAX_BUS_NUM *
  364. BUS_WIDTH_IN_BITS] = 0;
  365. }
  366. }
  367. }
  368. iterations = 31;
  369. search_dir = HWS_LOW2HIGH;
  370. /* !!!!! ran sh (search_dir == HWS_LOW2HIGH)?0:iterations; */
  371. init_val = 0;
  372. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
  373. ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
  374. RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
  375. search_dir, dir, tm->if_act_mask, init_val,
  376. iterations, pbs_pattern, search_edge,
  377. CS_SINGLE, cs_num, train_status);
  378. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  379. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  380. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  381. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  382. if (adll_shift_lock[if_id][pup] != 1) {
  383. /* if pup not lock continue to next pup */
  384. continue;
  385. }
  386. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  387. CHECK_STATUS(ddr3_tip_if_read
  388. (dev_num, ACCESS_TYPE_MULTICAST,
  389. PARAM_NOT_CARE,
  390. mask_results_dq_reg_map[
  391. bit +
  392. pup * BUS_WIDTH_IN_BITS],
  393. res0, MASK_ALL_BITS));
  394. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  395. ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  396. if_id, bit, pup,
  397. res0[if_id]));
  398. if ((res0[if_id] & 0x2000000) == 0) {
  399. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  400. ("--EBA PBS Fail - Training IP machine\n"));
  401. /* exit the bit loop */
  402. bit = BUS_WIDTH_IN_BITS;
  403. /*
  404. * ADLL is no long in lock need new
  405. * search
  406. */
  407. adll_shift_lock[if_id][pup] = 0;
  408. /* Move to SBA */
  409. pup_state[if_id][pup] = 2;
  410. max_pbs_per_pup[if_id][pup] = 0x0;
  411. min_pbs_per_pup[if_id][pup] = 0x1f;
  412. continue;
  413. } else {
  414. temp = (u8)(res0[if_id] &
  415. res_valid_mask);
  416. max_pbs_per_pup[if_id][pup] =
  417. (temp >
  418. max_pbs_per_pup[if_id][pup]) ?
  419. temp :
  420. max_pbs_per_pup[if_id][pup];
  421. min_pbs_per_pup[if_id][pup] =
  422. (temp <
  423. min_pbs_per_pup[if_id][pup]) ?
  424. temp :
  425. min_pbs_per_pup[if_id][pup];
  426. result_all_bit[bit +
  427. pup * BUS_WIDTH_IN_BITS +
  428. if_id * MAX_BUS_NUM *
  429. BUS_WIDTH_IN_BITS] =
  430. temp;
  431. }
  432. }
  433. }
  434. }
  435. /* Check all Pup lock */
  436. all_lock = 1;
  437. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  438. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  439. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  440. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  441. all_lock = all_lock * adll_shift_lock[if_id][pup];
  442. }
  443. }
  444. /* Only if not all Pups Lock */
  445. if (all_lock == 0) {
  446. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  447. ("##########ADLL shift for SBA###########\n"));
  448. /* ADLL shift for SBA */
  449. search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
  450. HWS_HIGH2LOW;
  451. init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
  452. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  453. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  454. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  455. if_id++) {
  456. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  457. if (adll_shift_lock[if_id][pup] == 1) {
  458. /*if pup lock continue to next pup */
  459. continue;
  460. }
  461. /*init the var altogth init before */
  462. adll_shift_lock[if_id][pup] = 0;
  463. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  464. (0x54 + effective_cs * 0x10) :
  465. (0x14 + effective_cs * 0x10);
  466. CHECK_STATUS(ddr3_tip_bus_write
  467. (dev_num, ACCESS_TYPE_UNICAST,
  468. if_id, ACCESS_TYPE_UNICAST, pup,
  469. DDR_PHY_DATA, reg_addr, 0));
  470. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  471. (0x55 + effective_cs * 0x10) :
  472. (0x15 + effective_cs * 0x10);
  473. CHECK_STATUS(ddr3_tip_bus_write
  474. (dev_num, ACCESS_TYPE_UNICAST,
  475. if_id, ACCESS_TYPE_UNICAST, pup,
  476. DDR_PHY_DATA, reg_addr, 0));
  477. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  478. (0x5f + effective_cs * 0x10) :
  479. (0x1f + effective_cs * 0x10);
  480. CHECK_STATUS(ddr3_tip_bus_write
  481. (dev_num, ACCESS_TYPE_UNICAST,
  482. if_id, ACCESS_TYPE_UNICAST, pup,
  483. DDR_PHY_DATA, reg_addr, 0));
  484. /* initilaze the Edge2 Max. */
  485. adll_shift_val[if_id][pup] = 0;
  486. min_adll_per_pup[if_id][pup] = 0x1f;
  487. max_adll_per_pup[if_id][pup] = 0x0;
  488. ddr3_tip_ip_training(dev_num,
  489. ACCESS_TYPE_MULTICAST,
  490. PARAM_NOT_CARE,
  491. ACCESS_TYPE_MULTICAST,
  492. PARAM_NOT_CARE,
  493. RESULT_PER_BIT,
  494. HWS_CONTROL_ELEMENT_ADLL,
  495. search_dir, dir,
  496. tm->if_act_mask,
  497. init_val, iterations,
  498. pbs_pattern,
  499. search_edge, CS_SINGLE,
  500. cs_num, train_status);
  501. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  502. CHECK_STATUS(ddr3_tip_if_read
  503. (dev_num,
  504. ACCESS_TYPE_MULTICAST,
  505. PARAM_NOT_CARE,
  506. mask_results_dq_reg_map
  507. [bit +
  508. pup *
  509. BUS_WIDTH_IN_BITS],
  510. res0, MASK_ALL_BITS));
  511. DEBUG_PBS_ENGINE(
  512. DEBUG_LEVEL_INFO,
  513. ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  514. if_id, bit, pup, res0[if_id]));
  515. if ((res0[if_id] & 0x2000000) == 0) {
  516. /* exit the bit loop */
  517. bit = BUS_WIDTH_IN_BITS;
  518. /* Fail SBA --> Fail PBS */
  519. pup_state[if_id][pup] = 1;
  520. DEBUG_PBS_ENGINE
  521. (DEBUG_LEVEL_INFO,
  522. (" SBA Fail\n"));
  523. continue;
  524. } else {
  525. /*
  526. * - increment to get all
  527. * 8 bit lock.
  528. */
  529. adll_shift_lock[if_id][pup]++;
  530. /*
  531. * The search ended in Pass
  532. * we need Fail
  533. */
  534. res0[if_id] =
  535. (pbs_mode == PBS_RX_MODE) ?
  536. ((res0[if_id] & res_valid_mask) + 1) :
  537. ((res0[if_id] & res_valid_mask) - 1);
  538. max_adll_per_pup[if_id][pup] =
  539. (max_adll_per_pup[if_id]
  540. [pup] < res0[if_id]) ?
  541. (u8)res0[if_id] :
  542. max_adll_per_pup[if_id][pup];
  543. min_adll_per_pup[if_id][pup] =
  544. (res0[if_id] >
  545. min_adll_per_pup[if_id]
  546. [pup]) ?
  547. min_adll_per_pup[if_id][pup] :
  548. (u8)res0[if_id];
  549. /*
  550. * vs the Rx we are searching for
  551. * the smallest value of DQ shift
  552. * so all Bus would fail
  553. */
  554. adll_shift_val[if_id][pup] =
  555. (pbs_mode == PBS_RX_MODE) ?
  556. max_adll_per_pup[if_id][pup] :
  557. min_adll_per_pup[if_id][pup];
  558. }
  559. }
  560. /* 1 is lock */
  561. adll_shift_lock[if_id][pup] =
  562. (adll_shift_lock[if_id][pup] == 8) ?
  563. 1 : 0;
  564. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  565. (0x3 + effective_cs * 4) :
  566. (0x1 + effective_cs * 4);
  567. CHECK_STATUS(ddr3_tip_bus_write
  568. (dev_num, ACCESS_TYPE_UNICAST,
  569. if_id, ACCESS_TYPE_UNICAST, pup,
  570. DDR_PHY_DATA, reg_addr,
  571. adll_shift_val[if_id][pup]));
  572. DEBUG_PBS_ENGINE(
  573. DEBUG_LEVEL_INFO,
  574. ("adll_shift_lock[%x][%x] = %x\n",
  575. if_id, pup,
  576. adll_shift_lock[if_id][pup]));
  577. }
  578. }
  579. /* End ADLL Shift for SBA */
  580. /* Start the Per Bit Skew search */
  581. /* The ADLL shift finished with a Pass */
  582. search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
  583. search_dir = (pbs_mode == PBS_RX_MODE) ?
  584. HWS_LOW2HIGH : HWS_HIGH2LOW;
  585. iterations = 0x1f;
  586. /* - The initial value is different in Rx and Tx mode */
  587. init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;
  588. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
  589. PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
  590. PARAM_NOT_CARE, RESULT_PER_BIT,
  591. HWS_CONTROL_ELEMENT_DQ_SKEW,
  592. search_dir, dir, tm->if_act_mask,
  593. init_val, iterations, pbs_pattern,
  594. search_edge, CS_SINGLE, cs_num,
  595. train_status);
  596. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  597. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  598. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  599. if_id++) {
  600. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  601. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  602. CHECK_STATUS(ddr3_tip_if_read
  603. (dev_num,
  604. ACCESS_TYPE_MULTICAST,
  605. PARAM_NOT_CARE,
  606. mask_results_dq_reg_map
  607. [bit +
  608. pup *
  609. BUS_WIDTH_IN_BITS],
  610. res0, MASK_ALL_BITS));
  611. if (pup_state[if_id][pup] != 2) {
  612. /*
  613. * if pup is not SBA continue
  614. * to next pup
  615. */
  616. bit = BUS_WIDTH_IN_BITS;
  617. continue;
  618. }
  619. DEBUG_PBS_ENGINE(
  620. DEBUG_LEVEL_INFO,
  621. ("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
  622. if_id, bit, pup, res0[if_id]));
  623. if ((res0[if_id] & 0x2000000) == 0) {
  624. DEBUG_PBS_ENGINE
  625. (DEBUG_LEVEL_INFO,
  626. ("SBA Fail\n"));
  627. max_pbs_per_pup[if_id][pup] =
  628. 0x1f;
  629. result_all_bit[
  630. bit + pup *
  631. BUS_WIDTH_IN_BITS +
  632. if_id * MAX_BUS_NUM *
  633. BUS_WIDTH_IN_BITS] =
  634. 0x1f;
  635. } else {
  636. temp = (u8)(res0[if_id] &
  637. res_valid_mask);
  638. max_pbs_per_pup[if_id][pup] =
  639. (temp >
  640. max_pbs_per_pup[if_id]
  641. [pup]) ? temp :
  642. max_pbs_per_pup
  643. [if_id][pup];
  644. min_pbs_per_pup[if_id][pup] =
  645. (temp <
  646. min_pbs_per_pup[if_id]
  647. [pup]) ? temp :
  648. min_pbs_per_pup
  649. [if_id][pup];
  650. result_all_bit[
  651. bit + pup *
  652. BUS_WIDTH_IN_BITS +
  653. if_id * MAX_BUS_NUM *
  654. BUS_WIDTH_IN_BITS] =
  655. temp;
  656. adll_shift_lock[if_id][pup] = 1;
  657. }
  658. }
  659. }
  660. }
  661. /* Check all Pup state */
  662. all_lock = 1;
  663. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  664. /*
  665. * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  666. * ("pup_state[%d][%d] = %d\n",if_id,pup,pup_state
  667. * [if_id][pup]));
  668. */
  669. }
  670. }
  671. /* END OF SBA */
  672. /* Norm */
  673. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  674. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  675. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  676. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  677. if_id++) {
  678. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  679. /* if pup not lock continue to next pup */
  680. if (adll_shift_lock[if_id][pup] != 1) {
  681. DEBUG_PBS_ENGINE(
  682. DEBUG_LEVEL_ERROR,
  683. ("PBS failed for IF #%d\n",
  684. if_id));
  685. training_result[training_stage][if_id]
  686. = TEST_FAILED;
  687. result_mat[if_id][pup][bit] = 0;
  688. max_pbs_per_pup[if_id][pup] = 0;
  689. min_pbs_per_pup[if_id][pup] = 0;
  690. } else {
  691. training_result[
  692. training_stage][if_id] =
  693. (training_result[training_stage]
  694. [if_id] == TEST_FAILED) ?
  695. TEST_FAILED : TEST_SUCCESS;
  696. result_mat[if_id][pup][bit] =
  697. result_all_bit[
  698. bit + pup *
  699. BUS_WIDTH_IN_BITS +
  700. if_id * MAX_BUS_NUM *
  701. BUS_WIDTH_IN_BITS] -
  702. min_pbs_per_pup[if_id][pup];
  703. }
  704. DEBUG_PBS_ENGINE(
  705. DEBUG_LEVEL_INFO,
  706. ("The abs min_pbs[%d][%d] = %d\n",
  707. if_id, pup,
  708. min_pbs_per_pup[if_id][pup]));
  709. }
  710. }
  711. }
  712. /* Clean all results */
  713. ddr3_tip_clean_pbs_result(dev_num, pbs_mode);
  714. /* DQ PBS register update with the final result */
  715. for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
  716. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  717. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  718. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  719. DEBUG_PBS_ENGINE(
  720. DEBUG_LEVEL_INFO,
  721. ("Final Results: if_id %d, pup %d, Pup State: %d\n",
  722. if_id, pup, pup_state[if_id][pup]));
  723. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  724. if (dq_map_table == NULL) {
  725. DEBUG_PBS_ENGINE(
  726. DEBUG_LEVEL_ERROR,
  727. ("dq_map_table not initialized\n"));
  728. return MV_FAIL;
  729. }
  730. pad_num = dq_map_table[
  731. bit + pup * BUS_WIDTH_IN_BITS +
  732. if_id * BUS_WIDTH_IN_BITS *
  733. tm->num_of_bus_per_interface];
  734. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  735. ("result_mat: %d ",
  736. result_mat[if_id][pup]
  737. [bit]));
  738. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  739. (PBS_RX_PHY_REG + effective_cs * 0x10) :
  740. (PBS_TX_PHY_REG + effective_cs * 0x10);
  741. CHECK_STATUS(ddr3_tip_bus_write
  742. (dev_num, ACCESS_TYPE_UNICAST,
  743. if_id, ACCESS_TYPE_UNICAST, pup,
  744. DDR_PHY_DATA, reg_addr + pad_num,
  745. result_mat[if_id][pup][bit]));
  746. }
  747. pbsdelay_per_pup[pbs_mode][if_id][pup] =
  748. (max_pbs_per_pup[if_id][pup] ==
  749. min_pbs_per_pup[if_id][pup]) ?
  750. TYPICAL_PBS_VALUE :
  751. ((max_adll_per_pup[if_id][pup] -
  752. min_adll_per_pup[if_id][pup]) * adll_tap /
  753. (max_pbs_per_pup[if_id][pup] -
  754. min_pbs_per_pup[if_id][pup]));
  755. /* RX results ready, write RX also */
  756. if (pbs_mode == PBS_TX_MODE) {
  757. /* Write TX results */
  758. reg_addr = (0x14 + effective_cs * 0x10);
  759. CHECK_STATUS(ddr3_tip_bus_write
  760. (dev_num, ACCESS_TYPE_UNICAST,
  761. if_id, ACCESS_TYPE_UNICAST, pup,
  762. DDR_PHY_DATA, reg_addr,
  763. (max_pbs_per_pup[if_id][pup] -
  764. min_pbs_per_pup[if_id][pup]) /
  765. 2));
  766. reg_addr = (0x15 + effective_cs * 0x10);
  767. CHECK_STATUS(ddr3_tip_bus_write
  768. (dev_num, ACCESS_TYPE_UNICAST,
  769. if_id, ACCESS_TYPE_UNICAST, pup,
  770. DDR_PHY_DATA, reg_addr,
  771. (max_pbs_per_pup[if_id][pup] -
  772. min_pbs_per_pup[if_id][pup]) /
  773. 2));
  774. /* Write previously stored RX results */
  775. reg_addr = (0x54 + effective_cs * 0x10);
  776. CHECK_STATUS(ddr3_tip_bus_write
  777. (dev_num, ACCESS_TYPE_UNICAST,
  778. if_id, ACCESS_TYPE_UNICAST, pup,
  779. DDR_PHY_DATA, reg_addr,
  780. result_mat_rx_dqs[if_id][pup]
  781. [effective_cs]));
  782. reg_addr = (0x55 + effective_cs * 0x10);
  783. CHECK_STATUS(ddr3_tip_bus_write
  784. (dev_num, ACCESS_TYPE_UNICAST,
  785. if_id, ACCESS_TYPE_UNICAST, pup,
  786. DDR_PHY_DATA, reg_addr,
  787. result_mat_rx_dqs[if_id][pup]
  788. [effective_cs]));
  789. } else {
  790. /*
  791. * RX results may affect RL results correctess,
  792. * so just store the results that will written
  793. * in TX stage
  794. */
  795. result_mat_rx_dqs[if_id][pup][effective_cs] =
  796. (max_pbs_per_pup[if_id][pup] -
  797. min_pbs_per_pup[if_id][pup]) / 2;
  798. }
  799. DEBUG_PBS_ENGINE(
  800. DEBUG_LEVEL_INFO,
  801. (", PBS tap=%d [psec] ==> skew observed = %d\n",
  802. pbsdelay_per_pup[pbs_mode][if_id][pup],
  803. ((max_pbs_per_pup[if_id][pup] -
  804. min_pbs_per_pup[if_id][pup]) *
  805. pbsdelay_per_pup[pbs_mode][if_id][pup])));
  806. }
  807. }
  808. /* Write back to the phy the default values */
  809. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  810. (READ_CENTRALIZATION_PHY_REG + effective_cs * 4) :
  811. (WRITE_CENTRALIZATION_PHY_REG + effective_cs * 4);
  812. write_adll_value(nominal_adll, reg_addr);
  813. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  814. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  815. (0x5a + effective_cs * 0x10) :
  816. (0x1a + effective_cs * 0x10);
  817. CHECK_STATUS(ddr3_tip_bus_write
  818. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  819. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
  820. 0));
  821. /* restore cs enable value */
  822. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  823. CHECK_STATUS(ddr3_tip_if_write
  824. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  825. CS_ENABLE_REG, cs_enable_reg_val[if_id],
  826. MASK_ALL_BITS));
  827. }
  828. /* exit test mode */
  829. CHECK_STATUS(ddr3_tip_if_write
  830. (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
  831. ODPG_WRITE_READ_MODE_ENABLE_REG, 0xffff, MASK_ALL_BITS));
  832. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  833. /*
  834. * meaning that there is no VW exist at all (No lock at
  835. * the EBA ADLL shift at EBS)
  836. */
  837. if (pup_state[if_id][pup] == 1)
  838. return MV_FAIL;
  839. }
  840. return MV_OK;
  841. }
  842. /*
  843. * Name: ddr3_tip_pbs_rx.
  844. * Desc: PBS TX
  845. * Args: TBD
  846. * Notes:
  847. * Returns: OK if success, other error code if fail.
  848. */
  849. int ddr3_tip_pbs_rx(u32 uidev_num)
  850. {
  851. return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
  852. }
  853. /*
  854. * Name: ddr3_tip_pbs_tx.
  855. * Desc: PBS TX
  856. * Args: TBD
  857. * Notes:
  858. * Returns: OK if success, other error code if fail.
  859. */
  860. int ddr3_tip_pbs_tx(u32 uidev_num)
  861. {
  862. return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
  863. }
  864. #ifndef EXCLUDE_SWITCH_DEBUG
  865. /*
  866. * Print PBS Result
  867. */
  868. int ddr3_tip_print_all_pbs_result(u32 dev_num)
  869. {
  870. u32 curr_cs;
  871. u32 max_cs = hws_ddr3_tip_max_cs_get();
  872. for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
  873. ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
  874. ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
  875. }
  876. return MV_OK;
  877. }
  878. /*
  879. * Print PBS Result
  880. */
  881. int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
  882. {
  883. u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
  884. u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
  885. (PBS_RX_PHY_REG + cs_num * 0x10) :
  886. (PBS_TX_PHY_REG + cs_num * 0x10);
  887. struct hws_topology_map *tm = ddr3_get_topology_map();
  888. printf("CS%d, %s ,PBS\n", cs_num,
  889. (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
  890. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  891. printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
  892. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  893. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  894. printf("%d ,PBS,,, ", bit);
  895. for (pup = 0; pup <= tm->num_of_bus_per_interface;
  896. pup++) {
  897. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  898. CHECK_STATUS(ddr3_tip_bus_read
  899. (dev_num, if_id,
  900. ACCESS_TYPE_UNICAST, pup,
  901. DDR_PHY_DATA, reg_addr + bit,
  902. &data_value));
  903. printf("%d , ", data_value);
  904. }
  905. }
  906. printf("\n");
  907. }
  908. printf("\n");
  909. return MV_OK;
  910. }
  911. #endif
  912. /*
  913. * Fixup PBS Result
  914. */
  915. int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
  916. {
  917. u32 if_id, pup, bit;
  918. u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
  919. (PBS_RX_PHY_REG + effective_cs * 0x10) :
  920. (PBS_TX_PHY_REG + effective_cs * 0x10);
  921. struct hws_topology_map *tm = ddr3_get_topology_map();
  922. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  923. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  924. for (pup = 0; pup <= tm->num_of_bus_per_interface; pup++) {
  925. for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
  926. CHECK_STATUS(ddr3_tip_bus_write
  927. (dev_num, ACCESS_TYPE_UNICAST,
  928. if_id, ACCESS_TYPE_UNICAST, pup,
  929. DDR_PHY_DATA, reg_addr + bit, 0));
  930. }
  931. }
  932. }
  933. return MV_OK;
  934. }