/* ddr3_training_pbs.c */
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier: GPL-2.0
 */
  6. #include <common.h>
  7. #include <spl.h>
  8. #include <asm/io.h>
  9. #include <asm/arch/cpu.h>
  10. #include <asm/arch/soc.h>
  11. #include "ddr3_init.h"
  12. #define TYPICAL_PBS_VALUE 12
  13. u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
  14. enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
  15. u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
  16. u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
  17. /* 4-EEWA, 3-EWA, 2-SWA, 1-Fail, 0-Pass */
  18. u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
  19. u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  20. u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  21. u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  22. u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  23. u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM];
  24. u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  25. u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  26. enum hws_pattern pbs_pattern = PATTERN_VREF;
  27. static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];
  28. /*
  29. * Name: ddr3_tip_pbs
  30. * Desc: PBS
  31. * Args: TBD
  32. * Notes:
  33. * Returns: OK if success, other error code if fail.
  34. */
  35. int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
  36. {
  37. u32 res0[MAX_INTERFACE_NUM];
  38. int adll_tap = MEGA / freq_val[medium_freq] / 64;
  39. int pad_num = 0;
  40. enum hws_search_dir search_dir =
  41. (pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
  42. enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
  43. int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
  44. u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
  45. int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
  46. enum hws_edge_compare search_edge = EDGE_FP;
  47. u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
  48. int reg_addr = 0;
  49. u32 validation_val = 0;
  50. u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
  51. u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
  52. u8 temp = 0;
  53. struct hws_topology_map *tm = ddr3_get_topology_map();
  54. /* save current cs enable reg val */
  55. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  56. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  57. /* save current cs enable reg val */
  58. CHECK_STATUS(ddr3_tip_if_read
  59. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  60. CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
  61. /* enable single cs */
  62. CHECK_STATUS(ddr3_tip_if_write
  63. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  64. CS_ENABLE_REG, (1 << 3), (1 << 3)));
  65. }
  66. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  67. (READ_CENTRALIZATION_PHY_REG +
  68. (effective_cs * CS_REGISTER_ADDR_OFFSET)) :
  69. (WRITE_CENTRALIZATION_PHY_REG +
  70. (effective_cs * CS_REGISTER_ADDR_OFFSET));
  71. read_adll_value(nominal_adll, reg_addr, MASK_ALL_BITS);
  72. /* stage 1 shift ADLL */
  73. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
  74. PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
  75. PARAM_NOT_CARE, RESULT_PER_BIT,
  76. HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
  77. tm->if_act_mask, init_val, iterations,
  78. pbs_pattern, search_edge, CS_SINGLE, cs_num,
  79. train_status);
  80. validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
  81. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  82. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  83. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  84. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  85. min_adll_per_pup[if_id][pup] =
  86. (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
  87. pup_state[if_id][pup] = 0x3;
  88. adll_shift_lock[if_id][pup] = 1;
  89. max_adll_per_pup[if_id][pup] = 0x0;
  90. }
  91. }
  92. /* EBA */
  93. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  94. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  95. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  96. CHECK_STATUS(ddr3_tip_if_read
  97. (dev_num, ACCESS_TYPE_MULTICAST,
  98. PARAM_NOT_CARE,
  99. mask_results_dq_reg_map[
  100. bit + pup * BUS_WIDTH_IN_BITS],
  101. res0, MASK_ALL_BITS));
  102. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  103. if_id++) {
  104. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  105. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  106. ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  107. if_id, bit, pup,
  108. res0[if_id]));
  109. if (pup_state[if_id][pup] != 3)
  110. continue;
  111. /* if not EBA state than move to next pup */
  112. if ((res0[if_id] & 0x2000000) == 0) {
  113. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  114. ("-- Fail Training IP\n"));
  115. /* training machine failed */
  116. pup_state[if_id][pup] = 1;
  117. adll_shift_lock[if_id][pup] = 0;
  118. continue;
  119. }
  120. else if ((res0[if_id] & res_valid_mask) ==
  121. validation_val) {
  122. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  123. ("-- FAIL EBA %d %d %d %d\n",
  124. if_id, bit, pup,
  125. res0[if_id]));
  126. pup_state[if_id][pup] = 4;
  127. /* this pup move to EEBA */
  128. adll_shift_lock[if_id][pup] = 0;
  129. continue;
  130. } else {
  131. /*
  132. * The search ended in Pass we need
  133. * Fail
  134. */
  135. res0[if_id] =
  136. (pbs_mode == PBS_RX_MODE) ?
  137. ((res0[if_id] &
  138. res_valid_mask) + 1) :
  139. ((res0[if_id] &
  140. res_valid_mask) - 1);
  141. max_adll_per_pup[if_id][pup] =
  142. (max_adll_per_pup[if_id][pup] <
  143. res0[if_id]) ?
  144. (u8)res0[if_id] :
  145. max_adll_per_pup[if_id][pup];
  146. min_adll_per_pup[if_id][pup] =
  147. (res0[if_id] >
  148. min_adll_per_pup[if_id][pup]) ?
  149. min_adll_per_pup[if_id][pup] :
  150. (u8)
  151. res0[if_id];
  152. /*
  153. * vs the Rx we are searching for the
  154. * smallest value of DQ shift so all
  155. * Bus would fail
  156. */
  157. adll_shift_val[if_id][pup] =
  158. (pbs_mode == PBS_RX_MODE) ?
  159. max_adll_per_pup[if_id][pup] :
  160. min_adll_per_pup[if_id][pup];
  161. }
  162. }
  163. }
  164. }
  165. /* EEBA */
  166. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  167. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  168. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  169. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  170. if (pup_state[if_id][pup] != 4)
  171. continue;
  172. /*
  173. * if pup state different from EEBA than move to
  174. * next pup
  175. */
  176. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  177. (0x54 + effective_cs * 0x10) :
  178. (0x14 + effective_cs * 0x10);
  179. CHECK_STATUS(ddr3_tip_bus_write
  180. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  181. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
  182. reg_addr, 0x1f));
  183. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  184. (0x55 + effective_cs * 0x10) :
  185. (0x15 + effective_cs * 0x10);
  186. CHECK_STATUS(ddr3_tip_bus_write
  187. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  188. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
  189. reg_addr, 0x1f));
  190. /* initialize the Edge2 Max. */
  191. adll_shift_val[if_id][pup] = 0;
  192. min_adll_per_pup[if_id][pup] =
  193. (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
  194. max_adll_per_pup[if_id][pup] = 0x0;
  195. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
  196. PARAM_NOT_CARE,
  197. ACCESS_TYPE_MULTICAST,
  198. PARAM_NOT_CARE, RESULT_PER_BIT,
  199. HWS_CONTROL_ELEMENT_ADLL,
  200. search_dir, dir,
  201. tm->if_act_mask, init_val,
  202. iterations, pbs_pattern,
  203. search_edge, CS_SINGLE, cs_num,
  204. train_status);
  205. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  206. ("ADLL shift results:\n"));
  207. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  208. CHECK_STATUS(ddr3_tip_if_read
  209. (dev_num, ACCESS_TYPE_MULTICAST,
  210. PARAM_NOT_CARE,
  211. mask_results_dq_reg_map[
  212. bit + pup *
  213. BUS_WIDTH_IN_BITS],
  214. res0, MASK_ALL_BITS));
  215. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  216. ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  217. if_id, bit, pup,
  218. res0[if_id]));
  219. if ((res0[if_id] & 0x2000000) == 0) {
  220. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  221. (" -- EEBA Fail\n"));
  222. bit = BUS_WIDTH_IN_BITS;
  223. /* exit bit loop */
  224. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  225. ("-- EEBA Fail Training IP\n"));
  226. /*
  227. * training machine failed but pass
  228. * before in the EBA so maybe the DQS
  229. * shift change env.
  230. */
  231. pup_state[if_id][pup] = 2;
  232. adll_shift_lock[if_id][pup] = 0;
  233. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  234. (0x54 + effective_cs * 0x10) :
  235. (0x14 + effective_cs * 0x10);
  236. CHECK_STATUS(ddr3_tip_bus_write
  237. (dev_num,
  238. ACCESS_TYPE_UNICAST,
  239. if_id,
  240. ACCESS_TYPE_UNICAST, pup,
  241. DDR_PHY_DATA, reg_addr,
  242. 0x0));
  243. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  244. (0x55 + effective_cs * 0x10) :
  245. (0x15 + effective_cs * 0x10);
  246. CHECK_STATUS(ddr3_tip_bus_write
  247. (dev_num,
  248. ACCESS_TYPE_UNICAST,
  249. if_id,
  250. ACCESS_TYPE_UNICAST, pup,
  251. DDR_PHY_DATA, reg_addr,
  252. 0x0));
  253. continue;
  254. } else if ((res0[if_id] & res_valid_mask) ==
  255. validation_val) {
  256. /* exit bit loop */
  257. bit = BUS_WIDTH_IN_BITS;
  258. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  259. ("-- FAIL EEBA\n"));
  260. /* this pup move to SBA */
  261. pup_state[if_id][pup] = 2;
  262. adll_shift_lock[if_id][pup] = 0;
  263. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  264. (0x54 + effective_cs * 0x10) :
  265. (0x14 + effective_cs * 0x10);
  266. CHECK_STATUS(ddr3_tip_bus_write
  267. (dev_num,
  268. ACCESS_TYPE_UNICAST,
  269. if_id,
  270. ACCESS_TYPE_UNICAST, pup,
  271. DDR_PHY_DATA, reg_addr,
  272. 0x0));
  273. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  274. (0x55 + effective_cs * 0x10) :
  275. (0x15 + effective_cs * 0x10);
  276. CHECK_STATUS(ddr3_tip_bus_write
  277. (dev_num,
  278. ACCESS_TYPE_UNICAST,
  279. if_id,
  280. ACCESS_TYPE_UNICAST, pup,
  281. DDR_PHY_DATA, reg_addr,
  282. 0x0));
  283. continue;
  284. } else {
  285. adll_shift_lock[if_id][pup] = 1;
  286. /*
  287. * The search ended in Pass we need
  288. * Fail
  289. */
  290. res0[if_id] =
  291. (pbs_mode == PBS_RX_MODE) ?
  292. ((res0[if_id] &
  293. res_valid_mask) + 1) :
  294. ((res0[if_id] &
  295. res_valid_mask) - 1);
  296. max_adll_per_pup[if_id][pup] =
  297. (max_adll_per_pup[if_id][pup] <
  298. res0[if_id]) ?
  299. (u8)res0[if_id] :
  300. max_adll_per_pup[if_id][pup];
  301. min_adll_per_pup[if_id][pup] =
  302. (res0[if_id] >
  303. min_adll_per_pup[if_id][pup]) ?
  304. min_adll_per_pup[if_id][pup] :
  305. (u8)res0[if_id];
  306. /*
  307. * vs the Rx we are searching for the
  308. * smallest value of DQ shift so all Bus
  309. * would fail
  310. */
  311. adll_shift_val[if_id][pup] =
  312. (pbs_mode == PBS_RX_MODE) ?
  313. max_adll_per_pup[if_id][pup] :
  314. min_adll_per_pup[if_id][pup];
  315. }
  316. }
  317. }
  318. }
  319. /* Print Stage result */
  320. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  321. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  322. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  323. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  324. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  325. ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
  326. if_id, pup,
  327. adll_shift_lock[if_id][pup],
  328. max_adll_per_pup[if_id][pup],
  329. min_adll_per_pup[if_id][pup]));
  330. }
  331. }
  332. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  333. ("Update ADLL Shift of all pups:\n"));
  334. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  335. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  336. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  337. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  338. if (adll_shift_lock[if_id][pup] != 1)
  339. continue;
  340. /* if pup not locked continue to next pup */
  341. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  342. (0x3 + effective_cs * 4) :
  343. (0x1 + effective_cs * 4);
  344. CHECK_STATUS(ddr3_tip_bus_write
  345. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  346. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
  347. reg_addr, adll_shift_val[if_id][pup]));
  348. DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
  349. ("FP I/F %d, Pup[%d] = %d\n", if_id,
  350. pup, adll_shift_val[if_id][pup]));
  351. }
  352. }
  353. /* PBS EEBA&EBA */
  354. /* Start the Per Bit Skew search */
  355. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  356. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  357. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  358. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  359. max_pbs_per_pup[if_id][pup] = 0x0;
  360. min_pbs_per_pup[if_id][pup] = 0x1f;
  361. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  362. /* reset result for PBS */
  363. result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
  364. if_id * MAX_BUS_NUM *
  365. BUS_WIDTH_IN_BITS] = 0;
  366. }
  367. }
  368. }
  369. iterations = 31;
  370. search_dir = HWS_LOW2HIGH;
  371. /* !!!!! ran sh (search_dir == HWS_LOW2HIGH)?0:iterations; */
  372. init_val = 0;
  373. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
  374. ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
  375. RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
  376. search_dir, dir, tm->if_act_mask, init_val,
  377. iterations, pbs_pattern, search_edge,
  378. CS_SINGLE, cs_num, train_status);
  379. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  380. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  381. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  382. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  383. if (adll_shift_lock[if_id][pup] != 1) {
  384. /* if pup not lock continue to next pup */
  385. continue;
  386. }
  387. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  388. CHECK_STATUS(ddr3_tip_if_read
  389. (dev_num, ACCESS_TYPE_MULTICAST,
  390. PARAM_NOT_CARE,
  391. mask_results_dq_reg_map[
  392. bit +
  393. pup * BUS_WIDTH_IN_BITS],
  394. res0, MASK_ALL_BITS));
  395. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  396. ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  397. if_id, bit, pup,
  398. res0[if_id]));
  399. if ((res0[if_id] & 0x2000000) == 0) {
  400. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  401. ("--EBA PBS Fail - Training IP machine\n"));
  402. /* exit the bit loop */
  403. bit = BUS_WIDTH_IN_BITS;
  404. /*
  405. * ADLL is no long in lock need new
  406. * search
  407. */
  408. adll_shift_lock[if_id][pup] = 0;
  409. /* Move to SBA */
  410. pup_state[if_id][pup] = 2;
  411. max_pbs_per_pup[if_id][pup] = 0x0;
  412. min_pbs_per_pup[if_id][pup] = 0x1f;
  413. continue;
  414. } else {
  415. temp = (u8)(res0[if_id] &
  416. res_valid_mask);
  417. max_pbs_per_pup[if_id][pup] =
  418. (temp >
  419. max_pbs_per_pup[if_id][pup]) ?
  420. temp :
  421. max_pbs_per_pup[if_id][pup];
  422. min_pbs_per_pup[if_id][pup] =
  423. (temp <
  424. min_pbs_per_pup[if_id][pup]) ?
  425. temp :
  426. min_pbs_per_pup[if_id][pup];
  427. result_all_bit[bit +
  428. pup * BUS_WIDTH_IN_BITS +
  429. if_id * MAX_BUS_NUM *
  430. BUS_WIDTH_IN_BITS] =
  431. temp;
  432. }
  433. }
  434. }
  435. }
  436. /* Check all Pup lock */
  437. all_lock = 1;
  438. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  439. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  440. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  441. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  442. all_lock = all_lock * adll_shift_lock[if_id][pup];
  443. }
  444. }
  445. /* Only if not all Pups Lock */
  446. if (all_lock == 0) {
  447. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  448. ("##########ADLL shift for SBA###########\n"));
  449. /* ADLL shift for SBA */
  450. search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
  451. HWS_HIGH2LOW;
  452. init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
  453. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  454. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  455. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  456. if_id++) {
  457. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  458. if (adll_shift_lock[if_id][pup] == 1) {
  459. /*if pup lock continue to next pup */
  460. continue;
  461. }
  462. /*init the var altogth init before */
  463. adll_shift_lock[if_id][pup] = 0;
  464. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  465. (0x54 + effective_cs * 0x10) :
  466. (0x14 + effective_cs * 0x10);
  467. CHECK_STATUS(ddr3_tip_bus_write
  468. (dev_num, ACCESS_TYPE_UNICAST,
  469. if_id, ACCESS_TYPE_UNICAST, pup,
  470. DDR_PHY_DATA, reg_addr, 0));
  471. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  472. (0x55 + effective_cs * 0x10) :
  473. (0x15 + effective_cs * 0x10);
  474. CHECK_STATUS(ddr3_tip_bus_write
  475. (dev_num, ACCESS_TYPE_UNICAST,
  476. if_id, ACCESS_TYPE_UNICAST, pup,
  477. DDR_PHY_DATA, reg_addr, 0));
  478. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  479. (0x5f + effective_cs * 0x10) :
  480. (0x1f + effective_cs * 0x10);
  481. CHECK_STATUS(ddr3_tip_bus_write
  482. (dev_num, ACCESS_TYPE_UNICAST,
  483. if_id, ACCESS_TYPE_UNICAST, pup,
  484. DDR_PHY_DATA, reg_addr, 0));
  485. /* initilaze the Edge2 Max. */
  486. adll_shift_val[if_id][pup] = 0;
  487. min_adll_per_pup[if_id][pup] = 0x1f;
  488. max_adll_per_pup[if_id][pup] = 0x0;
  489. ddr3_tip_ip_training(dev_num,
  490. ACCESS_TYPE_MULTICAST,
  491. PARAM_NOT_CARE,
  492. ACCESS_TYPE_MULTICAST,
  493. PARAM_NOT_CARE,
  494. RESULT_PER_BIT,
  495. HWS_CONTROL_ELEMENT_ADLL,
  496. search_dir, dir,
  497. tm->if_act_mask,
  498. init_val, iterations,
  499. pbs_pattern,
  500. search_edge, CS_SINGLE,
  501. cs_num, train_status);
  502. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  503. CHECK_STATUS(ddr3_tip_if_read
  504. (dev_num,
  505. ACCESS_TYPE_MULTICAST,
  506. PARAM_NOT_CARE,
  507. mask_results_dq_reg_map
  508. [bit +
  509. pup *
  510. BUS_WIDTH_IN_BITS],
  511. res0, MASK_ALL_BITS));
  512. DEBUG_PBS_ENGINE(
  513. DEBUG_LEVEL_INFO,
  514. ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
  515. if_id, bit, pup, res0[if_id]));
  516. if ((res0[if_id] & 0x2000000) == 0) {
  517. /* exit the bit loop */
  518. bit = BUS_WIDTH_IN_BITS;
  519. /* Fail SBA --> Fail PBS */
  520. pup_state[if_id][pup] = 1;
  521. DEBUG_PBS_ENGINE
  522. (DEBUG_LEVEL_INFO,
  523. (" SBA Fail\n"));
  524. continue;
  525. } else {
  526. /*
  527. * - increment to get all
  528. * 8 bit lock.
  529. */
  530. adll_shift_lock[if_id][pup]++;
  531. /*
  532. * The search ended in Pass
  533. * we need Fail
  534. */
  535. res0[if_id] =
  536. (pbs_mode == PBS_RX_MODE) ?
  537. ((res0[if_id] & res_valid_mask) + 1) :
  538. ((res0[if_id] & res_valid_mask) - 1);
  539. max_adll_per_pup[if_id][pup] =
  540. (max_adll_per_pup[if_id]
  541. [pup] < res0[if_id]) ?
  542. (u8)res0[if_id] :
  543. max_adll_per_pup[if_id][pup];
  544. min_adll_per_pup[if_id][pup] =
  545. (res0[if_id] >
  546. min_adll_per_pup[if_id]
  547. [pup]) ?
  548. min_adll_per_pup[if_id][pup] :
  549. (u8)res0[if_id];
  550. /*
  551. * vs the Rx we are searching for
  552. * the smallest value of DQ shift
  553. * so all Bus would fail
  554. */
  555. adll_shift_val[if_id][pup] =
  556. (pbs_mode == PBS_RX_MODE) ?
  557. max_adll_per_pup[if_id][pup] :
  558. min_adll_per_pup[if_id][pup];
  559. }
  560. }
  561. /* 1 is lock */
  562. adll_shift_lock[if_id][pup] =
  563. (adll_shift_lock[if_id][pup] == 8) ?
  564. 1 : 0;
  565. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  566. (0x3 + effective_cs * 4) :
  567. (0x1 + effective_cs * 4);
  568. CHECK_STATUS(ddr3_tip_bus_write
  569. (dev_num, ACCESS_TYPE_UNICAST,
  570. if_id, ACCESS_TYPE_UNICAST, pup,
  571. DDR_PHY_DATA, reg_addr,
  572. adll_shift_val[if_id][pup]));
  573. DEBUG_PBS_ENGINE(
  574. DEBUG_LEVEL_INFO,
  575. ("adll_shift_lock[%x][%x] = %x\n",
  576. if_id, pup,
  577. adll_shift_lock[if_id][pup]));
  578. }
  579. }
  580. /* End ADLL Shift for SBA */
  581. /* Start the Per Bit Skew search */
  582. /* The ADLL shift finished with a Pass */
  583. search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
  584. search_dir = (pbs_mode == PBS_RX_MODE) ?
  585. HWS_LOW2HIGH : HWS_HIGH2LOW;
  586. iterations = 0x1f;
  587. /* - The initial value is different in Rx and Tx mode */
  588. init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;
  589. ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
  590. PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
  591. PARAM_NOT_CARE, RESULT_PER_BIT,
  592. HWS_CONTROL_ELEMENT_DQ_SKEW,
  593. search_dir, dir, tm->if_act_mask,
  594. init_val, iterations, pbs_pattern,
  595. search_edge, CS_SINGLE, cs_num,
  596. train_status);
  597. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  598. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  599. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  600. if_id++) {
  601. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  602. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  603. CHECK_STATUS(ddr3_tip_if_read
  604. (dev_num,
  605. ACCESS_TYPE_MULTICAST,
  606. PARAM_NOT_CARE,
  607. mask_results_dq_reg_map
  608. [bit +
  609. pup *
  610. BUS_WIDTH_IN_BITS],
  611. res0, MASK_ALL_BITS));
  612. if (pup_state[if_id][pup] != 2) {
  613. /*
  614. * if pup is not SBA continue
  615. * to next pup
  616. */
  617. bit = BUS_WIDTH_IN_BITS;
  618. continue;
  619. }
  620. DEBUG_PBS_ENGINE(
  621. DEBUG_LEVEL_INFO,
  622. ("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
  623. if_id, bit, pup, res0[if_id]));
  624. if ((res0[if_id] & 0x2000000) == 0) {
  625. DEBUG_PBS_ENGINE
  626. (DEBUG_LEVEL_INFO,
  627. ("SBA Fail\n"));
  628. max_pbs_per_pup[if_id][pup] =
  629. 0x1f;
  630. result_all_bit[
  631. bit + pup *
  632. BUS_WIDTH_IN_BITS +
  633. if_id * MAX_BUS_NUM *
  634. BUS_WIDTH_IN_BITS] =
  635. 0x1f;
  636. } else {
  637. temp = (u8)(res0[if_id] &
  638. res_valid_mask);
  639. max_pbs_per_pup[if_id][pup] =
  640. (temp >
  641. max_pbs_per_pup[if_id]
  642. [pup]) ? temp :
  643. max_pbs_per_pup
  644. [if_id][pup];
  645. min_pbs_per_pup[if_id][pup] =
  646. (temp <
  647. min_pbs_per_pup[if_id]
  648. [pup]) ? temp :
  649. min_pbs_per_pup
  650. [if_id][pup];
  651. result_all_bit[
  652. bit + pup *
  653. BUS_WIDTH_IN_BITS +
  654. if_id * MAX_BUS_NUM *
  655. BUS_WIDTH_IN_BITS] =
  656. temp;
  657. adll_shift_lock[if_id][pup] = 1;
  658. }
  659. }
  660. }
  661. }
  662. /* Check all Pup state */
  663. all_lock = 1;
  664. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  665. /*
  666. * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  667. * ("pup_state[%d][%d] = %d\n",if_id,pup,pup_state
  668. * [if_id][pup]));
  669. */
  670. }
  671. }
  672. /* END OF SBA */
  673. /* Norm */
  674. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  675. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  676. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  677. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
  678. if_id++) {
  679. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  680. /* if pup not lock continue to next pup */
  681. if (adll_shift_lock[if_id][pup] != 1) {
  682. DEBUG_PBS_ENGINE(
  683. DEBUG_LEVEL_ERROR,
  684. ("PBS failed for IF #%d\n",
  685. if_id));
  686. training_result[training_stage][if_id]
  687. = TEST_FAILED;
  688. result_mat[if_id][pup][bit] = 0;
  689. max_pbs_per_pup[if_id][pup] = 0;
  690. min_pbs_per_pup[if_id][pup] = 0;
  691. } else {
  692. training_result[
  693. training_stage][if_id] =
  694. (training_result[training_stage]
  695. [if_id] == TEST_FAILED) ?
  696. TEST_FAILED : TEST_SUCCESS;
  697. result_mat[if_id][pup][bit] =
  698. result_all_bit[
  699. bit + pup *
  700. BUS_WIDTH_IN_BITS +
  701. if_id * MAX_BUS_NUM *
  702. BUS_WIDTH_IN_BITS] -
  703. min_pbs_per_pup[if_id][pup];
  704. }
  705. DEBUG_PBS_ENGINE(
  706. DEBUG_LEVEL_INFO,
  707. ("The abs min_pbs[%d][%d] = %d\n",
  708. if_id, pup,
  709. min_pbs_per_pup[if_id][pup]));
  710. }
  711. }
  712. }
  713. /* Clean all results */
  714. ddr3_tip_clean_pbs_result(dev_num, pbs_mode);
  715. /* DQ PBS register update with the final result */
  716. for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
  717. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  718. for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
  719. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  720. DEBUG_PBS_ENGINE(
  721. DEBUG_LEVEL_INFO,
  722. ("Final Results: if_id %d, pup %d, Pup State: %d\n",
  723. if_id, pup, pup_state[if_id][pup]));
  724. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  725. if (dq_map_table == NULL) {
  726. DEBUG_PBS_ENGINE(
  727. DEBUG_LEVEL_ERROR,
  728. ("dq_map_table not initialized\n"));
  729. return MV_FAIL;
  730. }
  731. pad_num = dq_map_table[
  732. bit + pup * BUS_WIDTH_IN_BITS +
  733. if_id * BUS_WIDTH_IN_BITS *
  734. tm->num_of_bus_per_interface];
  735. DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
  736. ("result_mat: %d ",
  737. result_mat[if_id][pup]
  738. [bit]));
  739. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  740. (PBS_RX_PHY_REG + effective_cs * 0x10) :
  741. (PBS_TX_PHY_REG + effective_cs * 0x10);
  742. CHECK_STATUS(ddr3_tip_bus_write
  743. (dev_num, ACCESS_TYPE_UNICAST,
  744. if_id, ACCESS_TYPE_UNICAST, pup,
  745. DDR_PHY_DATA, reg_addr + pad_num,
  746. result_mat[if_id][pup][bit]));
  747. }
  748. pbsdelay_per_pup[pbs_mode][if_id][pup] =
  749. (max_pbs_per_pup[if_id][pup] ==
  750. min_pbs_per_pup[if_id][pup]) ?
  751. TYPICAL_PBS_VALUE :
  752. ((max_adll_per_pup[if_id][pup] -
  753. min_adll_per_pup[if_id][pup]) * adll_tap /
  754. (max_pbs_per_pup[if_id][pup] -
  755. min_pbs_per_pup[if_id][pup]));
  756. /* RX results ready, write RX also */
  757. if (pbs_mode == PBS_TX_MODE) {
  758. /* Write TX results */
  759. reg_addr = (0x14 + effective_cs * 0x10);
  760. CHECK_STATUS(ddr3_tip_bus_write
  761. (dev_num, ACCESS_TYPE_UNICAST,
  762. if_id, ACCESS_TYPE_UNICAST, pup,
  763. DDR_PHY_DATA, reg_addr,
  764. (max_pbs_per_pup[if_id][pup] -
  765. min_pbs_per_pup[if_id][pup]) /
  766. 2));
  767. reg_addr = (0x15 + effective_cs * 0x10);
  768. CHECK_STATUS(ddr3_tip_bus_write
  769. (dev_num, ACCESS_TYPE_UNICAST,
  770. if_id, ACCESS_TYPE_UNICAST, pup,
  771. DDR_PHY_DATA, reg_addr,
  772. (max_pbs_per_pup[if_id][pup] -
  773. min_pbs_per_pup[if_id][pup]) /
  774. 2));
  775. /* Write previously stored RX results */
  776. reg_addr = (0x54 + effective_cs * 0x10);
  777. CHECK_STATUS(ddr3_tip_bus_write
  778. (dev_num, ACCESS_TYPE_UNICAST,
  779. if_id, ACCESS_TYPE_UNICAST, pup,
  780. DDR_PHY_DATA, reg_addr,
  781. result_mat_rx_dqs[if_id][pup]
  782. [effective_cs]));
  783. reg_addr = (0x55 + effective_cs * 0x10);
  784. CHECK_STATUS(ddr3_tip_bus_write
  785. (dev_num, ACCESS_TYPE_UNICAST,
  786. if_id, ACCESS_TYPE_UNICAST, pup,
  787. DDR_PHY_DATA, reg_addr,
  788. result_mat_rx_dqs[if_id][pup]
  789. [effective_cs]));
  790. } else {
  791. /*
  792. * RX results may affect RL results correctess,
  793. * so just store the results that will written
  794. * in TX stage
  795. */
  796. result_mat_rx_dqs[if_id][pup][effective_cs] =
  797. (max_pbs_per_pup[if_id][pup] -
  798. min_pbs_per_pup[if_id][pup]) / 2;
  799. }
  800. DEBUG_PBS_ENGINE(
  801. DEBUG_LEVEL_INFO,
  802. (", PBS tap=%d [psec] ==> skew observed = %d\n",
  803. pbsdelay_per_pup[pbs_mode][if_id][pup],
  804. ((max_pbs_per_pup[if_id][pup] -
  805. min_pbs_per_pup[if_id][pup]) *
  806. pbsdelay_per_pup[pbs_mode][if_id][pup])));
  807. }
  808. }
  809. /* Write back to the phy the default values */
  810. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  811. (READ_CENTRALIZATION_PHY_REG + effective_cs * 4) :
  812. (WRITE_CENTRALIZATION_PHY_REG + effective_cs * 4);
  813. write_adll_value(nominal_adll, reg_addr);
  814. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  815. reg_addr = (pbs_mode == PBS_RX_MODE) ?
  816. (0x5a + effective_cs * 0x10) :
  817. (0x1a + effective_cs * 0x10);
  818. CHECK_STATUS(ddr3_tip_bus_write
  819. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  820. ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
  821. 0));
  822. /* restore cs enable value */
  823. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  824. CHECK_STATUS(ddr3_tip_if_write
  825. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  826. CS_ENABLE_REG, cs_enable_reg_val[if_id],
  827. MASK_ALL_BITS));
  828. }
  829. /* exit test mode */
  830. CHECK_STATUS(ddr3_tip_if_write
  831. (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
  832. ODPG_WRITE_READ_MODE_ENABLE_REG, 0xffff, MASK_ALL_BITS));
  833. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  834. /*
  835. * meaning that there is no VW exist at all (No lock at
  836. * the EBA ADLL shift at EBS)
  837. */
  838. if (pup_state[if_id][pup] == 1)
  839. return MV_FAIL;
  840. }
  841. return MV_OK;
  842. }
  843. /*
  844. * Name: ddr3_tip_pbs_rx.
  845. * Desc: PBS TX
  846. * Args: TBD
  847. * Notes:
  848. * Returns: OK if success, other error code if fail.
  849. */
  850. int ddr3_tip_pbs_rx(u32 uidev_num)
  851. {
  852. return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
  853. }
  854. /*
  855. * Name: ddr3_tip_pbs_tx.
  856. * Desc: PBS TX
  857. * Args: TBD
  858. * Notes:
  859. * Returns: OK if success, other error code if fail.
  860. */
  861. int ddr3_tip_pbs_tx(u32 uidev_num)
  862. {
  863. return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
  864. }
  865. #ifndef EXCLUDE_SWITCH_DEBUG
  866. /*
  867. * Print PBS Result
  868. */
  869. int ddr3_tip_print_all_pbs_result(u32 dev_num)
  870. {
  871. u32 curr_cs;
  872. u32 max_cs = hws_ddr3_tip_max_cs_get();
  873. for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
  874. ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
  875. ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
  876. }
  877. return MV_OK;
  878. }
  879. /*
  880. * Print PBS Result
  881. */
  882. int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
  883. {
  884. u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
  885. u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
  886. (PBS_RX_PHY_REG + cs_num * 0x10) :
  887. (PBS_TX_PHY_REG + cs_num * 0x10);
  888. struct hws_topology_map *tm = ddr3_get_topology_map();
  889. printf("CS%d, %s ,PBS\n", cs_num,
  890. (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
  891. for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
  892. printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
  893. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  894. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  895. printf("%d ,PBS,,, ", bit);
  896. for (pup = 0; pup <= tm->num_of_bus_per_interface;
  897. pup++) {
  898. VALIDATE_ACTIVE(tm->bus_act_mask, pup);
  899. CHECK_STATUS(ddr3_tip_bus_read
  900. (dev_num, if_id,
  901. ACCESS_TYPE_UNICAST, pup,
  902. DDR_PHY_DATA, reg_addr + bit,
  903. &data_value));
  904. printf("%d , ", data_value);
  905. }
  906. }
  907. printf("\n");
  908. }
  909. printf("\n");
  910. return MV_OK;
  911. }
  912. #endif
  913. /*
  914. * Fixup PBS Result
  915. */
  916. int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
  917. {
  918. u32 if_id, pup, bit;
  919. u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
  920. (PBS_RX_PHY_REG + effective_cs * 0x10) :
  921. (PBS_TX_PHY_REG + effective_cs * 0x10);
  922. struct hws_topology_map *tm = ddr3_get_topology_map();
  923. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  924. VALIDATE_ACTIVE(tm->if_act_mask, if_id);
  925. for (pup = 0; pup <= tm->num_of_bus_per_interface; pup++) {
  926. for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
  927. CHECK_STATUS(ddr3_tip_bus_write
  928. (dev_num, ACCESS_TYPE_UNICAST,
  929. if_id, ACCESS_TYPE_UNICAST, pup,
  930. DDR_PHY_DATA, reg_addr + bit, 0));
  931. }
  932. }
  933. }
  934. return MV_OK;
  935. }