ddr3_training_ip_engine.c

/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>
#include "ddr3_init.h"

#define PATTERN_1	0x55555555
#define PATTERN_2	0xaaaaaaaa

#define VALIDATE_TRAINING_LIMIT(e1, e2) \
    ((((e2) - (e1) + 1) > 33) && ((e1) < 67))
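
/*
 * VALIDATE_TRAINING_LIMIT evaluates to 1 when the span [e1, e2] is
 * wider than 33 taps and its left edge e1 sits below tap 67;
 * ddr3_tip_ip_training_wrapper() uses it to mark "problem bits" whose
 * searches are then re-run one direction at a time. Worked example
 * (values are illustrative, not from the original source):
 * VALIDATE_TRAINING_LIMIT(20, 60) is 1 (width 41 > 33 and 20 < 67),
 * while VALIDATE_TRAINING_LIMIT(50, 70) is 0 (width 21 fails).
 */
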
u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
                 HWS_SEARCH_DIR_LIMIT];

u16 mask_results_dq_reg_map[] = {
    RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
    RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
    RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
    RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
    RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
    RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
    RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
    RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
    RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
    RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
    RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
    RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
    RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
    RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
    RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
    RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
    RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
    RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
    RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
    RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};

u16 mask_results_pup_reg_map[] = {
    RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
    RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
    RESULT_CONTROL_BYTE_PUP_4_REG
};
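
/*
 * In the two "pup3_ecc" maps below, the entries that would normally
 * point at the PUP 3 result registers point at the PUP 4 registers
 * instead (with the PUP 4 entries repeated). This appears deliberate:
 * with ECC on PUP 3, that byte's results are captured through the
 * PUP 4 register set, as selected by the ddr3_tip_get_mask_results_*()
 * helpers at the end of this file.
 */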
u16 mask_results_dq_reg_map_pup3_ecc[] = {
    RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
    RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
    RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
    RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
    RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
    RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
    RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
    RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
    RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
    RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
    RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
    RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
    RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
    RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
    RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
    RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
    RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
    RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
    RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
    RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};

u16 mask_results_pup_reg_map_pup3_ecc[] = {
    RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
    RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
    RESULT_CONTROL_BYTE_PUP_4_REG
};
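
/*
 * Two pattern tables follow: pattern_table_16 for 16-bit bus mode and
 * pattern_table_32 for 32-bit mode; ddr3_tip_get_pattern_table() at
 * the end of this file selects between them based on the topology's
 * bus_act_mask. The 32-bit table roughly doubles the phase counts and
 * doubles the pattern lengths of the 16-bit one.
 */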
struct pattern_info pattern_table_16[] = {
    /*
     * num tx phases, tx burst, delay between, rx pattern,
     * start_address, pattern_len
     */
    {1, 1, 2, 1, 0x0080, 2},        /* PATTERN_PBS1 */
    {1, 1, 2, 1, 0x00c0, 2},        /* PATTERN_PBS2 */
    {1, 1, 2, 1, 0x0100, 2},        /* PATTERN_RL */
    {0xf, 0x7, 2, 0x7, 0x0140, 16}, /* PATTERN_STATIC_PBS */
    {0xf, 0x7, 2, 0x7, 0x0190, 16}, /* PATTERN_KILLER_DQ0 */
    {0xf, 0x7, 2, 0x7, 0x01d0, 16}, /* PATTERN_KILLER_DQ1 */
    {0xf, 0x7, 2, 0x7, 0x0210, 16}, /* PATTERN_KILLER_DQ2 */
    {0xf, 0x7, 2, 0x7, 0x0250, 16}, /* PATTERN_KILLER_DQ3 */
    {0xf, 0x7, 2, 0x7, 0x0290, 16}, /* PATTERN_KILLER_DQ4 */
    {0xf, 0x7, 2, 0x7, 0x02d0, 16}, /* PATTERN_KILLER_DQ5 */
    {0xf, 0x7, 2, 0x7, 0x0310, 16}, /* PATTERN_KILLER_DQ6 */
    {0xf, 0x7, 2, 0x7, 0x0350, 16}, /* PATTERN_KILLER_DQ7 */
    {1, 1, 2, 1, 0x0380, 2},        /* PATTERN_PBS3 */
    {1, 1, 2, 1, 0x0000, 2},        /* PATTERN_RL2 */
    {1, 1, 2, 1, 0x0040, 2},        /* PATTERN_TEST */
    {0xf, 0x7, 2, 0x7, 0x03c0, 16}, /* PATTERN_FULL_SSO_1T */
    {0xf, 0x7, 2, 0x7, 0x0400, 16}, /* PATTERN_FULL_SSO_2T */
    {0xf, 0x7, 2, 0x7, 0x0440, 16}, /* PATTERN_FULL_SSO_3T */
    {0xf, 0x7, 2, 0x7, 0x0480, 16}, /* PATTERN_FULL_SSO_4T */
    {0xf, 0x7, 2, 0x7, 0x04c0, 16}  /* PATTERN_VREF */
    /* Note: the actual start_address is the listed address << 3 */
};
struct pattern_info pattern_table_32[] = {
    /*
     * num tx phases, tx burst, delay between, rx pattern,
     * start_address, pattern_len
     */
    {3, 3, 2, 3, 0x0080, 4},         /* PATTERN_PBS1 */
    {3, 3, 2, 3, 0x00c0, 4},         /* PATTERN_PBS2 */
    {3, 3, 2, 3, 0x0100, 4},         /* PATTERN_RL */
    {0x1f, 0xf, 2, 0xf, 0x0140, 32}, /* PATTERN_STATIC_PBS */
    {0x1f, 0xf, 2, 0xf, 0x0190, 32}, /* PATTERN_KILLER_DQ0 */
    {0x1f, 0xf, 2, 0xf, 0x01d0, 32}, /* PATTERN_KILLER_DQ1 */
    {0x1f, 0xf, 2, 0xf, 0x0210, 32}, /* PATTERN_KILLER_DQ2 */
    {0x1f, 0xf, 2, 0xf, 0x0250, 32}, /* PATTERN_KILLER_DQ3 */
    {0x1f, 0xf, 2, 0xf, 0x0290, 32}, /* PATTERN_KILLER_DQ4 */
    {0x1f, 0xf, 2, 0xf, 0x02d0, 32}, /* PATTERN_KILLER_DQ5 */
    {0x1f, 0xf, 2, 0xf, 0x0310, 32}, /* PATTERN_KILLER_DQ6 */
    {0x1f, 0xf, 2, 0xf, 0x0350, 32}, /* PATTERN_KILLER_DQ7 */
    {3, 3, 2, 3, 0x0380, 4},         /* PATTERN_PBS3 */
    {3, 3, 2, 3, 0x0000, 4},         /* PATTERN_RL2 */
    {3, 3, 2, 3, 0x0040, 4},         /* PATTERN_TEST */
    {0x1f, 0xf, 2, 0xf, 0x03c0, 32}, /* PATTERN_FULL_SSO_1T */
    {0x1f, 0xf, 2, 0xf, 0x0400, 32}, /* PATTERN_FULL_SSO_2T */
    {0x1f, 0xf, 2, 0xf, 0x0440, 32}, /* PATTERN_FULL_SSO_3T */
    {0x1f, 0xf, 2, 0xf, 0x0480, 32}, /* PATTERN_FULL_SSO_4T */
    {0x1f, 0xf, 2, 0xf, 0x04c0, 32}  /* PATTERN_VREF */
    /* Note: the actual start_address is the listed address << 3 */
};
u32 train_dev_num;
enum hws_ddr_cs train_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir train_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_access, train_if_id, train_pup_access;
u32 max_polling_for_done = 1000000;
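
/*
 * training_res is a flat array indexed as
 * [search_dir][interface][bus * BUS_WIDTH_IN_BITS + bit]; the helper
 * below returns the base of one interface's slice for one search
 * direction. Note that dev_num and result_type are accepted only for
 * interface symmetry and do not affect the returned pointer.
 */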
u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
                          enum hws_training_result result_type,
                          u32 interface_num)
{
    u32 *buf_ptr = NULL;

    buf_ptr = &training_res
        [MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
         interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];

    return buf_ptr;
}

/*
 * IP Training search
 * Note: for one-edge searches, search only from fail to pass;
 * otherwise jitter can be entered into the solution.
 */
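
/*
 * Rough flow of ddr3_tip_ip_training(), as read from the code below:
 * select the chip-select(s), load the pattern into the ODPG, configure
 * the ODPG burst parameters, program the search parameters (direction,
 * edge criterion, init value, iteration count, PHY register to sweep),
 * unmask the relevant per-bit or per-byte result registers, trigger the
 * training machine, then poll each active interface for done/pass/fail.
 */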
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
                         u32 interface_num,
                         enum hws_access_type pup_access_type,
                         u32 pup_num, enum hws_training_result result_type,
                         enum hws_control_element control_element,
                         enum hws_search_dir search_dir, enum hws_dir direction,
                         u32 interface_mask, u32 init_value, u32 num_iter,
                         enum hws_pattern pattern,
                         enum hws_edge_compare edge_comp,
                         enum hws_ddr_cs cs_type, u32 cs_num,
                         enum hws_training_ip_stat *train_status)
{
    u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt, poll_cnt,
        reg_data, pup_id;
    u32 tx_burst_size;
    u32 delay_between_burst;
    u32 rd_mode;
    u32 read_data[MAX_INTERFACE_NUM];
    struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
    u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
    u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
    struct hws_topology_map *tm = ddr3_get_topology_map();

    if (pup_num >= tm->num_of_bus_per_interface) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("pup_num %d not valid\n", pup_num));
    }
    if (interface_num >= MAX_INTERFACE_NUM) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("if_id %d not valid\n", interface_num));
    }
    if (train_status == NULL) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("error param 4\n"));
        return MV_BAD_PARAM;
    }

    /* load pattern */
    if (cs_type == CS_SINGLE) {
        /* All CSs to CS0 */
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, interface_num,
                      CS_ENABLE_REG, 1 << 3, 1 << 3));
        /* All CSs to CS0 */
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, interface_num,
                      ODPG_DATA_CONTROL_REG,
                      (0x3 | (effective_cs << 26)), 0xc000003));
    } else {
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, interface_num,
                      CS_ENABLE_REG, 0, 1 << 3));
        /* CS select */
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, interface_num,
                      ODPG_DATA_CONTROL_REG, 0x3 | cs_num << 26,
                      0x3 | 3 << 26));
    }

    /* load pattern to ODPG */
    ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
                                  pattern,
                                  pattern_table[pattern].start_addr);
    tx_burst_size = (direction == OPER_WRITE) ?
        pattern_table[pattern].tx_burst_size : 0;
    delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
    rd_mode = (direction == OPER_WRITE) ? 1 : 0;
    CHECK_STATUS(ddr3_tip_configure_odpg
                 (dev_num, access_type, interface_num, direction,
                  pattern_table[pattern].num_of_phases_tx, tx_burst_size,
                  pattern_table[pattern].num_of_phases_rx,
                  delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
                  DURATION_SINGLE));
    reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
    reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, access_type, interface_num,
                  ODPG_WRITE_READ_MODE_ENABLE_REG, reg_data,
                  MASK_ALL_BITS));
    reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
    reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
        (1 << 7) : 0;

    /* change from Pass to Fail will lock the result */
    if (pup_access_type == ACCESS_TYPE_MULTICAST)
        reg_data |= 0xe << 14;
    else
        reg_data |= pup_num << 14;

    if (edge_comp == EDGE_FP) {
        /* don't search for a real edge change, only the state */
        reg_data |= (0 << 20);
    } else if (edge_comp == EDGE_FPF) {
        reg_data |= (0 << 20);
    } else {
        reg_data |= (3 << 20);
    }
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, access_type, interface_num,
                  ODPG_TRAINING_CONTROL_REG,
                  reg_data | (0x7 << 8) | (0x7 << 11),
                  (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
                   (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
    reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, access_type, interface_num, ODPG_OBJ1_OPCODE_REG,
                  1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
                  0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

    /*
     * Write2_dunit(0x10b4, Number_iteration, [15:0])
     * Max number of iterations
     */
    CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
                                   ODPG_OBJ1_ITER_CNT_REG, num_iter,
                                   0xffff));
    if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
        direction == OPER_READ) {
        /*
         * Write2_dunit(0x10c0, 0x5f, [7:0])
         * MC PBS Reg Address at DDR PHY
         */
        reg_data = 0x5f +
            effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
    } else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
               direction == OPER_WRITE) {
        reg_data = 0x1f +
            effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
    } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
               direction == OPER_WRITE) {
        /*
         * LOOP 0x00000001 + 4*n:
         * where n (0-3) represents M_CS number
         */
        /*
         * Write2_dunit(0x10c0, 0x1, [7:0])
         * ADLL WR Reg Address at DDR PHY
         */
        reg_data = 1 + effective_cs * CS_REGISTER_ADDR_OFFSET;
    } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
               direction == OPER_READ) {
        /* ADLL RD Reg Address at DDR PHY */
        reg_data = 3 + effective_cs * CS_REGISTER_ADDR_OFFSET;
    } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
               direction == OPER_WRITE) {
        /* TBD not defined in 0.5.0 requirement */
    } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
               direction == OPER_READ) {
        /* TBD not defined in 0.5.0 requirement */
    }
    reg_data |= (0x6 << 28);
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, access_type, interface_num, CALIB_OBJ_PRFA_REG,
                  reg_data | (init_value << 8),
                  0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));

    mask_dq_num_of_regs = tm->num_of_bus_per_interface * BUS_WIDTH_IN_BITS;
    mask_pup_num_of_regs = tm->num_of_bus_per_interface;

    if (result_type == RESULT_PER_BIT) {
        for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
             index_cnt++) {
            CHECK_STATUS(ddr3_tip_if_write
                         (dev_num, access_type, interface_num,
                          mask_results_dq_reg_map[index_cnt], 0,
                          1 << 24));
        }
        /* Mask disabled buses */
        for (pup_id = 0; pup_id < tm->num_of_bus_per_interface;
             pup_id++) {
            if (IS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
                continue;

            /* mask all eight DQ result registers of this bus */
            for (index_cnt = pup_id * 8;
                 index_cnt < (pup_id + 1) * 8; index_cnt++) {
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, access_type,
                              interface_num,
                              mask_results_dq_reg_map
                              [index_cnt], (1 << 24), 1 << 24));
            }
        }
        for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
             index_cnt++) {
            CHECK_STATUS(ddr3_tip_if_write
                         (dev_num, access_type, interface_num,
                          mask_results_pup_reg_map[index_cnt],
                          (1 << 24), 1 << 24));
        }
    } else if (result_type == RESULT_PER_BYTE) {
        /* write to adll */
        for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
             index_cnt++) {
            CHECK_STATUS(ddr3_tip_if_write
                         (dev_num, access_type, interface_num,
                          mask_results_pup_reg_map[index_cnt], 0,
                          1 << 24));
        }
        for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
             index_cnt++) {
            CHECK_STATUS(ddr3_tip_if_write
                         (dev_num, access_type, interface_num,
                          mask_results_dq_reg_map[index_cnt],
                          (1 << 24), (1 << 24)));
        }
    }
    /* Start Training Trigger */
    CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
                                   ODPG_TRAINING_TRIGGER_REG, 1, 1));
    /* wait for all RFU tests to finish (or timeout) */
    /* WA for 16-bit mode; needs more investigation */
    mdelay(1);

    /* Training done? */
    for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
        if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
            continue;

        if (interface_mask & (1 << index_cnt)) {
            /* need to check results for this Dunit */
            for (poll_cnt = 0; poll_cnt < max_polling_for_done;
                 poll_cnt++) {
                CHECK_STATUS(ddr3_tip_if_read
                             (dev_num, ACCESS_TYPE_UNICAST,
                              index_cnt,
                              ODPG_TRAINING_STATUS_REG,
                              read_data, MASK_ALL_BITS));
                reg_data = read_data[index_cnt];
                if ((reg_data & 0x2) != 0) {
                    /* done */
                    train_status[index_cnt] =
                        HWS_TRAINING_IP_STATUS_SUCCESS;
                    break;
                }
            }

            if (poll_cnt == max_polling_for_done) {
                train_status[index_cnt] =
                    HWS_TRAINING_IP_STATUS_TIMEOUT;
            }
        }
        /* Make sure the ODPG is done */
        CHECK_STATUS(is_odpg_access_done(dev_num, index_cnt));
    }
    /* Write ODPG done in Dunit */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_STATUS_DONE_REG, 0, 0x1));

    /* wait for all Dunit tests to finish (or timeout) */
    /* Training done? Training passed? */
    for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
        if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
            continue;

        if (interface_mask & (1 << index_cnt)) {
            /* need to check results for this Dunit */
            for (poll_cnt = 0; poll_cnt < max_polling_for_done;
                 poll_cnt++) {
                CHECK_STATUS(ddr3_tip_if_read
                             (dev_num, ACCESS_TYPE_UNICAST,
                              index_cnt,
                              ODPG_TRAINING_TRIGGER_REG,
                              read_data, MASK_ALL_BITS));
                reg_data = read_data[index_cnt];
                if ((reg_data & 0x2) != 0) {
                    /* done */
                    if ((reg_data & 0x4) == 0) {
                        train_status[index_cnt] =
                            HWS_TRAINING_IP_STATUS_SUCCESS;
                    } else {
                        train_status[index_cnt] =
                            HWS_TRAINING_IP_STATUS_FAIL;
                    }
                    break;
                }
            }

            if (poll_cnt == max_polling_for_done) {
                train_status[index_cnt] =
                    HWS_TRAINING_IP_STATUS_TIMEOUT;
            }
        }
    }

    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

    return MV_OK;
}

/*
 * Load expected Pattern to ODPG
 */
int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
                                  u32 if_id, enum hws_pattern pattern,
                                  u32 load_addr)
{
    u32 pattern_length_cnt = 0;
    struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();

    for (pattern_length_cnt = 0;
         pattern_length_cnt < pattern_table[pattern].pattern_len;
         pattern_length_cnt++) {
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, if_id,
                      ODPG_PATTERN_DATA_LOW_REG,
                      pattern_table_get_word(dev_num, pattern,
                                             (u8) (pattern_length_cnt * 2)),
                      MASK_ALL_BITS));
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, if_id,
                      ODPG_PATTERN_DATA_HI_REG,
                      pattern_table_get_word(dev_num, pattern,
                                             (u8) (pattern_length_cnt * 2 + 1)),
                      MASK_ALL_BITS));
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, if_id,
                      ODPG_PATTERN_ADDR_REG, pattern_length_cnt,
                      MASK_ALL_BITS));
    }

    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, access_type, if_id,
                  ODPG_PATTERN_ADDR_OFFSET_REG, load_addr, MASK_ALL_BITS));

    return MV_OK;
}

/*
 * Configure ODPG
 */
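/*
 * Field offsets in ODPG_DATA_CONTROL_REG, as implied by the shifts in
 * the body below: single_pattern at bit 2, tx_phases at bit 5,
 * tx_burst_size at bit 11, delay_between_burst at bit 15, rx_phases at
 * bit 21, rd_mode at bit 25, cs_num at bit 26, addr_stress_jump at
 * bit 29. The write mask 0xaffffffc leaves bits 0-1, 28 and 30
 * unmodified.
 */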
int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
                            u32 if_id, enum hws_dir direction, u32 tx_phases,
                            u32 tx_burst_size, u32 rx_phases,
                            u32 delay_between_burst, u32 rd_mode, u32 cs_num,
                            u32 addr_stress_jump, u32 single_pattern)
{
    u32 data_value = 0;
    int ret;

    data_value = ((single_pattern << 2) | (tx_phases << 5) |
                  (tx_burst_size << 11) | (delay_between_burst << 15) |
                  (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
                  (addr_stress_jump << 29));
    ret = ddr3_tip_if_write(dev_num, access_type, if_id,
                            ODPG_DATA_CONTROL_REG, data_value, 0xaffffffc);
    if (ret != MV_OK)
        return ret;

    return MV_OK;
}
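
/*
 * Scan the eight per-bit results of one bus: fail if any bit did not
 * lock; otherwise report, via *edge_result, the maximum or minimum tap
 * value across the bits, depending on e_edge_search.
 */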
int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
                            enum hws_edge_search e_edge_search,
                            u32 *edge_result)
{
    u32 i, res;
    int tap_val, max_val = -10000, min_val = 10000;
    int lock_success = 1;

    for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
        res = GET_LOCK_RESULT(ar_result[i]);
        if (res == 0) {
            lock_success = 0;
            DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                     ("lock failed for bit %d\n", i));
            break;
        }
    }

    if (lock_success == 1) {
        for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
            tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
            if (tap_val > max_val)
                max_val = tap_val;
            if (tap_val < min_val)
                min_val = tap_val;
            if (e_edge_search == TRAINING_EDGE_MAX)
                *edge_result = (u32) max_val;
            else
                *edge_result = (u32) min_val;
            DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                     ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d edge_result %d\n",
                                      i, ar_result[i], tap_val,
                                      max_val, min_val,
                                      *edge_result));
        }
    } else {
        return MV_FAIL;
    }

    return MV_OK;
}

/*
 * Read training search result
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
                                  enum hws_access_type pup_access_type,
                                  u32 pup_num, u32 bit_num,
                                  enum hws_search_dir search,
                                  enum hws_dir direction,
                                  enum hws_training_result result_type,
                                  enum hws_training_load_op operation,
                                  u32 cs_num_type, u32 **load_res,
                                  int is_read_from_db, u8 cons_tap,
                                  int is_check_result_validity)
{
    u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
    u32 *interface_train_res = NULL;
    u16 *reg_addr = NULL;
    u32 read_data[MAX_INTERFACE_NUM];
    u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
    u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
    struct hws_topology_map *tm = ddr3_get_topology_map();

    /*
     * Agreed assumption: all CS masks contain the same number of bits,
     * i.e. in multi-CS, the number of CSs per memory is the same for
     * all pups.
     */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_UNICAST, if_id, CS_ENABLE_REG,
                  (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_UNICAST, if_id,
                  ODPG_DATA_CONTROL_REG, (cs_num_type << 26), (3 << 26)));
    DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
                             ("read_from_db %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
                              is_read_from_db, cs_num_type, operation,
                              result_type, direction, search, pup_num,
                              if_id, pup_access_type));
    if ((load_res == NULL) && (is_read_from_db == 1)) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("ddr3_tip_read_training_result load_res = NULL"));
        return MV_FAIL;
    }
    if (pup_num >= tm->num_of_bus_per_interface) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("pup_num %d not valid\n", pup_num));
    }
    if (if_id >= MAX_INTERFACE_NUM) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("if_id %d not valid\n", if_id));
    }

    if (result_type == RESULT_PER_BIT)
        reg_addr = mask_results_dq_reg_map;
    else
        reg_addr = mask_results_pup_reg_map;

    if (pup_access_type == ACCESS_TYPE_UNICAST) {
        start_pup = pup_num;
        end_pup = pup_num;
    } else { /* pup_access_type == ACCESS_TYPE_MULTICAST */
        start_pup = 0;
        end_pup = tm->num_of_bus_per_interface - 1;
    }

    for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
        VALIDATE_ACTIVE(tm->bus_act_mask, pup_cnt);
        DEBUG_TRAINING_IP_ENGINE(
            DEBUG_LEVEL_TRACE,
            ("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
             if_id, start_pup, end_pup, pup_cnt));
        if (result_type == RESULT_PER_BIT) {
            if (bit_num == ALL_BITS_PER_PUP) {
                start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
                end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
            } else {
                start_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
                end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
            }
        } else {
            start_reg = pup_cnt;
            end_reg = pup_cnt;
        }

        interface_train_res =
            ddr3_tip_get_buf_ptr(dev_num, search, result_type, if_id);
        DEBUG_TRAINING_IP_ENGINE(
            DEBUG_LEVEL_TRACE,
            ("start_reg %d end_reg %d interface %p\n",
             start_reg, end_reg, interface_train_res));
        if (interface_train_res == NULL) {
            DEBUG_TRAINING_IP_ENGINE(
                DEBUG_LEVEL_ERROR,
                ("interface_train_res is NULL\n"));
            return MV_FAIL;
        }

        for (reg_offset = start_reg; reg_offset <= end_reg;
             reg_offset++) {
            if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
                if (is_read_from_db == 0) {
                    CHECK_STATUS(ddr3_tip_if_read
                                 (dev_num,
                                  ACCESS_TYPE_UNICAST,
                                  if_id,
                                  reg_addr[reg_offset],
                                  read_data,
                                  MASK_ALL_BITS));
                    if (is_check_result_validity == 1) {
                        if ((read_data[if_id] &
                             0x02000000) == 0) {
                            interface_train_res[reg_offset] =
                                0x02000000 + 64 + cons_tap;
                        } else {
                            interface_train_res[reg_offset] =
                                read_data[if_id] + cons_tap;
                        }
                    } else {
                        interface_train_res[reg_offset] =
                            read_data[if_id] + cons_tap;
                    }
                    DEBUG_TRAINING_IP_ENGINE(
                        DEBUG_LEVEL_TRACE,
                        ("reg_offset %d value 0x%x addr %p\n",
                         reg_offset,
                         interface_train_res[reg_offset],
                         &interface_train_res[reg_offset]));
                } else {
                    *load_res = &interface_train_res[start_reg];
                    DEBUG_TRAINING_IP_ENGINE(
                        DEBUG_LEVEL_TRACE,
                        ("*load_res %p\n", *load_res));
                }
            } else {
                DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
                                         ("not supported\n"));
            }
        }
    }

    return MV_OK;
}

/*
 * Load all patterns to memory using the ODPG
 */
int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
{
    u32 pattern = 0, if_id;
    struct hws_topology_map *tm = ddr3_get_topology_map();

    for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
        VALIDATE_ACTIVE(tm->if_act_mask, if_id);
        training_result[training_stage][if_id] = TEST_SUCCESS;
    }

    for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
        VALIDATE_ACTIVE(tm->if_act_mask, if_id);
        /* enable single cs */
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
                      CS_ENABLE_REG, (1 << 3), (1 << 3)));
    }

    for (pattern = 0; pattern < PATTERN_LIMIT; pattern++)
        ddr3_tip_load_pattern_to_mem(dev_num, pattern);

    return MV_OK;
}

/*
 * Wait until the ODPG access is done
 */
int is_odpg_access_done(u32 dev_num, u32 if_id)
{
    u32 poll_cnt = 0, data_value;
    u32 read_data[MAX_INTERFACE_NUM];

    for (poll_cnt = 0; poll_cnt < MAX_POLLING_ITERATIONS; poll_cnt++) {
        CHECK_STATUS(ddr3_tip_if_read
                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
                      ODPG_BIST_DONE, read_data, MASK_ALL_BITS));
        data_value = read_data[if_id];
        if (((data_value >> ODPG_BIST_DONE_BIT_OFFS) & 0x1) ==
            ODPG_BIST_DONE_BIT_VALUE) {
            data_value = data_value & 0xfffffffe;
            CHECK_STATUS(ddr3_tip_if_write
                         (dev_num, ACCESS_TYPE_UNICAST,
                          if_id, ODPG_BIST_DONE, data_value,
                          MASK_ALL_BITS));
            break;
        }
    }

    if (poll_cnt >= MAX_POLLING_ITERATIONS) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("Bist Activate: poll failure 2\n"));
        return MV_FAIL;
    }

    return MV_OK;
}

/*
 * Load specific pattern to memory using the ODPG
 */
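/*
 * Flow, as read from the body below: program the ODPG for a Tx burst
 * of the requested pattern, enable ODPG writes from BIST, disable
 * error injection, load the pattern words into the ODPG, start the
 * engine, wait for BIST-done on every active interface, then disable
 * the ODPG and return the data-control register to its default.
 */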
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
    u32 reg_data, if_id;
    struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
    struct hws_topology_map *tm = ddr3_get_topology_map();

    /* load pattern to memory */
    /*
     * Write Tx mode, CS0, phases, Tx burst size, delay between bursts,
     * rx pattern phases
     */
    reg_data = 0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
        (pattern_table[pattern].tx_burst_size << 11) |
        (pattern_table[pattern].delay_between_bursts << 15) |
        (pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
        (effective_cs << 26);
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_DATA_CONTROL_REG, reg_data, MASK_ALL_BITS));
    /* ODPG Write enable from BIST */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_DATA_CONTROL_REG, (0x1 | (effective_cs << 26)),
                  0xc000003));
    /* disable error injection */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_WRITE_DATA_ERROR_REG, 0, 0x1));
    /* load pattern to ODPG */
    ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
                                  PARAM_NOT_CARE, pattern,
                                  pattern_table[pattern].start_addr);

    for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
        if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
            continue;

        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1498,
                      0x3, 0xf));
    }

    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_ENABLE_REG, 0x1 << ODPG_ENABLE_OFFS,
                  (0x1 << ODPG_ENABLE_OFFS)));

    mdelay(1);

    for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
        VALIDATE_ACTIVE(tm->if_act_mask, if_id);
        CHECK_STATUS(is_odpg_access_done(dev_num, if_id));
    }

    /* Disable ODPG and stop write to memory */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_DATA_CONTROL_REG, (0x1 << 30), (u32) (0x3 << 30)));

    /* return to default */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
                  ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

    /* Disable odt0 for CS0 training - need to adjust for multi-CS */
    CHECK_STATUS(ddr3_tip_if_write
                 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
                  0x0, 0xf));

    /* temporarily added */
    mdelay(1);

    return MV_OK;
}

/*
 * Load specific pattern to memory using the CPU
 */
int ddr3_tip_load_pattern_to_mem_by_cpu(u32 dev_num, enum hws_pattern pattern,
                                        u32 offset)
{
    /* eranba - TBD */
    return MV_OK;
}

/*
 * Training search routine
 */
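/*
 * Note on EDGE_FPF: as implemented below, a fail-pass-fail request is
 * expanded into two runs, a low-to-high search followed by a
 * high-to-low search, each performed as a plain fail-to-pass (EDGE_FP)
 * search with its own initial value (init_value_l2h or init_value_h2l).
 * Results are unloaded after each run.
 */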
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
                                     enum hws_access_type access_type,
                                     u32 if_id,
                                     enum hws_access_type pup_access_type,
                                     u32 pup_num, u32 bit_num,
                                     enum hws_training_result result_type,
                                     enum hws_control_element control_element,
                                     enum hws_search_dir search_dir,
                                     enum hws_dir direction,
                                     u32 interface_mask, u32 init_value_l2h,
                                     u32 init_value_h2l, u32 num_iter,
                                     enum hws_pattern pattern,
                                     enum hws_edge_compare edge_comp,
                                     enum hws_ddr_cs train_cs_type, u32 cs_num,
                                     enum hws_training_ip_stat *train_status)
{
    u32 interface_num = 0, start_if, end_if, init_value_used;
    enum hws_search_dir search_dir_id, start_search, end_search;
    enum hws_edge_compare edge_comp_used;
    u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
    struct hws_topology_map *tm = ddr3_get_topology_map();

    if (train_status == NULL) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("train_status is NULL\n"));
        return MV_FAIL;
    }

    if ((train_cs_type > CS_NON_SINGLE) ||
        (edge_comp >= EDGE_PFP) ||
        (pattern >= PATTERN_LIMIT) ||
        (direction > OPER_WRITE_AND_READ) ||
        (search_dir > HWS_HIGH2LOW) ||
        (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
        (result_type > RESULT_PER_BYTE) ||
        (pup_num >= tm->num_of_bus_per_interface) ||
        (pup_access_type > ACCESS_TYPE_MULTICAST) ||
        (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
        DEBUG_TRAINING_IP_ENGINE(
            DEBUG_LEVEL_ERROR,
            ("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
             train_cs_type, edge_comp, pattern, direction,
             search_dir, control_element, result_type, pup_num,
             pup_access_type, if_id, access_type));
        return MV_FAIL;
    }

    if (edge_comp == EDGE_FPF) {
        start_search = HWS_LOW2HIGH;
        end_search = HWS_HIGH2LOW;
        edge_comp_used = EDGE_FP;
    } else {
        start_search = search_dir;
        end_search = search_dir;
        edge_comp_used = edge_comp;
    }

    for (search_dir_id = start_search; search_dir_id <= end_search;
         search_dir_id++) {
        init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
            init_value_l2h : init_value_h2l;
        DEBUG_TRAINING_IP_ENGINE(
            DEBUG_LEVEL_TRACE,
            ("dev_num %d, access_type %d, if_id %d, pup_access_type %d, pup_num %d, result_type %d, control_element %d, search_dir_id %d, direction %d, interface_mask %d, init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
             dev_num, access_type, if_id, pup_access_type, pup_num,
             result_type, control_element, search_dir_id,
             direction, interface_mask, init_value_used, num_iter,
             pattern, edge_comp_used, train_cs_type, cs_num));
        ddr3_tip_ip_training(dev_num, access_type, if_id,
                             pup_access_type, pup_num, result_type,
                             control_element, search_dir_id, direction,
                             interface_mask, init_value_used, num_iter,
                             pattern, edge_comp_used, train_cs_type,
                             cs_num, train_status);
        if (access_type == ACCESS_TYPE_MULTICAST) {
            start_if = 0;
            end_if = MAX_INTERFACE_NUM - 1;
        } else {
            start_if = if_id;
            end_if = if_id;
        }

        for (interface_num = start_if; interface_num <= end_if;
             interface_num++) {
            VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
            cs_num = 0;
            CHECK_STATUS(ddr3_tip_read_training_result
                         (dev_num, interface_num, pup_access_type,
                          pup_num, bit_num, search_dir_id,
                          direction, result_type,
                          TRAINING_LOAD_OPERATION_UNLOAD,
                          train_cs_type, NULL, 0, cons_tap,
                          0));
        }
    }

    return MV_OK;
}

/*
 * Training search & read result routine
 */
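/*
 * As implemented below: run the search on all bits first, then, for
 * any bit whose l2h/h2l edge pair trips VALIDATE_TRAINING_LIMIT while
 * both searches locked, re-run each search direction separately as an
 * EDGE_FP search (init value and iteration count of num_iter / 2) and
 * reload only the flagged bits' results.
 */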
int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
                                 u32 if_id,
                                 enum hws_access_type pup_access_type,
                                 u32 pup_num,
                                 enum hws_training_result result_type,
                                 enum hws_control_element control_element,
                                 enum hws_search_dir search_dir,
                                 enum hws_dir direction, u32 interface_mask,
                                 u32 init_value_l2h, u32 init_value_h2l,
                                 u32 num_iter, enum hws_pattern pattern,
                                 enum hws_edge_compare edge_comp,
                                 enum hws_ddr_cs train_cs_type, u32 cs_num,
                                 enum hws_training_ip_stat *train_status)
{
    u8 e1, e2;
    u32 interface_cnt, bit_id, start_if, end_if, bit_end = 0;
    u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
    u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
    u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
    u8 pup_id;
    struct hws_topology_map *tm = ddr3_get_topology_map();

    if (pup_num >= tm->num_of_bus_per_interface) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("pup_num %d not valid\n", pup_num));
    }

    if (if_id >= MAX_INTERFACE_NUM) {
        DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
                                 ("if_id %d not valid\n", if_id));
    }

    CHECK_STATUS(ddr3_tip_ip_training_wrapper_int
                 (dev_num, access_type, if_id, pup_access_type, pup_num,
                  ALL_BITS_PER_PUP, result_type, control_element,
                  search_dir, direction, interface_mask, init_value_l2h,
                  init_value_h2l, num_iter, pattern, edge_comp,
                  train_cs_type, cs_num, train_status));

    if (access_type == ACCESS_TYPE_MULTICAST) {
        start_if = 0;
        end_if = MAX_INTERFACE_NUM - 1;
    } else {
        start_if = if_id;
        end_if = if_id;
    }

    for (interface_cnt = start_if; interface_cnt <= end_if;
         interface_cnt++) {
        VALIDATE_ACTIVE(tm->if_act_mask, interface_cnt);
        for (pup_id = 0;
             pup_id <= (tm->num_of_bus_per_interface - 1); pup_id++) {
            VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
            if (result_type == RESULT_PER_BIT)
                bit_end = BUS_WIDTH_IN_BITS - 1;
            else
                bit_end = 0;

            bit_bit_mask[pup_id] = 0;
            for (bit_id = 0; bit_id <= bit_end; bit_id++) {
                enum hws_search_dir search_dir_id;

                for (search_dir_id = HWS_LOW2HIGH;
                     search_dir_id <= HWS_HIGH2LOW;
                     search_dir_id++) {
                    CHECK_STATUS
                        (ddr3_tip_read_training_result
                         (dev_num, interface_cnt,
                          ACCESS_TYPE_UNICAST, pup_id,
                          bit_id, search_dir_id,
                          direction, result_type,
                          TRAINING_LOAD_OPERATION_UNLOAD,
                          CS_SINGLE,
                          &result[search_dir_id],
                          1, 0, 0));
                }
                e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0], EDGE_1);
                e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0], EDGE_1);
                DEBUG_TRAINING_IP_ENGINE(
                    DEBUG_LEVEL_INFO,
                    ("wrapper if_id %d pup_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
                     interface_cnt, pup_id, bit_id,
                     result[HWS_LOW2HIGH][0], e1,
                     result[HWS_HIGH2LOW][0], e2));
                /* TBD validate is valid only for tx */
                if (VALIDATE_TRAINING_LIMIT(e1, e2) == 1 &&
                    GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
                    GET_LOCK_RESULT(result[HWS_HIGH2LOW][0])) {
                    /* Mark problem bits */
                    bit_bit_mask[pup_id] |= 1 << bit_id;
                    bit_bit_mask_active = 1;
                }
            } /* For all bits */
        } /* For all PUPs */

        /* Fix problem bits */
        if (bit_bit_mask_active != 0) {
            u32 *l2h_if_train_res = NULL;
            u32 *h2l_if_train_res = NULL;

            l2h_if_train_res =
                ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH, result_type,
                                     interface_cnt);
            h2l_if_train_res =
                ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW, result_type,
                                     interface_cnt);

            ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
                                 interface_cnt,
                                 ACCESS_TYPE_MULTICAST,
                                 PARAM_NOT_CARE, result_type,
                                 control_element, HWS_LOW2HIGH,
                                 direction, interface_mask,
                                 num_iter / 2, num_iter / 2,
                                 pattern, EDGE_FP, train_cs_type,
                                 cs_num, train_status);

            for (pup_id = 0;
                 pup_id <= (tm->num_of_bus_per_interface - 1);
                 pup_id++) {
                VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);

                if (bit_bit_mask[pup_id] == 0)
                    continue;

                for (bit_id = 0; bit_id <= bit_end; bit_id++) {
                    if ((bit_bit_mask[pup_id] &
                         (1 << bit_id)) == 0)
                        continue;

                    CHECK_STATUS
                        (ddr3_tip_read_training_result
                         (dev_num, interface_cnt,
                          ACCESS_TYPE_UNICAST, pup_id,
                          bit_id, HWS_LOW2HIGH,
                          direction,
                          result_type,
                          TRAINING_LOAD_OPERATION_UNLOAD,
                          CS_SINGLE, &l2h_if_train_res,
                          0, 0, 1));
                }
            }

            ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
                                 interface_cnt,
                                 ACCESS_TYPE_MULTICAST,
                                 PARAM_NOT_CARE, result_type,
                                 control_element, HWS_HIGH2LOW,
                                 direction, interface_mask,
                                 num_iter / 2, num_iter / 2,
                                 pattern, EDGE_FP, train_cs_type,
                                 cs_num, train_status);

            for (pup_id = 0;
                 pup_id <= (tm->num_of_bus_per_interface - 1);
                 pup_id++) {
                VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);

                if (bit_bit_mask[pup_id] == 0)
                    continue;

                for (bit_id = 0; bit_id <= bit_end; bit_id++) {
                    if ((bit_bit_mask[pup_id] &
                         (1 << bit_id)) == 0)
                        continue;

                    CHECK_STATUS
                        (ddr3_tip_read_training_result
                         (dev_num, interface_cnt,
                          ACCESS_TYPE_UNICAST, pup_id,
                          bit_id, HWS_HIGH2LOW, direction,
                          result_type,
                          TRAINING_LOAD_OPERATION_UNLOAD,
                          CS_SINGLE, &h2l_if_train_res,
                          0, cons_tap, 1));
                }
            }
        } /* if bit_bit_mask_active */
    } /* For all interfaces */

    return MV_OK;
}

/*
 * Load PHY values
 */
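/*
 * With b_load == 1 this backs up, per active interface and bus, the
 * write-centralization, read-leveling (RL) and read-centralization PHY
 * registers of the current effective_cs into phy_reg_bk; any other
 * value restores them from that backup.
 */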
int ddr3_tip_load_phy_values(int b_load)
{
    u32 bus_cnt = 0, if_id, dev_num = 0;
    struct hws_topology_map *tm = ddr3_get_topology_map();

    for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
        VALIDATE_ACTIVE(tm->if_act_mask, if_id);
        for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
             bus_cnt++) {
            VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
            if (b_load == 1) {
                CHECK_STATUS(ddr3_tip_bus_read
                             (dev_num, if_id,
                              ACCESS_TYPE_UNICAST, bus_cnt,
                              DDR_PHY_DATA,
                              WRITE_CENTRALIZATION_PHY_REG +
                              (effective_cs * CS_REGISTER_ADDR_OFFSET),
                              &phy_reg_bk[if_id][bus_cnt][0]));
                CHECK_STATUS(ddr3_tip_bus_read
                             (dev_num, if_id,
                              ACCESS_TYPE_UNICAST, bus_cnt,
                              DDR_PHY_DATA,
                              RL_PHY_REG +
                              (effective_cs * CS_REGISTER_ADDR_OFFSET),
                              &phy_reg_bk[if_id][bus_cnt][1]));
                CHECK_STATUS(ddr3_tip_bus_read
                             (dev_num, if_id,
                              ACCESS_TYPE_UNICAST, bus_cnt,
                              DDR_PHY_DATA,
                              READ_CENTRALIZATION_PHY_REG +
                              (effective_cs * CS_REGISTER_ADDR_OFFSET),
                              &phy_reg_bk[if_id][bus_cnt][2]));
            } else {
                CHECK_STATUS(ddr3_tip_bus_write
                             (dev_num, ACCESS_TYPE_UNICAST,
                              if_id, ACCESS_TYPE_UNICAST,
                              bus_cnt, DDR_PHY_DATA,
                              WRITE_CENTRALIZATION_PHY_REG +
                              (effective_cs * CS_REGISTER_ADDR_OFFSET),
                              phy_reg_bk[if_id][bus_cnt][0]));
                CHECK_STATUS(ddr3_tip_bus_write
                             (dev_num, ACCESS_TYPE_UNICAST,
                              if_id, ACCESS_TYPE_UNICAST,
                              bus_cnt, DDR_PHY_DATA,
                              RL_PHY_REG +
                              (effective_cs * CS_REGISTER_ADDR_OFFSET),
                              phy_reg_bk[if_id][bus_cnt][1]));
                CHECK_STATUS(ddr3_tip_bus_write
                             (dev_num, ACCESS_TYPE_UNICAST,
                              if_id, ACCESS_TYPE_UNICAST,
                              bus_cnt, DDR_PHY_DATA,
                              READ_CENTRALIZATION_PHY_REG +
                              (effective_cs * CS_REGISTER_ADDR_OFFSET),
                              phy_reg_bk[if_id][bus_cnt][2]));
            }
        }
    }

    return MV_OK;
}
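
/*
 * Training IP self-test: for every pattern in [start_pattern,
 * end_pattern] and every search direction, run the training wrapper on
 * all interfaces and dump the unloaded per-pup results. PHY registers
 * are backed up first and restored at the end via
 * ddr3_tip_load_phy_values().
 */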
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
                              enum hws_search_dir search_dir,
                              enum hws_dir direction,
                              enum hws_edge_compare edge,
                              u32 init_val1, u32 init_val2,
                              u32 num_of_iterations,
                              u32 start_pattern, u32 end_pattern)
{
    u32 pattern, if_id, pup_id;
    enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
    u32 *res = NULL;
    u32 search_state = 0;
    struct hws_topology_map *tm = ddr3_get_topology_map();

    ddr3_tip_load_phy_values(1);

    for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
        for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
             search_state++) {
            ddr3_tip_ip_training_wrapper(dev_num,
                                         ACCESS_TYPE_MULTICAST, 0,
                                         ACCESS_TYPE_MULTICAST, 0,
                                         result_type,
                                         HWS_CONTROL_ELEMENT_ADLL,
                                         search_dir, direction,
                                         0xfff, init_val1,
                                         init_val2,
                                         num_of_iterations, pattern,
                                         edge, CS_SINGLE,
                                         PARAM_NOT_CARE,
                                         train_status);

            for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
                 if_id++) {
                VALIDATE_ACTIVE(tm->if_act_mask, if_id);
                for (pup_id = 0;
                     pup_id < tm->num_of_bus_per_interface;
                     pup_id++) {
                    VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
                    CHECK_STATUS
                        (ddr3_tip_read_training_result
                         (dev_num, if_id,
                          ACCESS_TYPE_UNICAST, pup_id,
                          ALL_BITS_PER_PUP,
                          search_state,
                          direction, result_type,
                          TRAINING_LOAD_OPERATION_UNLOAD,
                          CS_SINGLE, &res, 1, 0,
                          0));
                    if (result_type == RESULT_PER_BYTE) {
                        DEBUG_TRAINING_IP_ENGINE
                            (DEBUG_LEVEL_INFO,
                             ("search_state %d if_id %d pup_id %d 0x%x\n",
                              search_state, if_id,
                              pup_id, res[0]));
                    } else {
                        DEBUG_TRAINING_IP_ENGINE
                            (DEBUG_LEVEL_INFO,
                             ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
                              search_state, if_id,
                              pup_id, res[0],
                              res[1], res[2],
                              res[3], res[4],
                              res[5], res[6],
                              res[7]));
                    }
                }
            } /* interface */
        } /* search */
    } /* pattern */

    ddr3_tip_load_phy_values(0);

    return MV_OK;
}

struct pattern_info *ddr3_tip_get_pattern_table(void)
{
    struct hws_topology_map *tm = ddr3_get_topology_map();

    if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
        return pattern_table_32;
    else
        return pattern_table_16;
}

u16 *ddr3_tip_get_mask_results_dq_reg(void)
{
    struct hws_topology_map *tm = ddr3_get_topology_map();

    if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
        return mask_results_dq_reg_map_pup3_ecc;
    else
        return mask_results_dq_reg_map;
}

u16 *ddr3_tip_get_mask_results_pup_reg_map(void)
{
    struct hws_topology_map *tm = ddr3_get_topology_map();

    if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
        return mask_results_pup_reg_map_pup3_ecc;
    else
        return mask_results_pup_reg_map;
}