// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>
#include "ddr3_init.h"

#define PATTERN_1	0x55555555
#define PATTERN_2	0xaaaaaaaa

#define VALIDATE_TRAINING_LIMIT(e1, e2) \
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))
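
/*
 * The check above flags an edge pair as a training violation when the
 * window [e1, e2] is wider than 33 taps while still opening below tap
 * 67. Worked example: e1 = 10, e2 = 50 gives a width of 41 > 33 with
 * e1 < 67, so the bit is marked as a problem bit (the 33/67 limits
 * are presumably tuned to this PHY's ADLL tap range).
 */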

u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];

u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};

u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
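
/*
 * Variants of the result-register maps used when ECC is connected on
 * PUP 3. Note that both the PUP_3 and PUP_4 slots below point at the
 * PUP_4 result registers, presumably because the ECC byte is muxed
 * onto PUP 4 in this mode (an assumption based on the map contents,
 * not on Marvell documentation).
 */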
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};

u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};

struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16}	/* PATTERN_VREF */
	/* Note: actual start_address is <<3 of the defined address */
};

struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32}	/* PATTERN_VREF */
	/* Note: actual start_address is <<3 of the defined address */
};
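
/*
 * Example of the <<3 note above: PATTERN_PBS1 has start_address 0x0080,
 * so the pattern actually lands at SDRAM address 0x0080 << 3 = 0x400.
 */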

u32 train_dev_num;
enum hws_ddr_cs train_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir train_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_access, train_if_id, train_pup_access;
u32 max_polling_for_done = 1000000;
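
/*
 * training_res is laid out as one block of
 * MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS results per
 * search direction; the helper below returns the slice that belongs to
 * the given search direction and interface (result_type and dev_num do
 * not affect the offset).
 */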
u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
			  enum hws_training_result result_type,
			  u32 interface_num)
{
	u32 *buf_ptr = NULL;

	buf_ptr = &training_res
		[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
		 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];

	return buf_ptr;
}

/*
 * IP Training search
 * Note: for a one-edge search, search only from fail to pass;
 * otherwise jitter can be entered into the solution.
 */
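/*
 * Rough sequence, as implemented below: select the chip-select(s),
 * load the pattern into the ODPG, configure the ODPG burst/phase
 * parameters, program the edge-compare criteria and search direction,
 * unmask the relevant result registers, trigger training, then poll
 * each active interface for done/pass/fail.
 */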
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
			 u32 interface_num,
			 enum hws_access_type pup_access_type,
			 u32 pup_num, enum hws_training_result result_type,
			 enum hws_control_element control_element,
			 enum hws_search_dir search_dir, enum hws_dir direction,
			 u32 interface_mask, u32 init_value, u32 num_iter,
			 enum hws_pattern pattern,
			 enum hws_edge_compare edge_comp,
			 enum hws_ddr_cs cs_type, u32 cs_num,
			 enum hws_training_ip_stat *train_status)
{
	u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt, poll_cnt,
		reg_data, pup_id;
	u32 tx_burst_size;
	u32 delay_between_burst;
	u32 rd_mode;
	u32 read_data[MAX_INTERFACE_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (interface_num >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n",
					  interface_num));
	}
	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("error param 4\n"));
		return MV_BAD_PARAM;
	}

	/* load pattern */
	if (cs_type == CS_SINGLE) {
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      CS_ENABLE_REG, 1 << 3, 1 << 3));
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CONTROL_REG,
			      (0x3 | (effective_cs << 26)), 0xc000003));
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      CS_ENABLE_REG, 0, 1 << 3));
		/* CS select */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CONTROL_REG, 0x3 | cs_num << 26,
			      0x3 | 3 << 26));
	}

	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
				      pattern,
				      pattern_table[pattern].start_addr);
	tx_burst_size = (direction == OPER_WRITE) ?
		pattern_table[pattern].tx_burst_size : 0;
	delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
	rd_mode = (direction == OPER_WRITE) ? 1 : 0;
	CHECK_STATUS(ddr3_tip_configure_odpg
		     (dev_num, access_type, interface_num, direction,
		      pattern_table[pattern].num_of_phases_tx, tx_burst_size,
		      pattern_table[pattern].num_of_phases_rx,
		      delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
		      DURATION_SINGLE));
	reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
	reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_WRITE_READ_MODE_ENABLE_REG, reg_data,
		      MASK_ALL_BITS));

	reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
	reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
		(1 << 7) : 0;

	/* change from Pass to Fail will lock the result */
	if (pup_access_type == ACCESS_TYPE_MULTICAST)
		reg_data |= 0xe << 14;
	else
		reg_data |= pup_num << 14;

	if (edge_comp == EDGE_FP) {
		/* don't search for a real edge change, only the state */
		reg_data |= (0 << 20);
	} else if (edge_comp == EDGE_FPF) {
		reg_data |= (0 << 20);
	} else {
		reg_data |= (3 << 20);
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_TRAINING_CONTROL_REG,
		      reg_data | (0x7 << 8) | (0x7 << 11),
		      (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
		       (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
	reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, ODPG_OBJ1_OPCODE_REG,
		      1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
		      0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

	/*
	 * Write2_dunit(0x10b4, Number_iteration, [15:0])
	 * Max number of iterations
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       ODPG_OBJ1_ITER_CNT_REG, num_iter,
				       0xffff));

	if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
	    direction == OPER_READ) {
		/*
		 * Write2_dunit(0x10c0, 0x5f, [7:0])
		 * MC PBS Reg Address at DDR PHY
		 */
		reg_data = 0x5f +
			effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
		   direction == OPER_WRITE) {
		reg_data = 0x1f +
			effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_WRITE) {
		/*
		 * LOOP 0x00000001 + 4*n:
		 * where n (0-3) represents M_CS number
		 */
		/*
		 * Write2_dunit(0x10c0, 0x1, [7:0])
		 * ADLL WR Reg Address at DDR PHY
		 */
		reg_data = 1 + effective_cs * CS_REGISTER_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_READ) {
		/* ADLL RD Reg Address at DDR PHY */
		reg_data = 3 + effective_cs * CS_REGISTER_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_WRITE) {
		/* TBD not defined in 0.5.0 requirement */
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_READ) {
		/* TBD not defined in 0.5.0 requirement */
	}

	reg_data |= (0x6 << 28);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, CALIB_OBJ_PRFA_REG,
		      reg_data | (init_value << 8),
		      0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));

	mask_dq_num_of_regs = tm->num_of_bus_per_interface * BUS_WIDTH_IN_BITS;
	mask_pup_num_of_regs = tm->num_of_bus_per_interface;

	if (result_type == RESULT_PER_BIT) {
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt], 0,
				      1 << 24));
		}

		/* Mask disabled buses */
		for (pup_id = 0; pup_id < tm->num_of_bus_per_interface;
		     pup_id++) {
			if (IS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
				continue;

			/* mask the eight DQ result regs of this inactive pup */
			for (index_cnt = pup_id * 8;
			     index_cnt < (pup_id + 1) * 8;
			     index_cnt++) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type,
					      interface_num,
					      mask_results_dq_reg_map
					      [index_cnt], (1 << 24), 1 << 24));
			}
		}

		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt],
				      (1 << 24), 1 << 24));
		}
	} else if (result_type == RESULT_PER_BYTE) {
		/* write to adll */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt], 0,
				      1 << 24));
		}
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt],
				      (1 << 24), (1 << 24)));
		}
	}

	/* Start Training Trigger */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       ODPG_TRAINING_TRIGGER_REG, 1, 1));
	/* wait for all RFU tests to finish (or timeout) */
	/* WA for 16-bit mode, more investigation needed */
	mdelay(1);

	/* Training "Done ?" */
	for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
		if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
			continue;

		if (interface_mask & (1 << index_cnt)) {
			/* need to check results for this Dunit */
			for (poll_cnt = 0; poll_cnt < max_polling_for_done;
			     poll_cnt++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      index_cnt,
					      ODPG_TRAINING_STATUS_REG,
					      &reg_data, MASK_ALL_BITS));
				if ((reg_data & 0x2) != 0) {
					/* done */
					train_status[index_cnt] =
						HWS_TRAINING_IP_STATUS_SUCCESS;
					break;
				}
			}

			if (poll_cnt == max_polling_for_done) {
				train_status[index_cnt] =
					HWS_TRAINING_IP_STATUS_TIMEOUT;
			}
		}
		/* Be sure that ODPG done */
		CHECK_STATUS(is_odpg_access_done(dev_num, index_cnt));
	}

	/* Write ODPG done in Dunit */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_STATUS_DONE_REG, 0, 0x1));

	/* wait for all Dunit tests to finish (or timeout) */
	/* Training "Done ?" */
	/* Training "Pass ?" */
	for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
		if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
			continue;

		if (interface_mask & (1 << index_cnt)) {
			/* need to check results for this Dunit */
			for (poll_cnt = 0; poll_cnt < max_polling_for_done;
			     poll_cnt++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      index_cnt,
					      ODPG_TRAINING_TRIGGER_REG,
					      read_data, MASK_ALL_BITS));
				reg_data = read_data[index_cnt];
				if ((reg_data & 0x2) != 0) {
					/* done */
					if ((reg_data & 0x4) == 0) {
						train_status[index_cnt] =
							HWS_TRAINING_IP_STATUS_SUCCESS;
					} else {
						train_status[index_cnt] =
							HWS_TRAINING_IP_STATUS_FAIL;
					}
					break;
				}
			}

			if (poll_cnt == max_polling_for_done) {
				train_status[index_cnt] =
					HWS_TRAINING_IP_STATUS_TIMEOUT;
			}
		}
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

	return MV_OK;
}

/*
 * Load expected Pattern to ODPG
 */
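/*
 * Each pattern line is written as two 32-bit words (DATA_LOW, then
 * DATA_HI) followed by the ODPG line index; the destination offset in
 * memory is programmed last via ODPG_PATTERN_ADDR_OFFSET_REG.
 */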
int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
				  u32 if_id, enum hws_pattern pattern,
				  u32 load_addr)
{
	u32 pattern_length_cnt = 0;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();

	for (pattern_length_cnt = 0;
	     pattern_length_cnt < pattern_table[pattern].pattern_len;
	     pattern_length_cnt++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_PATTERN_DATA_LOW_REG,
			      pattern_table_get_word(dev_num, pattern,
						     (u8) (pattern_length_cnt *
							   2)), MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_PATTERN_DATA_HI_REG,
			      pattern_table_get_word(dev_num, pattern,
						     (u8) (pattern_length_cnt *
							   2 + 1)),
			      MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_PATTERN_ADDR_REG, pattern_length_cnt,
			      MASK_ALL_BITS));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id,
		      ODPG_PATTERN_ADDR_OFFSET_REG, load_addr, MASK_ALL_BITS));

	return MV_OK;
}

/*
 * Configure ODPG
 */
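/*
 * The ODPG_DATA_CONTROL_REG fields are packed below at the following
 * bit offsets (widths inferred from the shifts and the 0xaffffffc
 * write mask): single_pattern at bit 2, tx_phases at 5, tx_burst_size
 * at 11, delay_between_burst at 15, rx_phases at 21, rd_mode at 25,
 * cs_num at 26 and addr_stress_jump at 29.
 */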
int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
			    u32 if_id, enum hws_dir direction, u32 tx_phases,
			    u32 tx_burst_size, u32 rx_phases,
			    u32 delay_between_burst, u32 rd_mode, u32 cs_num,
			    u32 addr_stress_jump, u32 single_pattern)
{
	u32 data_value = 0;
	int ret;

	data_value = ((single_pattern << 2) | (tx_phases << 5) |
		      (tx_burst_size << 11) | (delay_between_burst << 15) |
		      (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
		      (addr_stress_jump << 29));
	ret = ddr3_tip_if_write(dev_num, access_type, if_id,
				ODPG_DATA_CONTROL_REG, data_value, 0xaffffffc);
	if (ret != MV_OK)
		return ret;

	return MV_OK;
}
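
/*
 * Reduce the eight per-bit results of one byte lane to a single edge
 * value: fail if any bit did not lock, otherwise report the maximum or
 * minimum tap across the byte, as selected by e_edge_search.
 */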
int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
			    enum hws_edge_search e_edge_search,
			    u32 *edge_result)
{
	u32 i, res;
	int tap_val, max_val = -10000, min_val = 10000;
	int lock_success = 1;

	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		res = GET_LOCK_RESULT(ar_result[i]);
		if (res == 0) {
			lock_success = 0;
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("lock failed for bit %d\n",
						  i));
			break;
		}
	}

	if (lock_success == 1) {
		for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
			tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
			if (tap_val > max_val)
				max_val = tap_val;
			if (tap_val < min_val)
				min_val = tap_val;
			if (e_edge_search == TRAINING_EDGE_MAX)
				*edge_result = (u32) max_val;
			else
				*edge_result = (u32) min_val;
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
						  i, ar_result[i], tap_val,
						  max_val, min_val,
						  *edge_result));
		}
	} else {
		return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Read training search result
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * Agreed assumption: all CS masks contain the same number of bits,
	 * i.e. in multi-CS, the number of CS per memory is the same for
	 * all pups
	 */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, CS_ENABLE_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CONTROL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL\n"));
		return MV_FAIL;
	}
	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;

	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else {	/* pup_access_type == ACCESS_TYPE_MULTICAST */
		start_pup = 0;
		end_pup = tm->num_of_bus_per_interface - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						if ((read_data[if_id] &
						     0x02000000) == 0) {
							interface_train_res
								[reg_offset] =
								0x02000000 +
								64 + cons_tap;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}

	return MV_OK;
}

/*
 * Load all patterns to memory using ODPG
 */
int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
{
	u32 pattern = 0, if_id;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		training_result[training_stage][if_id] = TEST_SUCCESS;
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, (1 << 3), (1 << 3)));
	}

	for (pattern = 0; pattern < PATTERN_LIMIT; pattern++)
		ddr3_tip_load_pattern_to_mem(dev_num, pattern);

	return MV_OK;
}

/*
 * Wait until the ODPG access has completed (or timed out)
 */
int is_odpg_access_done(u32 dev_num, u32 if_id)
{
	u32 poll_cnt = 0, data_value;
	u32 read_data[MAX_INTERFACE_NUM];

	for (poll_cnt = 0; poll_cnt < MAX_POLLING_ITERATIONS; poll_cnt++) {
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ODPG_BIST_DONE, read_data, MASK_ALL_BITS));
		data_value = read_data[if_id];

		if (((data_value >> ODPG_BIST_DONE_BIT_OFFS) & 0x1) ==
		    ODPG_BIST_DONE_BIT_VALUE) {
			data_value = data_value & 0xfffffffe;
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, ODPG_BIST_DONE, data_value,
				      MASK_ALL_BITS));
			break;
		}
	}

	if (poll_cnt >= MAX_POLLING_ITERATIONS) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("Bist Activate: poll failure 2\n"));
		return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Load specific pattern to memory using ODPG
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG Write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WRITE_DATA_ERROR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
			continue;

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1498,
			      0x3, 0xf));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_ENABLE_REG, 0x1 << ODPG_ENABLE_OFFS,
		      (0x1 << ODPG_ENABLE_OFFS)));
	mdelay(1);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(is_odpg_access_done(dev_num, if_id));
	}

	/* Disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, (0x1 << 30), (u32) (0x3 << 30)));

	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

	/* Disable odt0 for CS0 training - needs adjustment for multi-CS */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
		      0x0, 0xf));

	/* temporarily added */
	mdelay(1);

	return MV_OK;
}

/*
 * Load specific pattern to memory using CPU
 */
int ddr3_tip_load_pattern_to_mem_by_cpu(u32 dev_num, enum hws_pattern pattern,
					u32 offset)
{
	/* eranba - TBD */
	return MV_OK;
}

/*
 * Training search routine
 */
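/*
 * For EDGE_FPF the routine below runs the search twice, low-to-high and
 * then high-to-low, each time with EDGE_FP as the effective compare
 * mode; for any other edge_comp a single pass is made in the requested
 * direction.
 */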
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
				     enum hws_access_type access_type,
				     u32 if_id,
				     enum hws_access_type pup_access_type,
				     u32 pup_num, u32 bit_num,
				     enum hws_training_result result_type,
				     enum hws_control_element control_element,
				     enum hws_search_dir search_dir,
				     enum hws_dir direction,
				     u32 interface_mask, u32 init_value_l2h,
				     u32 init_value_h2l, u32 num_iter,
				     enum hws_pattern pattern,
				     enum hws_edge_compare edge_comp,
				     enum hws_ddr_cs train_cs_type, u32 cs_num,
				     enum hws_training_ip_stat *train_status)
{
	u32 interface_num = 0, start_if, end_if, init_value_used;
	enum hws_search_dir search_dir_id, start_search, end_search;
	enum hws_edge_compare edge_comp_used;
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("train_status is NULL\n"));
		return MV_FAIL;
	}

	if ((train_cs_type > CS_NON_SINGLE) ||
	    (edge_comp >= EDGE_PFP) ||
	    (pattern >= PATTERN_LIMIT) ||
	    (direction > OPER_WRITE_AND_READ) ||
	    (search_dir > HWS_HIGH2LOW) ||
	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
	    (result_type > RESULT_PER_BYTE) ||
	    (pup_num >= tm->num_of_bus_per_interface) ||
	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_ERROR,
			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
			 train_cs_type, edge_comp, pattern, direction,
			 search_dir, control_element, result_type, pup_num,
			 pup_access_type, if_id, access_type));
		return MV_FAIL;
	}

	if (edge_comp == EDGE_FPF) {
		start_search = HWS_LOW2HIGH;
		end_search = HWS_HIGH2LOW;
		edge_comp_used = EDGE_FP;
	} else {
		start_search = search_dir;
		end_search = search_dir;
		edge_comp_used = edge_comp;
	}

	for (search_dir_id = start_search; search_dir_id <= end_search;
	     search_dir_id++) {
		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
			init_value_l2h : init_value_h2l;
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("dev_num %d, access_type %d, if_id %d, pup_access_type %d, pup_num %d, result_type %d, control_element %d, search_dir_id %d, direction %d, interface_mask %d, init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
			 dev_num, access_type, if_id, pup_access_type, pup_num,
			 result_type, control_element, search_dir_id,
			 direction, interface_mask, init_value_used, num_iter,
			 pattern, edge_comp_used, train_cs_type, cs_num));
		ddr3_tip_ip_training(dev_num, access_type, if_id,
				     pup_access_type, pup_num, result_type,
				     control_element, search_dir_id, direction,
				     interface_mask, init_value_used, num_iter,
				     pattern, edge_comp_used, train_cs_type,
				     cs_num, train_status);
		if (access_type == ACCESS_TYPE_MULTICAST) {
			start_if = 0;
			end_if = MAX_INTERFACE_NUM - 1;
		} else {
			start_if = if_id;
			end_if = if_id;
		}

		for (interface_num = start_if; interface_num <= end_if;
		     interface_num++) {
			VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
			cs_num = 0;
			CHECK_STATUS(ddr3_tip_read_training_result
				     (dev_num, interface_num, pup_access_type,
				      pup_num, bit_num, search_dir_id,
				      direction, result_type,
				      TRAINING_LOAD_OPERATION_UNLOAD,
				      train_cs_type, NULL, 0, cons_tap,
				      0));
		}
	}

	return MV_OK;
}

/*
 * Training search & read result routine
 */
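/*
 * After the basic search, each bit's low-to-high/high-to-low edge pair
 * is checked with VALIDATE_TRAINING_LIMIT; bits with a suspect window
 * are collected in bit_bit_mask and re-trained per interface with
 * EDGE_FP and a halved iteration count before their results are
 * re-read.
 */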
int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
				 u32 if_id,
				 enum hws_access_type pup_access_type,
				 u32 pup_num,
				 enum hws_training_result result_type,
				 enum hws_control_element control_element,
				 enum hws_search_dir search_dir,
				 enum hws_dir direction, u32 interface_mask,
				 u32 init_value_l2h, u32 init_value_h2l,
				 u32 num_iter, enum hws_pattern pattern,
				 enum hws_edge_compare edge_comp,
				 enum hws_ddr_cs train_cs_type, u32 cs_num,
				 enum hws_training_ip_stat *train_status)
{
	u8 e1, e2;
	u32 interface_cnt, bit_id, start_if, end_if, bit_end = 0;
	u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
	u8 pup_id;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}

	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	CHECK_STATUS(ddr3_tip_ip_training_wrapper_int
		     (dev_num, access_type, if_id, pup_access_type, pup_num,
		      ALL_BITS_PER_PUP, result_type, control_element,
		      search_dir, direction, interface_mask, init_value_l2h,
		      init_value_h2l, num_iter, pattern, edge_comp,
		      train_cs_type, cs_num, train_status));

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	for (interface_cnt = start_if; interface_cnt <= end_if;
	     interface_cnt++) {
		VALIDATE_ACTIVE(tm->if_act_mask, interface_cnt);
		for (pup_id = 0;
		     pup_id <= (tm->num_of_bus_per_interface - 1); pup_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
			if (result_type == RESULT_PER_BIT)
				bit_end = BUS_WIDTH_IN_BITS - 1;
			else
				bit_end = 0;

			bit_bit_mask[pup_id] = 0;
			for (bit_id = 0; bit_id <= bit_end; bit_id++) {
				enum hws_search_dir search_dir_id;
				for (search_dir_id = HWS_LOW2HIGH;
				     search_dir_id <= HWS_HIGH2LOW;
				     search_dir_id++) {
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, interface_cnt,
						  ACCESS_TYPE_UNICAST, pup_id,
						  bit_id, search_dir_id,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE,
						  &result[search_dir_id],
						  1, 0, 0));
				}
				e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0],
						    EDGE_1);
				e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0],
						    EDGE_1);
				DEBUG_TRAINING_IP_ENGINE(
					DEBUG_LEVEL_INFO,
					("wrapper if_id %d pup_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
					 interface_cnt, pup_id, bit_id,
					 result[HWS_LOW2HIGH][0], e1,
					 result[HWS_HIGH2LOW][0], e2));
				/* TBD validate is valid only for tx */
				if (VALIDATE_TRAINING_LIMIT(e1, e2) == 1 &&
				    GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
				    GET_LOCK_RESULT(result[HWS_HIGH2LOW][0])) {
					/* Mark problem bits */
					bit_bit_mask[pup_id] |= 1 << bit_id;
					bit_bit_mask_active = 1;
				}
			} /* For all bits */
		} /* For all PUPs */

		/* Fix problem bits */
		if (bit_bit_mask_active != 0) {
			u32 *l2h_if_train_res = NULL;
			u32 *h2l_if_train_res = NULL;
			l2h_if_train_res =
				ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH,
						     result_type,
						     interface_cnt);
			h2l_if_train_res =
				ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW,
						     result_type,
						     interface_cnt);

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
					     interface_cnt,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, result_type,
					     control_element, HWS_LOW2HIGH,
					     direction, interface_mask,
					     num_iter / 2, num_iter / 2,
					     pattern, EDGE_FP, train_cs_type,
					     cs_num, train_status);

			for (pup_id = 0;
			     pup_id <= (tm->num_of_bus_per_interface - 1);
			     pup_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);

				if (bit_bit_mask[pup_id] == 0)
					continue;

				for (bit_id = 0; bit_id <= bit_end; bit_id++) {
					if ((bit_bit_mask[pup_id] &
					     (1 << bit_id)) == 0)
						continue;
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, interface_cnt,
						  ACCESS_TYPE_UNICAST, pup_id,
						  bit_id, HWS_LOW2HIGH,
						  direction,
						  result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &l2h_if_train_res,
						  0, 0, 1));
				}
			}

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
					     interface_cnt,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, result_type,
					     control_element, HWS_HIGH2LOW,
					     direction, interface_mask,
					     num_iter / 2, num_iter / 2,
					     pattern, EDGE_FP, train_cs_type,
					     cs_num, train_status);

			for (pup_id = 0;
			     pup_id <= (tm->num_of_bus_per_interface - 1);
			     pup_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);

				if (bit_bit_mask[pup_id] == 0)
					continue;

				for (bit_id = 0; bit_id <= bit_end; bit_id++) {
					if ((bit_bit_mask[pup_id] &
					     (1 << bit_id)) == 0)
						continue;
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, interface_cnt,
						  ACCESS_TYPE_UNICAST, pup_id,
						  bit_id, HWS_HIGH2LOW,
						  direction,
						  result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &h2l_if_train_res,
						  0, cons_tap, 1));
				}
			}
		} /* if bit_bit_mask_active */
	} /* For all interfaces */

	return MV_OK;
}

/*
 * Load PHY values
 */
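/*
 * b_load == 1 backs up the write-centralization, read-leveling and
 * read-centralization PHY registers of the current effective_cs into
 * phy_reg_bk; b_load == 0 restores them from the same backup.
 */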
int ddr3_tip_load_phy_values(int b_load)
{
	u32 bus_cnt = 0, if_id, dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_cnt++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
			if (b_load == 1) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      WRITE_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      &phy_reg_bk[if_id][bus_cnt]
					      [0]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      RL_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      &phy_reg_bk[if_id][bus_cnt]
					      [1]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      READ_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      &phy_reg_bk[if_id][bus_cnt]
					      [2]));
			} else {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      WRITE_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      phy_reg_bk[if_id][bus_cnt]
					      [0]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      RL_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      phy_reg_bk[if_id][bus_cnt]
					      [1]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      READ_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      phy_reg_bk[if_id][bus_cnt]
					      [2]));
			}
		}
	}

	return MV_OK;
}
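
/*
 * Debug helper: back up the PHY registers, run the ADLL training
 * wrapper for every pattern in [start_pattern, end_pattern] and both
 * search directions, dump the unloaded results, then restore the PHY
 * registers.
 */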
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
			      enum hws_search_dir search_dir,
			      enum hws_dir direction,
			      enum hws_edge_compare edge,
			      u32 init_val1, u32 init_val2,
			      u32 num_of_iterations,
			      u32 start_pattern, u32 end_pattern)
{
	u32 pattern, if_id, pup_id;
	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
	u32 *res = NULL;
	u32 search_state = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	ddr3_tip_load_phy_values(1);

	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
		     search_state++) {
			ddr3_tip_ip_training_wrapper(dev_num,
						     ACCESS_TYPE_MULTICAST, 0,
						     ACCESS_TYPE_MULTICAST, 0,
						     result_type,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, direction,
						     0xfff, init_val1,
						     init_val2,
						     num_of_iterations, pattern,
						     edge, CS_SINGLE,
						     PARAM_NOT_CARE,
						     train_status);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup_id = 0; pup_id <
				     tm->num_of_bus_per_interface;
				     pup_id++) {
					VALIDATE_ACTIVE(tm->bus_act_mask,
							pup_id);
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, if_id,
						  ACCESS_TYPE_UNICAST, pup_id,
						  ALL_BITS_PER_PUP,
						  search_state,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &res, 1, 0,
						  0));
					if (result_type == RESULT_PER_BYTE) {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0]));
					} else {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0],
							  res[1], res[2],
							  res[3], res[4],
							  res[5], res[6],
							  res[7]));
					}
				}
			} /* interface */
		} /* search */
	} /* pattern */

	ddr3_tip_load_phy_values(0);

	return MV_OK;
}

struct pattern_info *ddr3_tip_get_pattern_table(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
		return pattern_table_32;
	else
		return pattern_table_16;
}

u16 *ddr3_tip_get_mask_results_dq_reg(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_dq_reg_map_pup3_ecc;
	else
		return mask_results_dq_reg_map;
}

u16 *ddr3_tip_get_mask_results_pup_reg_map(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_pup_reg_map_pup3_ecc;
	else
		return mask_results_pup_reg_map;
}