/* ddr3_training_ip_engine.c */
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"

/* Fixed alternating data patterns (0101.../1010...) used by training */
#define PATTERN_1 0x55555555
#define PATTERN_2 0xaaaaaaaa

/*
 * Sanity check on a search-result window [e1, e2]: the window is accepted
 * when it spans more than 33 taps and starts below tap 67.
 * NOTE(review): the 33/67 thresholds are empirical limits from the Marvell
 * training flow -- confirm against the platform's ADLL tap range.
 */
#define VALIDATE_TRAINING_LIMIT(e1, e2) \
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))
/* Backup storage for PHY register values, per interface/bus/bit */
u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

/*
 * Flat training-result buffer; one slot per (search direction, interface,
 * bus, bit).  Indexed via ddr3_tip_get_buf_ptr().
 */
u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];

/* holds the bit status in the byte in wrapper function */
u8 byte_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];
/*
 * Per-DQ training-result control registers, indexed as (pup * 8 + bit).
 * Covers 5 PUPs (or 9 when MAX_BUS_NUM == 9); terminated with 0xffff.
 */
u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_PUP_5_BIT_0_REG, RESULT_CONTROL_PUP_5_BIT_1_REG,
	RESULT_CONTROL_PUP_5_BIT_2_REG, RESULT_CONTROL_PUP_5_BIT_3_REG,
	RESULT_CONTROL_PUP_5_BIT_4_REG, RESULT_CONTROL_PUP_5_BIT_5_REG,
	RESULT_CONTROL_PUP_5_BIT_6_REG, RESULT_CONTROL_PUP_5_BIT_7_REG,
	RESULT_CONTROL_PUP_6_BIT_0_REG, RESULT_CONTROL_PUP_6_BIT_1_REG,
	RESULT_CONTROL_PUP_6_BIT_2_REG, RESULT_CONTROL_PUP_6_BIT_3_REG,
	RESULT_CONTROL_PUP_6_BIT_4_REG, RESULT_CONTROL_PUP_6_BIT_5_REG,
	RESULT_CONTROL_PUP_6_BIT_6_REG, RESULT_CONTROL_PUP_6_BIT_7_REG,
	RESULT_CONTROL_PUP_7_BIT_0_REG, RESULT_CONTROL_PUP_7_BIT_1_REG,
	RESULT_CONTROL_PUP_7_BIT_2_REG, RESULT_CONTROL_PUP_7_BIT_3_REG,
	RESULT_CONTROL_PUP_7_BIT_4_REG, RESULT_CONTROL_PUP_7_BIT_5_REG,
	RESULT_CONTROL_PUP_7_BIT_6_REG, RESULT_CONTROL_PUP_7_BIT_7_REG,
	RESULT_CONTROL_PUP_8_BIT_0_REG, RESULT_CONTROL_PUP_8_BIT_1_REG,
	RESULT_CONTROL_PUP_8_BIT_2_REG, RESULT_CONTROL_PUP_8_BIT_3_REG,
	RESULT_CONTROL_PUP_8_BIT_4_REG, RESULT_CONTROL_PUP_8_BIT_5_REG,
	RESULT_CONTROL_PUP_8_BIT_6_REG, RESULT_CONTROL_PUP_8_BIT_7_REG,
#endif
	0xffff /* end-of-table sentinel */
};
/*
 * Per-byte (PUP) training-result control registers, indexed by PUP number;
 * terminated with 0xffff.
 */
u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_BYTE_PUP_5_REG, RESULT_CONTROL_BYTE_PUP_6_REG,
	RESULT_CONTROL_BYTE_PUP_7_REG, RESULT_CONTROL_BYTE_PUP_8_REG,
#endif
	0xffff /* end-of-table sentinel */
};
#if MAX_BUS_NUM == 5
/*
 * Per-DQ result registers for the configuration where the ECC byte sits on
 * PUP3: the PUP3 and PUP4 register groups are deliberately swapped relative
 * to mask_results_dq_reg_map[] (bus 3 results land in the PUP_4 registers
 * and vice versa).
 */
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG
};
#endif
#if MAX_BUS_NUM == 5
/*
 * Per-byte result registers for the ECC-on-PUP3 configuration.
 * NOTE(review): the last two entries are both BYTE_PUP_4.  Given that the
 * matching dq map above swaps PUP3/PUP4, the final entry may be intended
 * to be RESULT_CONTROL_BYTE_PUP_3_REG -- confirm against the controller
 * spec before changing.
 */
u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
#endif
/*
 * ODPG pattern descriptors for 64-bit bus width, indexed by enum
 * hws_pattern.  Fields per entry:
 * num_of_phases_tx, tx_burst_size, delay_between_bursts,
 * num_of_phases_rx, start_addr, pattern_len
 */
struct pattern_info pattern_table_64[] = {
	{0x7, 0x7, 2, 0x7, 0x00000, 8},	  /* PATTERN_PBS1 */
	{0x7, 0x7, 2, 0x7, 0x00080, 8},	  /* PATTERN_PBS2 */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},	  /* PATTERN_PBS3 */
	{0x7, 0x7, 2, 0x7, 0x00030, 8},	  /* PATTERN_TEST */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},	  /* PATTERN_RL */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},	  /* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x00680, 32}, /* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x00a80, 32}, /* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01280, 32}, /* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x01a80, 32}, /* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x02280, 32}, /* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x02a80, 32}, /* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x03280, 32}, /* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x03a80, 32}, /* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x04280, 32}, /* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x00e80, 32}, /* PATTERN_KILLER_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x01680, 32}, /* PATTERN_KILLER_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x01e80, 32}, /* PATTERN_KILLER_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x02680, 32}, /* PATTERN_KILLER_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x02e80, 32}, /* PATTERN_KILLER_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x03680, 32}, /* PATTERN_KILLER_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x03e80, 32}, /* PATTERN_KILLER_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x04680, 32}, /* PATTERN_KILLER_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x04a80, 32}, /* PATTERN_KILLER_DQ0_INV */
	{0x1f, 0xf, 2, 0xf, 0x05280, 32}, /* PATTERN_KILLER_DQ1_INV */
	{0x1f, 0xf, 2, 0xf, 0x05a80, 32}, /* PATTERN_KILLER_DQ2_INV */
	{0x1f, 0xf, 2, 0xf, 0x06280, 32}, /* PATTERN_KILLER_DQ3_INV */
	{0x1f, 0xf, 2, 0xf, 0x06a80, 32}, /* PATTERN_KILLER_DQ4_INV */
	{0x1f, 0xf, 2, 0xf, 0x07280, 32}, /* PATTERN_KILLER_DQ5_INV */
	{0x1f, 0xf, 2, 0xf, 0x07a80, 32}, /* PATTERN_KILLER_DQ6_INV */
	{0x1f, 0xf, 2, 0xf, 0x08280, 32}, /* PATTERN_KILLER_DQ7_INV */
	{0x1f, 0xf, 2, 0xf, 0x04e80, 32}, /* PATTERN_KILLER_DQ0_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05680, 32}, /* PATTERN_KILLER_DQ1_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05e80, 32}, /* PATTERN_KILLER_DQ2_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06680, 32}, /* PATTERN_KILLER_DQ3_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06e80, 32}, /* PATTERN_KILLER_DQ4_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07680, 32}, /* PATTERN_KILLER_DQ5_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07e80, 32}, /* PATTERN_KILLER_DQ6_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08680, 32}, /* PATTERN_KILLER_DQ7_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08a80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x09280, 32}, /* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x09a80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0a280, 32}, /* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0aa80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0b280, 32}, /* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0ba80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0c280, 32}, /* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x08e80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x09680, 32}, /* PATTERN_SSO_FULL_XTALK_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x09e80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0a680, 32}, /* PATTERN_SSO_FULL_XTALK_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ae80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0b680, 32}, /* PATTERN_SSO_FULL_XTALK_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0be80, 32}, /* PATTERN_SSO_FULL_XTALK_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x0c680, 32}, /* PATTERN_SSO_FULL_XTALK_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ca80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x0d280, 32}, /* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0da80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0e280, 32}, /* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0ea80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0f280, 32}, /* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0fa80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x10280, 32}, /* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x0ce80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x0d680, 32}, /* PATTERN_SSO_XTALK_FREE_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x0de80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0e680, 32}, /* PATTERN_SSO_XTALK_FREE_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ee80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0f680, 32}, /* PATTERN_SSO_XTALK_FREE_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0fe80, 32}, /* PATTERN_SSO_XTALK_FREE_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x10680, 32}, /* PATTERN_SSO_XTALK_FREE_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x10a80, 32}, /* PATTERN_ISI_XTALK_FREE */
	{0x1f, 0xf, 2, 0xf, 0x10e80, 32}, /* PATTERN_ISI_XTALK_FREE_64 */
	{0x1f, 0xf, 2, 0xf, 0x11280, 32}, /* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x11680, 32}, /* PATTERN_VREF_64 */
	{0x1f, 0xf, 2, 0xf, 0x11a80, 32}, /* PATTERN_VREF_INV */
	{0x1f, 0xf, 2, 0xf, 0x11e80, 32}, /* PATTERN_FULL_SSO_0T */
	{0x1f, 0xf, 2, 0xf, 0x12280, 32}, /* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x12680, 32}, /* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x12a80, 32}, /* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x12e80, 32}, /* PATTERN_RESONANCE_1T */
	{0x1f, 0xf, 2, 0xf, 0x13280, 32}, /* PATTERN_RESONANCE_2T */
	{0x1f, 0xf, 2, 0xf, 0x13680, 32}, /* PATTERN_RESONANCE_3T */
	{0x1f, 0xf, 2, 0xf, 0x13a80, 32}, /* PATTERN_RESONANCE_4T */
	{0x1f, 0xf, 2, 0xf, 0x13e80, 32}, /* PATTERN_RESONANCE_5T */
	{0x1f, 0xf, 2, 0xf, 0x14280, 32}, /* PATTERN_RESONANCE_6T */
	{0x1f, 0xf, 2, 0xf, 0x14680, 32}, /* PATTERN_RESONANCE_7T */
	{0x1f, 0xf, 2, 0xf, 0x14a80, 32}, /* PATTERN_RESONANCE_8T */
	{0x1f, 0xf, 2, 0xf, 0x14e80, 32}, /* PATTERN_RESONANCE_9T */
	{0x1f, 0xf, 2, 0xf, 0x15280, 32}, /* PATTERN_ZERO */
	{0x1f, 0xf, 2, 0xf, 0x15680, 32}  /* PATTERN_ONE */
	/* Note: actual start_address is "<< 3" of defined address */
};
/*
 * ODPG pattern descriptors for 16-bit bus width, indexed by enum
 * hws_pattern.  Fields per entry:
 * num tx phases, tx burst, delay between, rx pattern,
 * start_address, pattern_len
 */
struct pattern_info pattern_table_16[] = {
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16},	/* PATTERN_VREF */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 7, 2, 7, 0x6280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0xf, 7, 2, 7, 0x6680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0xf, 7, 2, 7, 0x6A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0xf, 7, 2, 7, 0x6E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0xf, 7, 2, 7, 0x7280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0xf, 7, 2, 7, 0x7680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0xf, 7, 2, 7, 0x7A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0xf, 7, 2, 7, 0x7E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0xf, 7, 2, 7, 0x8280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0xf, 7, 2, 7, 0x8680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0xf, 7, 2, 7, 0x8A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0xf, 7, 2, 7, 0x8E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0xf, 7, 2, 7, 0x9280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0xf, 7, 2, 7, 0x9680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0xf, 7, 2, 7, 0x9A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0xf, 7, 2, 7, 0x9E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0xf, 7, 2, 7, 0xA280, 16}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};
/*
 * ODPG pattern descriptors for 32-bit bus width, indexed by enum
 * hws_pattern.  Fields per entry:
 * num tx phases, tx burst, delay between, rx pattern,
 * start_address, pattern_len
 */
struct pattern_info pattern_table_32[] = {
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xF, 2, 0xf, 0x6280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xF, 2, 0xf, 0x6680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xF, 2, 0xf, 0x6A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xF, 2, 0xf, 0x6E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xF, 2, 0xf, 0x7280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xF, 2, 0xf, 0x7680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xF, 2, 0xf, 0x7A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xF, 2, 0xf, 0x7E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xF, 2, 0xf, 0x8280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xF, 2, 0xf, 0x8680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xF, 2, 0xf, 0x8A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xF, 2, 0xf, 0x8E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xF, 2, 0xf, 0x9280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xF, 2, 0xf, 0x9680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xF, 2, 0xf, 0x9A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xF, 2, 0xf, 0x9E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xF, 2, 0xf, 0xA280, 32}		/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};
/*
 * Current training context, set by the training dispatcher before a run.
 * NOTE(review): the identifiers "traintrain_cs_type", "traine_search_dir"
 * and "train_if_acess" contain typos but have external linkage -- renaming
 * would require touching every user, so they are kept as-is.
 */
u32 train_dev_num;
enum hws_ddr_cs traintrain_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_acess, train_if_id, train_pup_access;

/* Upper bound on busy-wait iterations when polling for training done */
u32 max_polling_for_done = 1000000;
  298. u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
  299. enum hws_training_result result_type,
  300. u32 interface_num)
  301. {
  302. u32 *buf_ptr = NULL;
  303. buf_ptr = &training_res
  304. [MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
  305. interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];
  306. return buf_ptr;
  307. }
/* Training-done result codes read back from the controller (0 = pass) */
enum {
	PASS,
	FAIL
};
/*
 * IP Training search.
 *
 * Programs the ODPG and the training opcode registers for a single
 * hardware-driven search run, unmasks the relevant per-bit or per-byte
 * result registers, triggers training, and polls for completion.  The
 * outcome is reported through train_status[0]
 * (SUCCESS / FAIL / TIMEOUT); the function itself returns MV_OK unless a
 * register access fails (CHECK_STATUS) or train_status is NULL.
 *
 * Note: for one edge search only from fail to pass, else jitter can
 * be entered into solution.
 *
 * NOTE(review): interface_mask is never referenced in this body --
 * confirm whether it is intentionally unused.
 */
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
			 u32 interface_num,
			 enum hws_access_type pup_access_type,
			 u32 pup_num, enum hws_training_result result_type,
			 enum hws_control_element control_element,
			 enum hws_search_dir search_dir, enum hws_dir direction,
			 u32 interface_mask, u32 init_value, u32 num_iter,
			 enum hws_pattern pattern,
			 enum hws_edge_compare edge_comp,
			 enum hws_ddr_cs cs_type, u32 cs_num,
			 enum hws_training_ip_stat *train_status)
{
	u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt,
		reg_data, pup_id;
	u32 tx_burst_size;
	u32 delay_between_burst;
	u32 rd_mode;
	u32 data;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * Parameter checks.  Note that out-of-range pup_num/interface_num
	 * only log an error and fall through; only a NULL train_status
	 * aborts the run.
	 */
	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (interface_num >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n",
					  interface_num));
	}
	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("error param 4\n"));
		return MV_BAD_PARAM;
	}

	/* load pattern */
	if (cs_type == CS_SINGLE) {
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      DUAL_DUNIT_CFG_REG, 1 << 3, 1 << 3));
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CTRL_REG,
			      (0x3 | (effective_cs << 26)), 0xc000003));
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      DUAL_DUNIT_CFG_REG, 0, 1 << 3));
		/* CS select */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CTRL_REG, 0x3 | cs_num << 26,
			      0x3 | 3 << 26));
	}

	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
				      pattern,
				      pattern_table[pattern].start_addr);

	/* burst/delay/read-mode only matter for write-direction training */
	tx_burst_size = (direction == OPER_WRITE) ?
		pattern_table[pattern].tx_burst_size : 0;
	delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
	rd_mode = (direction == OPER_WRITE) ? 1 : 0;
	CHECK_STATUS(ddr3_tip_configure_odpg
		     (dev_num, access_type, interface_num, direction,
		      pattern_table[pattern].num_of_phases_tx, tx_burst_size,
		      pattern_table[pattern].num_of_phases_rx,
		      delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
		      DURATION_SINGLE));

	/* select ODPG write/read operating mode */
	reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
	reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_WR_RD_MODE_ENA_REG, reg_data,
		      MASK_ALL_BITS));

	/* build the general training opcode from the edge-compare mode */
	reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
	reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
		(1 << 7) : 0;

	/* change from Pass to Fail will lock the result */
	if (pup_access_type == ACCESS_TYPE_MULTICAST)
		reg_data |= 0xe << 14;
	else
		reg_data |= pup_num << 14;

	if (edge_comp == EDGE_FP) {
		/* don't search for read edge change, only the state */
		reg_data |= (0 << 20);
	} else if (edge_comp == EDGE_FPF) {
		reg_data |= (0 << 20);
	} else {
		reg_data |= (3 << 20);
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      GENERAL_TRAINING_OPCODE_REG,
		      reg_data | (0x7 << 8) | (0x7 << 11),
		      (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
		       (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));

	/* search direction and initial delay value */
	reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, OPCODE_REG0_REG(1),
		      1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
		      0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

	/*
	 * Write2_dunit(0x10b4, Number_iteration , [15:0])
	 * Max number of iterations
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       OPCODE_REG1_REG(1), num_iter,
				       0xffff));

	/* select the PHY register the search will sweep */
	if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
	    direction == OPER_READ) {
		/*
		 * Write2_dunit(0x10c0, 0x5f , [7:0])
		 * MC PBS Reg Address at DDR PHY
		 */
		reg_data = PBS_RX_BCAST_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
		   direction == OPER_WRITE) {
		reg_data = PBS_TX_BCAST_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_WRITE) {
		/*
		 * LOOP 0x00000001 + 4*n:
		 * where n (0-3) represents M_CS number
		 */
		/*
		 * Write2_dunit(0x10c0, 0x1 , [7:0])
		 * ADLL WR Reg Address at DDR PHY
		 */
		reg_data = CTX_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_READ) {
		/* ADLL RD Reg Address at DDR PHY */
		reg_data = CRX_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_WRITE) {
		/* TBD not defined in 0.5.0 requirement */
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_READ) {
		/* TBD not defined in 0.5.0 requirement */
	}
	/* NOTE(review): reg_data is left stale in the DQS_SKEW branches */

	reg_data |= (0x6 << 28);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, CAL_PHY_REG(1),
		      reg_data | (init_value << 8),
		      0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));

	/*
	 * Unmask (bit 24 = 0) the result registers that match result_type
	 * and mask (bit 24 = 1) the rest, including disabled buses.
	 */
	mask_dq_num_of_regs = octets_per_if_num * BUS_WIDTH_IN_BITS;
	mask_pup_num_of_regs = octets_per_if_num;

	if (result_type == RESULT_PER_BIT) {
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt], 0,
				      1 << 24));
		}

		/* Mask disabled buses */
		for (pup_id = 0; pup_id < octets_per_if_num;
		     pup_id++) {
			if (IS_BUS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
				continue;

			for (index_cnt = (pup_id * 8); index_cnt < (pup_id + 1) * 8; index_cnt++) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type,
					      interface_num,
					      mask_results_dq_reg_map
					      [index_cnt], (1 << 24), 1 << 24));
			}
		}

		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt],
				      (1 << 24), 1 << 24));
		}
	} else if (result_type == RESULT_PER_BYTE) {
		/* write to adll */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt], 0,
				      1 << 24));
		}
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt],
				      (1 << 24), (1 << 24)));
		}
	}

	/* trigger training */
	mv_ddr_training_enable();

	/* wa for 16-bit mode: wait for all rfu tests to finish or timeout */
	mdelay(1);

	/* check for training done */
	if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
		train_status[0] = HWS_TRAINING_IP_STATUS_TIMEOUT;
	} else { /* training done; check for pass */
		if (data == PASS)
			train_status[0] = HWS_TRAINING_IP_STATUS_SUCCESS;
		else
			train_status[0] = HWS_TRAINING_IP_STATUS_FAIL;
	}

	/* disable ODPG on all interfaces before returning */
	ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			  ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);

	return MV_OK;
}
/*
 * Load expected pattern to ODPG
 *
 * Writes the given pattern word by word into the ODPG internal write
 * buffer and then programs the pattern's buffer offset. Each loop
 * iteration fills the low and high 32-bit data registers and commits
 * them at index pattern_length_cnt via ODPG_DATA_WR_ADDR_REG.
 *
 * Returns MV_OK on success; CHECK_STATUS propagates register-write errors.
 */
int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
				  u32 if_id, enum hws_pattern pattern,
				  u32 load_addr)
{
	u32 pattern_length_cnt = 0;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (pattern_length_cnt = 0;
	     pattern_length_cnt < pattern_table[pattern].pattern_len;
	     pattern_length_cnt++) {	/* FIXME: the ecc patch below is only for a7040 A0 */
		if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)/* || tm->bus_act_mask == MV_DDR_32BIT_ECC_PUP8_BUS_MASK*/) {
			/*
			 * 64-bit dram mode: the same pattern word is written
			 * to both the low and the high data registers
			 */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_LOW_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt)),
				      MASK_ALL_BITS));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_HIGH_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt)),
				      MASK_ALL_BITS));
		} else {
			/*
			 * narrower modes: consecutive pattern words go to the
			 * low (2n) and high (2n + 1) halves respectively
			 */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_LOW_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt * 2)),
				      MASK_ALL_BITS));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_HIGH_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt * 2 + 1)),
				      MASK_ALL_BITS));
		}
		/* commit the data pair at the current buffer index */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_DATA_WR_ADDR_REG, pattern_length_cnt,
			      MASK_ALL_BITS));
	}

	/* finally set the address the pattern is to be loaded from/to */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id,
		      ODPG_DATA_BUFFER_OFFS_REG, load_addr, MASK_ALL_BITS));

	return MV_OK;
}
  580. /*
  581. * Configure ODPG
  582. */
  583. int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
  584. u32 if_id, enum hws_dir direction, u32 tx_phases,
  585. u32 tx_burst_size, u32 rx_phases,
  586. u32 delay_between_burst, u32 rd_mode, u32 cs_num,
  587. u32 addr_stress_jump, u32 single_pattern)
  588. {
  589. u32 data_value = 0;
  590. int ret;
  591. data_value = ((single_pattern << 2) | (tx_phases << 5) |
  592. (tx_burst_size << 11) | (delay_between_burst << 15) |
  593. (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
  594. (addr_stress_jump << 29));
  595. ret = ddr3_tip_if_write(dev_num, access_type, if_id,
  596. ODPG_DATA_CTRL_REG, data_value, 0xaffffffc);
  597. if (ret != MV_OK)
  598. return ret;
  599. return MV_OK;
  600. }
  601. int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
  602. enum hws_edge_search e_edge_search,
  603. u32 *edge_result)
  604. {
  605. u32 i, res;
  606. int tap_val, max_val = -10000, min_val = 10000;
  607. int lock_success = 1;
  608. for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
  609. res = GET_LOCK_RESULT(ar_result[i]);
  610. if (res == 0) {
  611. lock_success = 0;
  612. break;
  613. }
  614. DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
  615. ("lock failed for bit %d\n", i));
  616. }
  617. if (lock_success == 1) {
  618. for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
  619. tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
  620. if (tap_val > max_val)
  621. max_val = tap_val;
  622. if (tap_val < min_val)
  623. min_val = tap_val;
  624. if (e_edge_search == TRAINING_EDGE_MAX)
  625. *edge_result = (u32) max_val;
  626. else
  627. *edge_result = (u32) min_val;
  628. DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
  629. ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
  630. i, ar_result[i], tap_val,
  631. max_val, min_val,
  632. *edge_result));
  633. }
  634. } else {
  635. return MV_FAIL;
  636. }
  637. return MV_OK;
  638. }
/*
 * Read training search result
 *
 * Unloads the search-engine result registers for the requested pup(s)
 * and bit(s) on one interface. With is_read_from_db == 0 the raw
 * register values (plus cons_tap) are copied into the per-interface
 * result buffer; with is_read_from_db == 1 the caller instead receives
 * a pointer into that buffer via *load_res.
 *
 * is_check_result_validity == 1 substitutes a sentinel value
 * (TIP_ENG_LOCK + TIP_TX_DLL_RANGE_MAX) for results whose lock bit is
 * not set.
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * Agreed assumption: all CS mask contain same number of bits,
	 * i.e. in multi CS, the number of CS per memory is the same for
	 * all pups
	 */
	/* single-cs mode when cs_num_type == 0, multi-cs otherwise */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, DUAL_DUNIT_CFG_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	/* select the cs the odpg reads results for (bits 27:26) */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CTRL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	/* a buffer pointer is mandatory when reading from the database */
	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL"));
		return MV_FAIL;
	}
	/* NOTE(review): the two range checks below only log; they do not abort */
	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	/* per-bit results live in the dq map, per-byte in the pup map */
	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;

	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else {		/*pup_access_type == ACCESS_TYPE_MULTICAST) */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		/* compute the result-register index range for this pup */
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					/* read the raw result register */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						/* unlocked result: store a max-range sentinel */
						if ((read_data[if_id] &
						     TIP_ENG_LOCK) == 0) {
							interface_train_res
								[reg_offset] =
								TIP_ENG_LOCK +
								TIP_TX_DLL_RANGE_MAX;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					/* db read: just expose the buffer to the caller */
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				/* only the UNLOAD operation is implemented */
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}

	return MV_OK;
}
  786. /*
  787. * Load all pattern to memory using ODPG
  788. */
  789. int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
  790. {
  791. u32 pattern = 0, if_id;
  792. struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
  793. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  794. VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
  795. training_result[training_stage][if_id] = TEST_SUCCESS;
  796. }
  797. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  798. VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
  799. /* enable single cs */
  800. CHECK_STATUS(ddr3_tip_if_write
  801. (dev_num, ACCESS_TYPE_UNICAST, if_id,
  802. DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
  803. }
  804. for (pattern = 0; pattern < PATTERN_LAST; pattern++)
  805. ddr3_tip_load_pattern_to_mem(dev_num, pattern);
  806. return MV_OK;
  807. }
/*
 * Load specific pattern to memory using ODPG
 *
 * Programs the ODPG control register from the pattern table entry,
 * enables ODPG write-from-BIST, disables error injection, loads the
 * pattern into the ODPG buffer, triggers the write to memory, polls
 * for completion, and finally returns the ODPG to its default state.
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG Write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_WR_DATA_ERR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		/* newer tip: set odt then start the odpg via its own helper */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      SDRAM_ODT_CTRL_HIGH_REG,
				      0x3, 0xf));
		}
		mv_ddr_odpg_enable();
	} else {
		/* older tip: start the odpg by setting bit 31 directly */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, (u32)(0x1 << 31),
			      (u32)(0x1 << 31)));
	}

	/* give the odpg time to start before polling for done */
	mdelay(1);
	if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK)
		return MV_FAIL;

	/* Disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 << 30), (u32) (0x3 << 30)));
	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS));

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		/* Disable odt0 for CS0 training - need to adjust for multi CS */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf));
	}

	/* temporary added */
	mdelay(1);

	return MV_OK;
}
/*
 * Training search routine
 *
 * Validates the search parameters, then runs the training IP search
 * once per direction. An EDGE_FPF comparison expands into two runs
 * (low-to-high then high-to-low, each compared as EDGE_FP); any other
 * comparison runs a single search in the requested direction. After
 * each search the results are unloaded from the result registers into
 * the per-interface database via ddr3_tip_read_training_result().
 */
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
				     enum hws_access_type access_type,
				     u32 if_id,
				     enum hws_access_type pup_access_type,
				     u32 pup_num, u32 bit_num,
				     enum hws_training_result result_type,
				     enum hws_control_element control_element,
				     enum hws_search_dir search_dir,
				     enum hws_dir direction,
				     u32 interface_mask, u32 init_value_l2h,
				     u32 init_value_h2l, u32 num_iter,
				     enum hws_pattern pattern,
				     enum hws_edge_compare edge_comp,
				     enum hws_ddr_cs train_cs_type, u32 cs_num,
				     enum hws_training_ip_stat *train_status)
{
	u32 interface_num = 0, start_if, end_if, init_value_used;
	enum hws_search_dir search_dir_id, start_search, end_search;
	enum hws_edge_compare edge_comp_used;
	u8 cons_tap = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("train_status is NULL\n"));
		return MV_FAIL;
	}

	/* reject any out-of-range parameter up front */
	if ((train_cs_type > CS_NON_SINGLE) ||
	    (edge_comp >= EDGE_PFP) ||
	    (pattern >= PATTERN_LAST) ||
	    (direction > OPER_WRITE_AND_READ) ||
	    (search_dir > HWS_HIGH2LOW) ||
	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
	    (result_type > RESULT_PER_BYTE) ||
	    (pup_num >= octets_per_if_num) ||
	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_ERROR,
			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
			 train_cs_type, edge_comp, pattern, direction,
			 search_dir, control_element, result_type, pup_num,
			 pup_access_type, if_id, access_type));
		return MV_FAIL;
	}

	/* EDGE_FPF means: search both directions, each as EDGE_FP */
	if (edge_comp == EDGE_FPF) {
		start_search = HWS_LOW2HIGH;
		end_search = HWS_HIGH2LOW;
		edge_comp_used = EDGE_FP;
	} else {
		start_search = search_dir;
		end_search = search_dir;
		edge_comp_used = edge_comp;
	}

	for (search_dir_id = start_search; search_dir_id <= end_search;
	     search_dir_id++) {
		/* each direction starts from its own initial adll value */
		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
			init_value_l2h : init_value_h2l;
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("dev_num %d, access_type %d, if_id %d, pup_access_type %d,pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d,init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
			 dev_num, access_type, if_id, pup_access_type, pup_num,
			 result_type, control_element, search_dir_id,
			 direction, interface_mask, init_value_used, num_iter,
			 pattern, edge_comp_used, train_cs_type, cs_num));
		/* trigger the hardware search */
		ddr3_tip_ip_training(dev_num, access_type, if_id,
				     pup_access_type, pup_num, result_type,
				     control_element, search_dir_id, direction,
				     interface_mask, init_value_used, num_iter,
				     pattern, edge_comp_used, train_cs_type,
				     cs_num, train_status);
		if (access_type == ACCESS_TYPE_MULTICAST) {
			start_if = 0;
			end_if = MAX_INTERFACE_NUM - 1;
		} else {
			start_if = if_id;
			end_if = if_id;
		}
		/* unload the results of this direction into the database */
		for (interface_num = start_if; interface_num <= end_if;
		     interface_num++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
			cs_num = 0;
			CHECK_STATUS(ddr3_tip_read_training_result
				     (dev_num, interface_num, pup_access_type,
				      pup_num, bit_num, search_dir_id,
				      direction, result_type,
				      TRAINING_LOAD_OPERATION_UNLOAD,
				      train_cs_type, NULL, 0, cons_tap,
				      0));
		}
	}

	return MV_OK;
}
/*
 * Training search & read result routine
 * This function implements the search algorithm
 * first it calls the function ddr3_tip_ip_training_wrapper_int which triggers the search from l2h and h2l
 * this function handles rx and tx search cases
 * in case of rx it only triggers the search (l2h and h2l)
 * in case of tx there are 3 optional algorithm phases:
 * phase 1:
 * it first triggers the search and handles the results as following (phase 1):
 * each bit, which is defined by the search two edges (e1 or VW_L and e2 or VW_H), matches one of the cases:
 * 1. BIT_LOW_UI	0 =< VW =< 31 in case of jitter use: VW_L <= 31, VW_H <= 31
 * 2. BIT_HIGH_UI	32 =< VW =< 63 in case of jitter use: VW_L >= 32, VW_H >= 32
 * 3. BIT_SPLIT_IN	VW_L <= 31 & VW_H >= 32
 * 4. BIT_SPLIT_OUT*	VW_H < 32 & VW_L > 32
 * note: the VW unit is adll taps
 * phase 2:
 * only bit case BIT_SPLIT_OUT requires another search (phase 2) from the middle range in two directions h2l and l2h
 * because only this case is not locked by the search engine in the first search trigger (phase 1).
 * phase 3:
 * each subphy is categorized according to its bits definition.
 * the sub-phy cases are as follows:
 * 1.BYTE_NOT_DEFINED			the byte has not yet been categorized
 * 2.BYTE_HOMOGENEOUS_LOW		0 =< VW =< 31
 * 3.BYTE_HOMOGENEOUS_HIGH		32 =< VW =< 63
 * 4.BYTE_HOMOGENEOUS_SPLIT_IN		VW_L <= 31 & VW_H >= 32
 *					or the center of all bits in the byte =< 31
 * 5.BYTE_HOMOGENEOUS_SPLIT_OUT		VW_H < 32 & VW_L > 32
 * 6.BYTE_SPLIT_OUT_MIX			at least one bit is in split out state and one bit is in another
 *					or the center of all bits in the byte => 32
 * after the two phases above a center valid window for each subphy is calculated accordingly:
 * center valid window = maximum center of all bits in the subphy - minimum center of all bits in the subphy.
 * now decisions are made in each subphy as follows:
 * all subphys which are homogeneous remain as is
 * all subphys which are homogeneous low | homogeneous high and the subphy center valid window is less than 32
 * mark this subphy as homogeneous split in.
 * now the bits in the bytes which are BYTE_SPLIT_OUT_MIX need to be reorganized and handled as follows:
 * all bits which are BIT_LOW_UI will be added with 64 adll,
 * this will hopefully ensure that all the bits in the sub phy can be sampled by the dqs
 */
int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
				 u32 if_id,
				 enum hws_access_type pup_access_type,
				 u32 pup_num,
				 enum hws_training_result result_type,
				 enum hws_control_element control_element,
				 enum hws_search_dir search_dir,
				 enum hws_dir direction, u32 interface_mask,
				 u32 init_value_l2h, u32 init_value_h2l,
				 u32 num_iter, enum hws_pattern pattern,
				 enum hws_edge_compare edge_comp,
				 enum hws_ddr_cs train_cs_type, u32 cs_num,
				 enum hws_training_ip_stat *train_status)
{
	u8 e1, e2;
	u32 bit_id, start_if, end_if, bit_end = 0;
	u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
	/* tx results are shifted by one ui (64 taps) */
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
	u8 bit_state[MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = {0};
	u8 h2l_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
	u8 l2h_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
	u8 center_subphy_adll_window[MAX_BUS_NUM];
	u8 min_center_subphy_adll[MAX_BUS_NUM];
	u8 max_center_subphy_adll[MAX_BUS_NUM];
	u32 *l2h_if_train_res = NULL;
	u32 *h2l_if_train_res = NULL;
	enum hws_search_dir search_dir_id;
	int status;
	u32 bit_lock_result;

	u8 sybphy_id;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* NOTE(review): these range checks only log; execution continues */
	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}

	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	/* trigger the l2h and h2l searches and unload the raw results */
	status = ddr3_tip_ip_training_wrapper_int
		(dev_num, access_type, if_id, pup_access_type, pup_num,
		 ALL_BITS_PER_PUP, result_type, control_element,
		 search_dir, direction, interface_mask, init_value_l2h,
		 init_value_h2l, num_iter, pattern, edge_comp,
		 train_cs_type, cs_num, train_status);

	if (MV_OK != status)
		return status;

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* zero the database */
		bit_bit_mask_active = 0;	/* clean the flag for level2 search */
		memset(bit_state, 0, sizeof(bit_state));
		/* phase 1: categorize every bit of every subphy */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if (result_type == RESULT_PER_BIT)
				bit_end = BUS_WIDTH_IN_BITS;
			else
				bit_end = 0;

			/* zero the data base */
			bit_bit_mask[sybphy_id] = 0;
			byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
			for (bit_id = 0; bit_id < bit_end; bit_id++) {
				/* defaults: full window until the edges are known */
				h2l_adll_value[sybphy_id][bit_id] = 64;
				l2h_adll_value[sybphy_id][bit_id] = 0;
				/* fetch both direction results for this bit */
				for (search_dir_id = HWS_LOW2HIGH; search_dir_id <= HWS_HIGH2LOW;
				     search_dir_id++) {
					status = ddr3_tip_read_training_result
						(dev_num, if_id,
						 ACCESS_TYPE_UNICAST, sybphy_id, bit_id,
						 search_dir_id, direction, result_type,
						 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
						 &result[search_dir_id], 1, 0, 0);

					if (MV_OK != status)
						return status;
				}

				/* e1 = lower edge (VW_L), e2 = upper edge (VW_H) */
				e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0], EDGE_1);
				e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0], EDGE_1);
				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_INFO,
					 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
					  if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
					  result[HWS_HIGH2LOW][0], e2));
				bit_lock_result =
					(GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
					 GET_LOCK_RESULT(result[HWS_HIGH2LOW][0]));

				if (bit_lock_result) {
					/* in case of read operation set the byte status as homogeneous low */
					if (direction == OPER_READ) {
						byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
					} else if ((e2 - e1) > 32) { /* oper_write */
						/* split out */
						bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
							BIT_SPLIT_OUT;
						byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_SPLIT_OUT;
						/* mark problem bits */
						bit_bit_mask[sybphy_id] |= (1 << bit_id);
						bit_bit_mask_active = 1;
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_TRACE,
							 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_OUT\n",
							  if_id, sybphy_id, bit_id));
					} else {
						/* low ui */
						if (e1 <= 31 && e2 <= 31) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_LOW_UI;
							byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_LOW_UI\n",
								  if_id, sybphy_id, bit_id));
						}
						/* high ui */
						if (e1 >= 32 && e2 >= 32) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_HIGH_UI;
							byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_HIGH;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_HIGH_UI\n",
								  if_id, sybphy_id, bit_id));
						}
						/* split in */
						if (e1 <= 31 && e2 >= 32) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_SPLIT_IN;
							byte_status[if_id][sybphy_id] |=
								BYTE_HOMOGENEOUS_SPLIT_IN;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_IN\n",
								  if_id, sybphy_id, bit_id));
						}
					}
				} else {
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_INFO,
						 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x)"
						  "h2l 0x%x (e2 0x%x): bit cannot be categorized\n",
						  if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
						  result[HWS_HIGH2LOW][0], e2));
					/* mark the byte as not defined */
					byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
					break; /* continue to next pup - no reason to analyze this byte */
				}
			} /* for all bits */
		} /* for all PUPs */

		/* phase 2 will occur only in write operation */
		if (bit_bit_mask_active != 0) {
			l2h_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH, result_type, if_id);
			h2l_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW, result_type, if_id);
			/* search from middle to end */
			ddr3_tip_ip_training
				(dev_num, ACCESS_TYPE_UNICAST,
				 if_id, ACCESS_TYPE_MULTICAST,
				 PARAM_NOT_CARE, result_type,
				 control_element, HWS_LOW2HIGH,
				 direction, interface_mask,
				 num_iter / 2, num_iter / 2,
				 pattern, EDGE_FP, train_cs_type,
				 cs_num, train_status);

			for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
				if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
					if (bit_bit_mask[sybphy_id] == 0)
						continue; /* this byte bits have no split out state */

					for (bit_id = 0; bit_id < bit_end; bit_id++) {
						if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
							continue; /* this bit is non split goto next bit */

						/* enter the result to the data base */
						status = ddr3_tip_read_training_result
							(dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
							 bit_id, HWS_LOW2HIGH, direction, result_type,
							 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
							 &l2h_if_train_res, 0, 0, 1);

						if (MV_OK != status)
							return status;

						l2h_adll_value[sybphy_id][bit_id] =
							l2h_if_train_res[sybphy_id *
							BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
					}
				}
			}
			/* Search from middle to start */
			ddr3_tip_ip_training
				(dev_num, ACCESS_TYPE_UNICAST,
				 if_id, ACCESS_TYPE_MULTICAST,
				 PARAM_NOT_CARE, result_type,
				 control_element, HWS_HIGH2LOW,
				 direction, interface_mask,
				 num_iter / 2, num_iter / 2,
				 pattern, EDGE_FP, train_cs_type,
				 cs_num, train_status);

			for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
				if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
					if (bit_bit_mask[sybphy_id] == 0)
						continue;

					for (bit_id = 0; bit_id < bit_end; bit_id++) {
						if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
							continue;

						status = ddr3_tip_read_training_result
							(dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
							 bit_id, HWS_HIGH2LOW, direction, result_type,
							 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
							 &h2l_if_train_res, 0, cons_tap, 1);

						if (MV_OK != status)
							return status;

						h2l_adll_value[sybphy_id][bit_id] =
							h2l_if_train_res[sybphy_id *
							BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
					}
				}
			}
		} /* end if bit_bit_mask_active */
		/*
		 * phase 3 will occur only in write operation
		 * find the maximum and the minimum center of each subphy
		 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if ((byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) && (direction == OPER_WRITE)) {
				/* clear the arrays and parameters */
				center_subphy_adll_window[sybphy_id] = 0;
				max_center_subphy_adll[sybphy_id] = 0;
				min_center_subphy_adll[sybphy_id] = 64;
				/* find the max and min center adll value in the current subphy */
				for (bit_id = 0; bit_id < bit_end; bit_id++) {
					/* debug print all the bit edges after alignment */
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("if_id %d sybphy_id %d bit %d l2h %d h2l %d\n",
						  if_id, sybphy_id, bit_id, l2h_adll_value[sybphy_id][bit_id],
						  h2l_adll_value[sybphy_id][bit_id]));

					/* bit center = (VW_L + VW_H) / 2 */
					if (((l2h_adll_value[sybphy_id][bit_id] +
					      h2l_adll_value[sybphy_id][bit_id]) / 2) >
					    max_center_subphy_adll[sybphy_id])
						max_center_subphy_adll[sybphy_id] =
							(l2h_adll_value[sybphy_id][bit_id] +
							 h2l_adll_value[sybphy_id][bit_id]) / 2;
					if (((l2h_adll_value[sybphy_id][bit_id] +
					      h2l_adll_value[sybphy_id][bit_id]) / 2) <
					    min_center_subphy_adll[sybphy_id])
						min_center_subphy_adll[sybphy_id] =
							(l2h_adll_value[sybphy_id][bit_id] +
							 h2l_adll_value[sybphy_id][bit_id]) / 2;
				}

				/* calculate the center of the current subphy */
				center_subphy_adll_window[sybphy_id] =
					max_center_subphy_adll[sybphy_id] -
					min_center_subphy_adll[sybphy_id];
				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_TRACE,
					 ("if_id %d sybphy_id %d min center %d max center %d center %d\n",
					  if_id, sybphy_id, min_center_subphy_adll[sybphy_id],
					  max_center_subphy_adll[sybphy_id],
					  center_subphy_adll_window[sybphy_id]));
			}
		}
		/*
		 * check byte state and fix bits state if needed
		 * in case the level 1 and 2 above subphy results are
		 * homogeneous continue to the next subphy
		 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if ((byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_LOW) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_HIGH) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_IN) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_OUT) ||
			    (byte_status[if_id][sybphy_id] == BYTE_NOT_DEFINED))
				continue;

			/*
			 * in case all of the bits in the current subphy are
			 * less than 32 which will find alignment in the subphy bits
			 * mark this subphy as homogeneous split in
			 */
			if (center_subphy_adll_window[sybphy_id] <= 31)
				byte_status[if_id][sybphy_id] = BYTE_HOMOGENEOUS_SPLIT_IN;

			/*
			 * in case the current byte is split_out and the center is bigger than 31
			 * the byte can be aligned. in this case add 64 to the low ui bits aligning it
			 * to the other ui bits
			 */
			if (center_subphy_adll_window[sybphy_id] >= 32) {
				byte_status[if_id][sybphy_id] = BYTE_SPLIT_OUT_MIX;

				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_TRACE,
					 ("if_id %d sybphy_id %d byte state 0x%x\n",
					  if_id, sybphy_id, byte_status[if_id][sybphy_id]));
				/* NOTE(review): l2h/h2l_if_train_res are set only when
				 * phase 2 ran (bit_bit_mask_active != 0) — confirm this
				 * branch is unreachable otherwise
				 */
				for (bit_id = 0; bit_id < bit_end; bit_id++) {
					if (bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] == BIT_LOW_UI) {
						l2h_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
						h2l_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("if_id %d sybphy_id %d bit_id %d added 64 adlls\n",
						  if_id, sybphy_id, bit_id));
				}
			}
		}
	} /* for all interfaces */

	return MV_OK;
}
  1335. u8 mv_ddr_tip_sub_phy_byte_status_get(u32 if_id, u32 subphy_id)
  1336. {
  1337. return byte_status[if_id][subphy_id];
  1338. }
  1339. void mv_ddr_tip_sub_phy_byte_status_set(u32 if_id, u32 subphy_id, u8 byte_status_data)
  1340. {
  1341. byte_status[if_id][subphy_id] = byte_status_data;
  1342. }
  1343. /*
  1344. * Load phy values
  1345. */
  1346. int ddr3_tip_load_phy_values(int b_load)
  1347. {
  1348. u32 bus_cnt = 0, if_id, dev_num = 0;
  1349. u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
  1350. struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
  1351. for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
  1352. VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
  1353. for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
  1354. VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
  1355. if (b_load == 1) {
  1356. CHECK_STATUS(ddr3_tip_bus_read
  1357. (dev_num, if_id,
  1358. ACCESS_TYPE_UNICAST, bus_cnt,
  1359. DDR_PHY_DATA,
  1360. CTX_PHY_REG(effective_cs),
  1361. &phy_reg_bk[if_id][bus_cnt]
  1362. [0]));
  1363. CHECK_STATUS(ddr3_tip_bus_read
  1364. (dev_num, if_id,
  1365. ACCESS_TYPE_UNICAST, bus_cnt,
  1366. DDR_PHY_DATA,
  1367. RL_PHY_REG(effective_cs),
  1368. &phy_reg_bk[if_id][bus_cnt]
  1369. [1]));
  1370. CHECK_STATUS(ddr3_tip_bus_read
  1371. (dev_num, if_id,
  1372. ACCESS_TYPE_UNICAST, bus_cnt,
  1373. DDR_PHY_DATA,
  1374. CRX_PHY_REG(effective_cs),
  1375. &phy_reg_bk[if_id][bus_cnt]
  1376. [2]));
  1377. } else {
  1378. CHECK_STATUS(ddr3_tip_bus_write
  1379. (dev_num, ACCESS_TYPE_UNICAST,
  1380. if_id, ACCESS_TYPE_UNICAST,
  1381. bus_cnt, DDR_PHY_DATA,
  1382. CTX_PHY_REG(effective_cs),
  1383. phy_reg_bk[if_id][bus_cnt]
  1384. [0]));
  1385. CHECK_STATUS(ddr3_tip_bus_write
  1386. (dev_num, ACCESS_TYPE_UNICAST,
  1387. if_id, ACCESS_TYPE_UNICAST,
  1388. bus_cnt, DDR_PHY_DATA,
  1389. RL_PHY_REG(effective_cs),
  1390. phy_reg_bk[if_id][bus_cnt]
  1391. [1]));
  1392. CHECK_STATUS(ddr3_tip_bus_write
  1393. (dev_num, ACCESS_TYPE_UNICAST,
  1394. if_id, ACCESS_TYPE_UNICAST,
  1395. bus_cnt, DDR_PHY_DATA,
  1396. CRX_PHY_REG(effective_cs),
  1397. phy_reg_bk[if_id][bus_cnt]
  1398. [2]));
  1399. }
  1400. }
  1401. }
  1402. return MV_OK;
  1403. }
/*
 * Sweep the training IP over every pattern in [start_pattern, end_pattern]
 * and every search direction, then read back and log the per-pup results.
 *
 * The CTX/RL/CRX PHY registers are backed up before the sweep and restored
 * afterwards via ddr3_tip_load_phy_values(), so the test is intended to
 * leave the PHY configuration unchanged.
 *
 * NOTE(review): the return values of ddr3_tip_ip_training_wrapper() and of
 * both ddr3_tip_load_phy_values() calls are ignored; a failed sweep still
 * returns MV_OK. Also, CHECK_STATUS on the result read returns early
 * WITHOUT restoring the saved PHY values — presumably acceptable for a
 * debug/test path, but worth confirming.
 */
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
			      enum hws_search_dir search_dir,
			      enum hws_dir direction,
			      enum hws_edge_compare edge,
			      u32 init_val1, u32 init_val2,
			      u32 num_of_iterations,
			      u32 start_pattern, u32 end_pattern)
{
	u32 pattern, if_id, pup_id;
	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
	u32 *res = NULL;
	u32 search_state = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* back up the PHY registers the training sweep will modify */
	ddr3_tip_load_phy_values(1);

	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
		     search_state++) {
			/* run one training pass for this pattern/direction */
			ddr3_tip_ip_training_wrapper(dev_num,
						     ACCESS_TYPE_MULTICAST, 0,
						     ACCESS_TYPE_MULTICAST, 0,
						     result_type,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, direction,
						     0xfff, init_val1,
						     init_val2,
						     num_of_iterations, pattern,
						     edge, CS_SINGLE,
						     PARAM_NOT_CARE,
						     train_status);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup_id = 0; pup_id <
				     octets_per_if_num;
				     pup_id++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask,
							    pup_id);
					/* fetch the result for all bits of this pup */
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, if_id,
						  ACCESS_TYPE_UNICAST, pup_id,
						  ALL_BITS_PER_PUP,
						  search_state,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &res, 1, 0,
						  0));
					/* per-byte results log one word, per-bit log eight */
					if (result_type == RESULT_PER_BYTE) {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0]));
					} else {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0],
							  res[1], res[2],
							  res[3], res[4],
							  res[5], res[6],
							  res[7]));
					}
				}
			} /* interface */
		} /* search */
	} /* pattern */

	/* restore the PHY registers saved at entry */
	ddr3_tip_load_phy_values(0);

	return MV_OK;
}
  1476. int mv_ddr_pattern_start_addr_set(struct pattern_info *pattern_tbl, enum hws_pattern pattern, u32 addr)
  1477. {
  1478. pattern_tbl[pattern].start_addr = addr;
  1479. return 0;
  1480. }
  1481. struct pattern_info *ddr3_tip_get_pattern_table()
  1482. {
  1483. struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
  1484. if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
  1485. return pattern_table_64;
  1486. else if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
  1487. return pattern_table_32;
  1488. else
  1489. return pattern_table_16;
  1490. }
  1491. u16 *ddr3_tip_get_mask_results_dq_reg()
  1492. {
  1493. #if MAX_BUS_NUM == 5
  1494. struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
  1495. if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
  1496. return mask_results_dq_reg_map_pup3_ecc;
  1497. else
  1498. #endif
  1499. return mask_results_dq_reg_map;
  1500. }
  1501. u16 *ddr3_tip_get_mask_results_pup_reg_map()
  1502. {
  1503. #if MAX_BUS_NUM == 5
  1504. struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
  1505. if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
  1506. return mask_results_pup_reg_map_pup3_ecc;
  1507. else
  1508. #endif
  1509. return mask_results_pup_reg_map;
  1510. }
  1511. /* load expected dm pattern to odpg */
  1512. #define LOW_NIBBLE_BYTE_MASK 0xf
  1513. #define HIGH_NIBBLE_BYTE_MASK 0xf0
  1514. int mv_ddr_load_dm_pattern_to_odpg(enum hws_access_type access_type, enum hws_pattern pattern,
  1515. enum dm_direction dm_dir)
  1516. {
  1517. struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
  1518. struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
  1519. u32 pattern_len = 0;
  1520. u32 data_low, data_high;
  1521. u8 dm_data;
  1522. for (pattern_len = 0;
  1523. pattern_len < pattern_table[pattern].pattern_len;
  1524. pattern_len++) {
  1525. if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
  1526. data_low = pattern_table_get_word(0, pattern, (u8)pattern_len);
  1527. data_high = data_low;
  1528. } else {
  1529. data_low = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2));
  1530. data_high = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2 + 1));
  1531. }
  1532. /* odpg mbus dm definition is opposite to ddr4 protocol */
  1533. if (dm_dir == DM_DIR_INVERSE)
  1534. dm_data = ~((data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK));
  1535. else
  1536. dm_data = (data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK);
  1537. ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_LOW_REG, data_low, MASK_ALL_BITS);
  1538. ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_HIGH_REG, data_high, MASK_ALL_BITS);
  1539. ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_ADDR_REG,
  1540. pattern_len | ((dm_data & ODPG_DATA_WR_DATA_MASK) << ODPG_DATA_WR_DATA_OFFS),
  1541. MASK_ALL_BITS);
  1542. }
  1543. return MV_OK;
  1544. }