ddr3_sdram.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_hw_training.h"
#include "xor.h"
#include "xor_regs.h"

static void ddr3_flush_l1_line(u32 line);

extern u32 pbs_pattern[2][LEN_16BIT_PBS_PATTERN];
extern u32 pbs_pattern_32b[2][LEN_PBS_PATTERN];
#if defined(MV88F78X60)
extern u32 pbs_pattern_64b[2][LEN_PBS_PATTERN];
#endif
extern u32 pbs_dq_mapping[PUP_NUM_64BIT][DQ_NUM];

#if defined(MV88F78X60) || defined(MV88F672X)
/* PBS locked dq (per pup) */
u32 pbs_locked_dq[MAX_PUP_NUM][DQ_NUM] = { { 0 } };
u32 pbs_locked_dm[MAX_PUP_NUM] = { 0 };
u32 pbs_locked_value[MAX_PUP_NUM][DQ_NUM] = { { 0 } };
int per_bit_data[MAX_PUP_NUM][DQ_NUM];
#endif

static u32 sdram_data[LEN_KILLER_PATTERN] __aligned(32) = { 0 };
static struct crc_dma_desc dma_desc __aligned(32) = { 0 };

#define XOR_TIMEOUT	0x8000000

struct xor_channel_t {
	struct crc_dma_desc *desc;
	unsigned long desc_phys_addr;
};

#define XOR_CAUSE_DONE_MASK(chan)	((0x1 | 0x2) << (chan * 16))

void xor_waiton_eng(int chan)
{
	int timeout;

	timeout = 0;
	while (!(reg_read(XOR_CAUSE_REG(XOR_UNIT(chan))) &
		 XOR_CAUSE_DONE_MASK(XOR_CHAN(chan)))) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;
		timeout++;
	}

	timeout = 0;
	while (mv_xor_state_get(chan) != MV_IDLE) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;
		timeout++;
	}

	/* Clear int */
	reg_write(XOR_CAUSE_REG(XOR_UNIT(chan)),
		  ~(XOR_CAUSE_DONE_MASK(XOR_CHAN(chan))));

timeout:
	return;
}
static int special_compare_pattern(u32 uj)
{
	if ((uj == 30) || (uj == 31) || (uj == 61) || (uj == 62) ||
	    (uj == 93) || (uj == 94) || (uj == 126) || (uj == 127))
		return 1;

	return 0;
}

/*
 * Compare code extracted as it's used by multiple functions. This
 * reduces code size and makes it easier to maintain. Additionally,
 * the code is not indented as deeply and is therefore easier to read.
 */
static void compare_pattern_v1(u32 uj, u32 *pup, u32 *pattern,
			       u32 pup_groups, int debug_dqs)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;
	__maybe_unused u32 dq;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0xFF)) {
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
			var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);

			if (var1 != var2) {
				*pup |= (1 << (uk + (PUP_NUM_32BIT *
						     (uj % pup_groups))));

#ifdef MV_DEBUG_DQS
				if (!debug_dqs)
					continue;

				for (dq = 0; dq < DQ_NUM; dq++) {
					val = uk + (PUP_NUM_32BIT *
						    (uj % pup_groups));
					if (((var1 >> dq) & 0x1) !=
					    ((var2 >> dq) & 0x1))
						per_bit_data[val][dq] = 1;
					else
						per_bit_data[val][dq] = 0;
				}
#endif
			}
		}
	}
}

static void compare_pattern_v2(u32 uj, u32 *pup, u32 *pattern)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0x3)) {
		/* Found error */
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = (sdram_data[uj] >> val) & CMP_BYTE_MASK;
			var2 = (pattern[uj] >> val) & CMP_BYTE_MASK;

			if (var1 != var2)
				*pup |= (1 << (uk % PUP_NUM_16BIT));
		}
	}
}
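/*
 * Illustrative sketch (not part of the training flow): how a single
 * mismatching 32-bit word is split into byte lanes by the compare
 * helpers above. The two data words are made-up placeholders chosen
 * only to show the arithmetic; CMP_BYTE_SHIFT / CMP_BYTE_MASK are
 * assumed to be 8 and 0xFF, as implied by the byte-lane loop.
 */
#if 0
static void compare_example(void)
{
	u32 read_val = 0x11223344;	/* placeholder "read" word */
	u32 expect_val = 0x11AA3344;	/* placeholder expected word */
	u32 failed_pup = 0;
	u32 shift, rd, exp, uk;

	for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
		shift = CMP_BYTE_SHIFT * uk;	/* 0, 8, 16, 24 */
		rd = (read_val >> shift) & CMP_BYTE_MASK;
		exp = (expect_val >> shift) & CMP_BYTE_MASK;

		/* Only byte lane 2 differs here (0x22 vs 0xAA) */
		if (rd != exp)
			failed_pup |= (1 << uk);
	}

	/* failed_pup == 0x4: only PUP #2 failed the compare */
}
#endif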
/*
 * Name:     ddr3_sdram_compare
 * Desc:     Execute compare per PUP
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           pattern_len      Length of pattern (in 32-bit words)
 *           sdram_offset     Offset address into the SDRAM
 *           write            Write to the SDRAM before read
 *           mask             Compare pattern with mask
 *           mask_pattern     Mask to compare pattern
 *           special_compare  Skip the special-compare word indices
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
		       u32 *new_locked_pup, u32 *pattern,
		       u32 pattern_len, u32 sdram_offset, int write,
		       int mask, u32 *mask_pattern,
		       int special_compare)
{
	u32 uj;
	__maybe_unused u32 pup_groups;
	__maybe_unused u32 dq;

#if !defined(MV88F67XX)
	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;
#endif

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

#if defined(MV88F78X60) || defined(MV88F672X)
		compare_pattern_v1(uj, new_locked_pup, pattern, pup_groups, 1);
#elif defined(MV88F67XX)
		compare_pattern_v2(uj, new_locked_pup, pattern);
#endif
	}

	return MV_OK;
}
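/*
 * Hedged usage sketch: a typical write-then-read compare call, in the
 * form the training algorithms are expected to issue it. The pattern
 * buffer, its length and the SDRAM offset are placeholders; only the
 * ddr3_sdram_compare() signature itself is taken from this file.
 */
#if 0
static void sdram_compare_example(MV_DRAM_INFO *dram_info)
{
	u32 unlock_pup = 0xF;			/* placeholder: pups still under test */
	u32 new_locked_pup = 0;			/* filled with failing pups */
	u32 pattern[LEN_KILLER_PATTERN];	/* placeholder pattern buffer */
	u32 sdram_offset = 0x100000;		/* placeholder DRAM offset */

	/* Write the pattern first (write == 1), no masking, no special skip */
	if (ddr3_sdram_compare(dram_info, unlock_pup, &new_locked_pup,
			       pattern, LEN_KILLER_PATTERN, sdram_offset,
			       1, 0, NULL, 0) != MV_OK)
		return;

	/* Bits set in new_locked_pup now mark pups that failed the compare */
}
#endif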
#if defined(MV88F78X60) || defined(MV88F672X)
/*
 * Name:     ddr3_sdram_dm_compare
 * Desc:     Execute compare per PUP
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           sdram_offset     Offset address into the SDRAM
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_dm_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			  u32 *new_locked_pup, u32 *pattern,
			  u32 sdram_offset)
{
	u32 uj, uk, var1, var2, pup_groups;
	u32 val;
	u32 pup = 0;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_dram_sram_burst((u32)pattern, SDRAM_PBS_TX_OFFS,
			     LEN_PBS_PATTERN);
	ddr3_dram_sram_burst(SDRAM_PBS_TX_OFFS, (u32)sdram_data,
			     LEN_PBS_PATTERN);

	/* Validate the correctness of the results */
	for (uj = 0; uj < LEN_PBS_PATTERN; uj++)
		compare_pattern_v1(uj, &pup, pattern, pup_groups, 0);

	/* Test the DM signals */
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x10) = 0x12345678;
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x14) = 0x12345678;

	sdram_data[0] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x10);
	sdram_data[1] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x14);

	for (uj = 0; uj < 2; uj++) {
		if (((sdram_data[uj]) != (pattern[uj])) &&
		    (*new_locked_pup != 0xFF)) {
			for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
				val = CMP_BYTE_SHIFT * uk;
				var1 = ((sdram_data[uj] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);

				if (var1 != var2) {
					*new_locked_pup |= (1 << (uk +
						(PUP_NUM_32BIT *
						 (uj % pup_groups))));
					*new_locked_pup |= pup;
				}
			}
		}
	}

	return MV_OK;
}

/*
 * Name:     ddr3_sdram_pbs_compare
 * Desc:     Execute SRAM compare per PUP and DQ.
 * Args:     pup_locked           Bit array of locked pups
 *           is_tx                Indicate whether Rx or Tx
 *           pbs_pattern_idx      Index of PBS pattern
 *           pbs_curr_val         The PBS value
 *           pbs_lock_val         The value to set to locked PBS
 *           skew_array           Global array to update with the compare results
 *           unlock_pup_dq_array  Bit array of the locked / unlocked pups per dq
 *           ecc                  Run the compare on the ECC pup only
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_pbs_compare(MV_DRAM_INFO *dram_info, u32 pup_locked,
			   int is_tx, u32 pbs_pattern_idx,
			   u32 pbs_curr_val, u32 pbs_lock_val,
			   u32 *skew_array, u8 *unlock_pup_dq_array,
			   u32 ecc)
{
	/* Bit array of failed dqs per pup for the current compare */
	u32 pbs_write_pup[DQ_NUM] = { 0 };
	u32 update_pup;		/* pup as HW convention */
	u32 max_pup;		/* maximal pup index */
	u32 pup_addr;
	u32 ui, dq, pup;
	int var1, var2;
	u32 sdram_offset, pup_groups, tmp_pup;
	u32 *pattern_ptr;
	u32 val;

	/* Choose pattern */
	switch (dram_info->ddr_width) {
#if defined(MV88F672X)
	case 16:
		pattern_ptr = (u32 *)&pbs_pattern[pbs_pattern_idx];
		break;
#endif
	case 32:
		pattern_ptr = (u32 *)&pbs_pattern_32b[pbs_pattern_idx];
		break;
#if defined(MV88F78X60)
	case 64:
		pattern_ptr = (u32 *)&pbs_pattern_64b[pbs_pattern_idx];
		break;
#endif
	default:
		return MV_FAIL;
	}

	max_pup = dram_info->num_of_std_pups;
	sdram_offset = SDRAM_PBS_I_OFFS + pbs_pattern_idx * SDRAM_PBS_NEXT_OFFS;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (is_tx == 1) {
		ddr3_dram_sram_burst((u32)pattern_ptr, sdram_offset,
				     LEN_PBS_PATTERN);
	}

	ddr3_dram_sram_read(sdram_offset, (u32)sdram_data, LEN_PBS_PATTERN);

	/* Compare read result to write */
	for (ui = 0; ui < LEN_PBS_PATTERN; ui++) {
		if ((sdram_data[ui]) != (pattern_ptr[ui])) {
			/* Found error in low pup group */
			for (pup = 0; pup < PUP_NUM_32BIT; pup++) {
				val = CMP_BYTE_SHIFT * pup;
				var1 = ((sdram_data[ui] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern_ptr[ui] >> val) &
					CMP_BYTE_MASK);

				if (var1 != var2) {
					if (dram_info->ddr_width > 16) {
						tmp_pup = (pup + PUP_NUM_32BIT *
							   (ui % pup_groups));
					} else {
						tmp_pup = (pup % PUP_NUM_16BIT);
					}

					update_pup = (1 << tmp_pup);
					if (ecc && (update_pup != 0x1))
						continue;

					/*
					 * Pup is failed - Go over all DQs and
					 * look for failures
					 */
					for (dq = 0; dq < DQ_NUM; dq++) {
						val = tmp_pup * (1 - ecc) +
							ecc * ECC_PUP;
						if (((var1 >> dq) & 0x1) !=
						    ((var2 >> dq) & 0x1)) {
							if (pbs_locked_dq[val][dq] == 1 &&
							    pbs_locked_value[val][dq] != pbs_curr_val)
								continue;

							/*
							 * Activate write to
							 * update PBS to
							 * pbs_lock_val
							 */
							pbs_write_pup[dq] |=
								update_pup;

							/*
							 * Update the
							 * unlock_pup_dq_array
							 */
							unlock_pup_dq_array[dq] &=
								~update_pup;

							/*
							 * Lock PBS value for
							 * failed bits in
							 * compare operation
							 */
							skew_array[tmp_pup * DQ_NUM + dq] =
								pbs_curr_val;
						}
					}
				}
			}
		}
	}

	pup_addr = (is_tx == 1) ? PUP_PBS_TX : PUP_PBS_RX;

	/* Set last failed bits PBS to min / max pbs value */
	for (dq = 0; dq < DQ_NUM; dq++) {
		for (pup = 0; pup < max_pup; pup++) {
			if (pbs_write_pup[dq] & (1 << pup)) {
				val = pup * (1 - ecc) + ecc * ECC_PUP;
				if (pbs_locked_dq[val][dq] == 1 &&
				    pbs_locked_value[val][dq] != pbs_curr_val)
					continue;

				/* Mark the dq as locked */
				pbs_locked_dq[val][dq] = 1;
				pbs_locked_value[val][dq] = pbs_curr_val;
				ddr3_write_pup_reg(pup_addr +
						   pbs_dq_mapping[val][dq],
						   CS0, val, 0, pbs_lock_val);
			}
		}
	}

	return MV_OK;
}
#endif
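/*
 * Hedged note on the PBS bookkeeping above: skew_array is indexed as
 * skew_array[pup * DQ_NUM + dq], i.e. one entry per (pup, dq) pair,
 * recording the PBS step at which that data bit first failed. The
 * sketch below only illustrates that indexing; the pup/dq values and
 * the array itself are placeholders, not taken from the training flow.
 */
#if 0
static u32 pbs_skew_lookup(u32 *skew_array, u32 pup, u32 dq)
{
	/* Same (pup, dq) layout as used by ddr3_sdram_pbs_compare() */
	return skew_array[pup * DQ_NUM + dq];
}
#endif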
/*
 * Name:     ddr3_sdram_direct_compare
 * Desc:     Execute compare per PUP without DMA (no burst mode)
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           pattern_len      Length of pattern (in 32-bit words)
 *           sdram_offset     Offset address into the SDRAM
 *           write            Write to the SDRAM before read
 *           mask             Compare pattern with mask
 *           mask_pattern     Mask to compare pattern
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_direct_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			      u32 *new_locked_pup, u32 *pattern,
			      u32 pattern_len, u32 sdram_offset,
			      int write, int mask, u32 *mask_pattern)
{
	u32 uj, uk, pup_groups;
	u32 *sdram_addr;	/* used to read from SDRAM */

	sdram_addr = (u32 *)sdram_offset;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	/* Check if need to write before read */
	if (write == 1) {
		for (uk = 0; uk < pattern_len; uk++) {
			*sdram_addr = pattern[uk];
			sdram_addr++;
		}
	}

	sdram_addr = (u32 *)sdram_offset;
	for (uk = 0; uk < pattern_len; uk++) {
		sdram_data[uk] = *sdram_addr;
		sdram_addr++;
	}

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 0);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}
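/*
 * Note: unlike ddr3_sdram_compare() above, this "direct" variant uses
 * plain CPU loads and stores rather than the XOR-engine burst, so the
 * write and read-back go through the normal CPU path (presumably
 * useful while the XOR/DMA path itself is not yet trusted).
 */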
/*
 * Name:     ddr3_dram_sram_burst
 * Desc:     Copy between SDRAM and SRAM in bursts of 64 bytes
 * Args:     src   Source address
 *           dst   Destination address
 *           len   Length of the transfer (in 32-bit words)
 * Notes:    Uses the XOR mechanism
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_dram_sram_burst(u32 src, u32 dst, u32 len)
{
	u32 chan, byte_count, cs_num, byte;
	struct xor_channel_t channel;

	chan = 0;
	byte_count = len * 4;

	/* Wait for previous transfer completion */
	while (mv_xor_state_get(chan) != MV_IDLE)
		;

	/* Build the channel descriptor */
	channel.desc = &dma_desc;

	/* Enable Address Override and set correct src and dst */
	if (src < SRAM_BASE) {
		/* src is DRAM CS, dst is SRAM */
		cs_num = (src / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 1) | (1 << 0)));
		channel.desc->src_addr0 = (src % (1 + SDRAM_CS_SIZE));
		channel.desc->dst_addr = dst;
	} else {
		/* src is SRAM, dst is DRAM CS */
		cs_num = (dst / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 25) | (1 << 24)));
		channel.desc->src_addr0 = src;
		channel.desc->dst_addr = (dst % (1 + SDRAM_CS_SIZE));
	}

	channel.desc->src_addr1 = 0;
	channel.desc->byte_cnt = byte_count;
	channel.desc->next_desc_ptr = 0;
	channel.desc->status = 1 << 31;
	channel.desc->desc_cmd = 0x0;
	channel.desc_phys_addr = (unsigned long)&dma_desc;

	ddr3_flush_l1_line((u32)&dma_desc);

	/* Issue the transfer */
	if (mv_xor_transfer(chan, MV_DMA, channel.desc_phys_addr) != MV_OK)
		return MV_FAIL;

	/* Wait for completion */
	xor_waiton_eng(chan);

	if (dst > SRAM_BASE) {
		for (byte = 0; byte < byte_count; byte += 0x20)
			cache_inv(dst + byte);
	}

	return MV_OK;
}
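/*
 * Hedged worked example (not from the original sources) for the
 * address-override arithmetic in ddr3_dram_sram_burst() above,
 * assuming SDRAM_CS_SIZE is the per-chip-select window size minus one
 * (e.g. 0xFFFFFFF for 256 MiB windows); the source address below is a
 * made-up placeholder:
 *
 *	src       = 0x12345678
 *	cs_num    = src / (1 + SDRAM_CS_SIZE)  ->  0x12345678 / 0x10000000 = 1
 *	src_addr0 = src % (1 + SDRAM_CS_SIZE)  ->  0x02345678
 *
 * i.e. the XOR descriptor carries the offset inside the CS window,
 * while the override register steers the transaction to the right CS.
 */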
/*
 * Name:     ddr3_flush_l1_line
 * Desc:     Flush the two L1 cache lines covering the given address,
 *           using the ARMv6 or ARMv7 variant depending on the CPU
 * Args:     line  Address of the buffer to flush
 * Notes:
 * Returns:  None.
 */
static void ddr3_flush_l1_line(u32 line)
{
	u32 reg;

#if defined(MV88F672X)
	reg = 1;
#else
	reg = reg_read(REG_SAMPLE_RESET_LOW_ADDR) &
		(1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#ifdef MV88F67XX
	reg = ~reg & (1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#endif
#endif

	if (reg) {
		/* V7 Arch mode */
		flush_l1_v7(line);
		flush_l1_v7(line + CACHE_LINE_SIZE);
	} else {
		/* V6 Arch mode */
		flush_l1_v6(line);
		flush_l1_v6(line + CACHE_LINE_SIZE);
	}
}

int ddr3_dram_sram_read(u32 src, u32 dst, u32 len)
{
	u32 ui;
	u32 *dst_ptr, *src_ptr;

	dst_ptr = (u32 *)dst;
	src_ptr = (u32 *)src;

	for (ui = 0; ui < len; ui++) {
		*dst_ptr = *src_ptr;
		dst_ptr++;
		src_ptr++;
	}

	return MV_OK;
}

int ddr3_sdram_dqs_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			   u32 *new_locked_pup, u32 *pattern,
			   u32 pattern_len, u32 sdram_offset, int write,
			   int mask, u32 *mask_pattern,
			   int special_compare)
{
	u32 uj, pup_groups;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 1);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}

void ddr3_reset_phy_read_fifo(void)
{
	u32 reg;

	/* Reset the read FIFO */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Start Auto Read Leveling procedure */
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);

	/* 0x15B0 - Training Register */
	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));

	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	do {
		reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
			(1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS);
	} while (reg);		/* Wait for '0' */

	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Clear Auto Read Leveling procedure */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);

	/* 0x15B0 - Training Register */
	reg_write(REG_DRAM_TRAINING_ADDR, reg);
}