dmc_init_ddr3.c 27 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866
  1. /*
  2. * DDR3 mem setup file for board based on EXYNOS5
  3. *
  4. * Copyright (C) 2012 Samsung Electronics
  5. *
  6. * SPDX-License-Identifier: GPL-2.0+
  7. */
  8. #include <common.h>
  9. #include <config.h>
  10. #include <asm/io.h>
  11. #include <asm/arch/clock.h>
  12. #include <asm/arch/cpu.h>
  13. #include <asm/arch/dmc.h>
  14. #include <asm/arch/power.h>
  15. #include "common_setup.h"
  16. #include "exynos5_setup.h"
  17. #include "clock_init.h"
  18. #define TIMEOUT_US 10000
  19. #define NUM_BYTE_LANES 4
  20. #define DEFAULT_DQS 8
  21. #define DEFAULT_DQS_X4 ((DEFAULT_DQS << 24) || (DEFAULT_DQS << 16) \
  22. || (DEFAULT_DQS << 8) || (DEFAULT_DQS << 0))
  23. #ifdef CONFIG_EXYNOS5250
/*
 * Pulse the LPDDR3 PHY reset: write the reset-off value followed by the
 * reset value so the PHY restarts from a known state before training.
 */
static void reset_phy_ctrl(void)
{
	struct exynos5_clock *clk =
		(struct exynos5_clock *)samsung_get_base_clock();

	/* Order matters: deassert first, then assert reset. */
	writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
	writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
}
/**
 * ddr3_mem_ctrl_init() - Bring up the DDR3 controller and PHYs (EXYNOS5250).
 * @mem:	board memory timing/configuration parameters
 * @reset:	non-zero to pulse the DDR PHY reset before programming
 *
 * Programs both DMC PHYs (drive strength, read latency, ZQ calibration,
 * DQ/DQS settings, DLL locking), configures the memory controller
 * (interleaving, memory map, AC timings), issues the DRAM init command
 * sequence and, when enabled in @mem, runs hardware read (gate) leveling.
 *
 * Return: 0 on success, SETUP_ERR_ZQ_CALIBRATION_FAILURE or
 * SETUP_ERR_RDLV_COMPLETE_TIMEOUT on failure.
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
	unsigned int val;
	struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5_dmc *dmc;
	int i;

	/* PHY1 sits at a fixed offset (DMC_OFFSET) above PHY0 */
	phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
						   + DMC_OFFSET);
	dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();

	if (reset)
		reset_phy_ctrl();

	/* Set Impedance Output Driver */
	val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CS_DRVR_DS_OFFSET) |
		(mem->impedance << CA_ADR_DRVR_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* ZQ Calibration */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	/* DQ Signal: program the DQS pull-down configuration per PHY */
	writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
	writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);

	/* Kick off DFI initialisation along with the read-fetch cycles */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
	       | (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
	       &dmc->concontrol);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* DQS Signal */
	writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
	writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);

	writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
	writel(mem->phy1_dq, &phy1_ctrl->phy_con6);

	writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
	writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);

	/* DLL control: start point, increment, DLL-on and ref settings */
	val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
		(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		(mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
		(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
	writel(val, &phy0_ctrl->phy_con12);
	writel(val, &phy1_ctrl->phy_con12);

	/* Start DLL locking */
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy0_ctrl->phy_con12);
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy1_ctrl->phy_con12);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* DFI init started above; rewrite concontrol without the start bit */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
	       &dmc->concontrol);

	/* Memory Channel Interleaving Size */
	writel(mem->iv_size, &dmc->ivcontrol);

	writel(mem->memconfig, &dmc->memconfig0);
	writel(mem->memconfig, &dmc->memconfig1);
	writel(mem->membaseconfig0, &dmc->membaseconfig0);
	writel(mem->membaseconfig1, &dmc->membaseconfig1);

	/* Precharge Configuration */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &dmc->prechconfig);

	/* Power Down mode Configuration */
	writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
	       mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,
	       &dmc->pwrdnconfig);

	/* TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &dmc->timingref);
	writel(mem->timing_row, &dmc->timingrow);
	writel(mem->timing_data, &dmc->timingdata);
	writel(mem->timing_power, &dmc->timingpower);

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	/* Send NOP, MRS and ZQINIT commands */
	dmc_config_mrs(mem, &dmc->directcmd);

	if (mem->gate_leveling_enable) {
		/* Enable command-phase operation on both PHYs */
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Enable initial deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Enable per-byte read leveling */
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		val |= BYTE_RDLVL_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Force DLL value for leveling (ctrl_force, DLL off) */
		val = (mem->ctrl_start_point <<
		       PHY_CON12_CTRL_START_POINT_SHIFT) |
		      (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		      (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
		      (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
		      (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		/* Turn on gate leveling alongside deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		val |= RDLVL_GATE_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Enable the gate signal hold (CTRL_SHGATE) */
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		val |= BYTE_RDLVL_EN;
		val |= CTRL_SHGATE;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Clear the gate duration adjust field */
		val = PHY_CON1_RESET_VAL;
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);
		writel(val, &phy1_ctrl->phy_con1);

		/* Start gate leveling and poll both channels for completion */
		writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
		i = TIMEOUT_US;
		while ((readl(&dmc->phystatus) &
			(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
		       (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);

		/* Leveling done: release the DQS pull-downs */
		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		/* Restore normal DLL operation (ctrl_dll_on included) */
		val = (mem->ctrl_start_point <<
		       PHY_CON12_CTRL_START_POINT_SHIFT) |
		      (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		      (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
		      (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
		      (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
		      (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
	}

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	writel(mem->memcontrol, &dmc->memcontrol);

	/* Set DMC Concontrol and enable auto-refresh counter */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
	       | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
	return 0;
}
  184. #endif
  185. #ifdef CONFIG_EXYNOS5420
  186. /**
  187. * RAM address to use in the test.
  188. *
  189. * We'll use 4 words at this address and 4 at this address + 0x80 (Ares
  190. * interleaves channels every 128 bytes). This will allow us to evaluate all of
  191. * the chips in a 1 chip per channel (2GB) system and half the chips in a 2
  192. * chip per channel (4GB) system. We can't test the 2nd chip since we need to
  193. * do tests before the 2nd chip is enabled. Looking at the 2nd chip isn't
  194. * critical because the 1st and 2nd chip have very similar timings (they'd
  195. * better have similar timings, since there's only a single adjustment that is
  196. * shared by both chips).
  197. */
/* RAM address used by the SW read-leveling test (start of SDRAM). */
const unsigned int test_addr = CONFIG_SYS_SDRAM_BASE;

/* Test pattern with which RAM will be tested */
static const unsigned int test_pattern[] = {
	0x5a5a5a5a,
	0xa5a5a5a5,
	0xf0f0f0f0,
	0x0f0f0f0f,
};
  206. /**
  207. * This function is a test vector for sw read leveling,
  208. * it compares the read data with the written data.
  209. *
  210. * @param ch DMC channel number
  211. * @param byte_lane which DQS byte offset,
  212. * possible values are 0,1,2,3
  213. * @return TRUE if memory was good, FALSE if not.
  214. */
  215. static bool dmc_valid_window_test_vector(int ch, int byte_lane)
  216. {
  217. unsigned int read_data;
  218. unsigned int mask;
  219. int i;
  220. mask = 0xFF << (8 * byte_lane);
  221. for (i = 0; i < ARRAY_SIZE(test_pattern); i++) {
  222. read_data = readl(test_addr + i * 4 + ch * 0x80);
  223. if ((read_data & mask) != (test_pattern[i] & mask))
  224. return false;
  225. }
  226. return true;
  227. }
  228. /**
  229. * This function returns current read offset value.
  230. *
  231. * @param phy_ctrl pointer to the current phy controller
  232. */
  233. static unsigned int dmc_get_read_offset_value(struct exynos5420_phy_control
  234. *phy_ctrl)
  235. {
  236. return readl(&phy_ctrl->phy_con4);
  237. }
  238. /**
  239. * This function performs resync, so that slave DLL is updated.
  240. *
  241. * @param phy_ctrl pointer to the current phy controller
  242. */
/**
 * Force a slave-DLL resync by pulsing CTRL_OFFSETR3 in PHY_CON10
 * (set then immediately clear), so updated offsets take effect.
 *
 * @param phy_ctrl pointer to the current phy controller
 */
static void ddr_phy_set_do_resync(struct exynos5420_phy_control *phy_ctrl)
{
	setbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
	clrbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
}
  248. /**
  249. * This function sets read offset value register with 'offset'.
  250. *
  251. * ...we also call call ddr_phy_set_do_resync().
  252. *
  253. * @param phy_ctrl pointer to the current phy controller
  254. * @param offset offset to read DQS
  255. */
/**
 * Program a new read offset into PHY_CON4, then resync so the slave DLL
 * actually adopts it (see ddr_phy_set_do_resync()).
 *
 * @param phy_ctrl pointer to the current phy controller
 * @param offset   packed per-lane offset to read DQS
 */
static void dmc_set_read_offset_value(struct exynos5420_phy_control *phy_ctrl,
				      unsigned int offset)
{
	writel(offset, &phy_ctrl->phy_con4);
	ddr_phy_set_do_resync(phy_ctrl);
}
  262. /**
  263. * Convert a 2s complement byte to a byte with a sign bit.
  264. *
  265. * NOTE: you shouldn't use normal math on the number returned by this function.
  266. * As an example, -10 = 0xf6. After this function -10 = 0x8a. If you wanted
  267. * to do math and get the average of 10 and -10 (should be 0):
  268. * 0x8a + 0xa = 0x94 (-108)
  269. * 0x94 / 2 = 0xca (-54)
  270. * ...and 0xca = sign bit plus 0x4a, or -74
  271. *
  272. * Also note that you lose the ability to represent -128 since there are two
  273. * representations of 0.
  274. *
  275. * @param b The byte to convert in two's complement.
  276. * @return The 7-bit value + sign bit.
  277. */
  278. unsigned char make_signed_byte(signed char b)
  279. {
  280. if (b < 0)
  281. return 0x80 | -b;
  282. else
  283. return b;
  284. }
  285. /**
  286. * Test various shifts starting at 'start' and going to 'end'.
  287. *
  288. * For each byte lane, we'll walk through shift starting at 'start' and going
  289. * to 'end' (inclusive). When we are finally able to read the test pattern
  290. * we'll store the value in the results array.
  291. *
  292. * @param phy_ctrl pointer to the current phy controller
  293. * @param ch channel number
  294. * @param start the start shift. -127 to 127
  295. * @param end the end shift. -127 to 127
  296. * @param results we'll store results for each byte lane.
  297. */
  298. void test_shifts(struct exynos5420_phy_control *phy_ctrl, int ch,
  299. int start, int end, int results[NUM_BYTE_LANES])
  300. {
  301. int incr = (start < end) ? 1 : -1;
  302. int byte_lane;
  303. for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
  304. int shift;
  305. dmc_set_read_offset_value(phy_ctrl, DEFAULT_DQS_X4);
  306. results[byte_lane] = DEFAULT_DQS;
  307. for (shift = start; shift != (end + incr); shift += incr) {
  308. unsigned int byte_offsetr;
  309. unsigned int offsetr;
  310. byte_offsetr = make_signed_byte(shift);
  311. offsetr = dmc_get_read_offset_value(phy_ctrl);
  312. offsetr &= ~(0xFF << (8 * byte_lane));
  313. offsetr |= (byte_offsetr << (8 * byte_lane));
  314. dmc_set_read_offset_value(phy_ctrl, offsetr);
  315. if (dmc_valid_window_test_vector(ch, byte_lane)) {
  316. results[byte_lane] = shift;
  317. break;
  318. }
  319. }
  320. }
  321. }
  322. /**
  323. * This function performs SW read leveling to compensate DQ-DQS skew at
  324. * receiver it first finds the optimal read offset value on each DQS
  325. * then applies the value to PHY.
  326. *
  327. * Read offset value has its min margin and max margin. If read offset
  328. * value exceeds its min or max margin, read data will have corruption.
  329. * To avoid this we are doing sw read leveling.
  330. *
  331. * SW read leveling is:
  332. * 1> Finding offset value's left_limit and right_limit
  333. * 2> and calculate its center value
  334. * 3> finally programs that center value to PHY
  335. * 4> then PHY gets its optimal offset value.
  336. *
  337. * @param phy_ctrl pointer to the current phy controller
  338. * @param ch channel number
  339. * @param coarse_lock_val The coarse lock value read from PHY_CON13.
  340. * (0 - 0x7f)
  341. */
/**
 * Perform SW read leveling: find the optimal read offset for each DQS
 * lane and program it into the PHY.
 *
 * The valid read-offset window has a left and a right margin; reading
 * outside it corrupts data. We locate both edges and program the centre:
 * 1> find the offset's left_limit and right_limit
 * 2> calculate the centre value per lane
 * 3> program that centre value into the PHY
 *
 * @param phy_ctrl pointer to the current phy controller
 * @param ch channel number
 * @param coarse_lock_val The coarse lock value read from PHY_CON13.
 *		(0 - 0x7f)
 */
static void software_find_read_offset(struct exynos5420_phy_control *phy_ctrl,
				      int ch, unsigned int coarse_lock_val)
{
	unsigned int offsetr_cent;
	int byte_lane;
	int left_limit;
	int right_limit;
	int left[NUM_BYTE_LANES];
	int right[NUM_BYTE_LANES];
	int i;

	/* Fill the memory with test patterns */
	for (i = 0; i < ARRAY_SIZE(test_pattern); i++)
		writel(test_pattern[i], test_addr + i * 4 + ch * 0x80);

	/* Figure out the limits we'll test with; keep -127 < limit < 127 */
	left_limit = DEFAULT_DQS - coarse_lock_val;
	right_limit = DEFAULT_DQS + coarse_lock_val;
	if (right_limit > 127)
		right_limit = 127;

	/* Fill in the location where reads were OK from left and right */
	test_shifts(phy_ctrl, ch, left_limit, right_limit, left);
	test_shifts(phy_ctrl, ch, right_limit, left_limit, right);

	/* Make a final value by taking the center between the left and right */
	offsetr_cent = 0;
	for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
		int temp_center;
		unsigned int vmwc;

		/* NOTE: plain average of sign-magnitude edges; see the
		 * caveats documented on make_signed_byte(). */
		temp_center = (left[byte_lane] + right[byte_lane]) / 2;
		vmwc = make_signed_byte(temp_center);
		offsetr_cent |= vmwc << (8 * byte_lane);
	}

	dmc_set_read_offset_value(phy_ctrl, offsetr_cent);
}
/**
 * ddr3_mem_ctrl_init() - Bring up the DDR3 controllers and PHYs (EXYNOS5420).
 * @mem:	board memory timing/configuration parameters (may be updated:
 *		memcontrol and chip counts are adjusted for 1 vs 2 chips)
 * @reset:	non-zero for a cold init (DRAM contents may be destroyed);
 *		zero on resume, when DRAM contents must be preserved
 *
 * Configures BPLL bypass, both DREX controllers, both PHYs (DDR3 mode,
 * drive strength, ZQ calibration, DFI init), the TZASC memory map, gate
 * leveling (if enabled) and SW read leveling, then enables auto refresh.
 *
 * Return: 0 on success, SETUP_ERR_ZQ_CALIBRATION_FAILURE or
 * SETUP_ERR_RDLV_COMPLETE_TIMEOUT on failure.
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
	struct exynos5420_clock *clk =
		(struct exynos5420_clock *)samsung_get_base_clock();
	struct exynos5420_power *power =
		(struct exynos5420_power *)samsung_get_base_power();
	struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5420_dmc *drex0, *drex1;
	struct exynos5420_tzasc *tzasc0, *tzasc1;
	struct exynos5_power *pmu;
	uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
	uint32_t lock0_info, lock1_info;
	int chip;
	int i;

	/* Second instance of each block sits DMC_OFFSET above the first */
	phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
						      + DMC_OFFSET);
	drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
	drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
					  + DMC_OFFSET);
	tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
	tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
					     + DMC_OFFSET);
	pmu = (struct exynos5_power *)EXYNOS5420_POWER_BASE;

	/* Pick one or two chips per channel based on the bank count */
	if (CONFIG_NR_DRAM_BANKS > 4) {
		/* Need both controllers. */
		mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_2;
		mem->chips_per_channel = 2;
		mem->chips_to_configure = 2;
	} else {
		/* 2GB requires a single controller */
		mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_1;
	}

	/* Enable PAUSE for DREX */
	setbits_le32(&clk->pause, ENABLE_BIT);

	/* Enable BYPASS mode while switching the BPLL source */
	setbits_le32(&clk->bpll_con1, BYPASS_EN);

	writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
	/* Wait for the mux to actually select FOUTBPLL */
	do {
		val = readl(&clk->mux_stat_cdrex);
		val &= BPLL_SEL_MASK;
	} while (val != FOUTBPLL);

	clrbits_le32(&clk->bpll_con1, BYPASS_EN);

	/* Specify the DDR memory type as DDR3 */
	val = readl(&phy0_ctrl->phy_con0);
	val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	writel(val, &phy0_ctrl->phy_con0);

	val = readl(&phy1_ctrl->phy_con0);
	val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
	writel(val, &phy1_ctrl->phy_con0);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* Program the DDR3 write-data enable timing on both PHYs */
	val = readl(&phy0_ctrl->phy_con26);
	val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
	val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
	writel(val, &phy0_ctrl->phy_con26);

	val = readl(&phy1_ctrl->phy_con26);
	val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
	val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
	writel(val, &phy1_ctrl->phy_con26);

	/*
	 * Set Driver strength for CK, CKE, CS & CA to 0x7
	 * Set Driver strength for Data Slice 0~3 to 0x7
	 */
	val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
		(0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
	val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
		(0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* ZQ Calibration */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
	clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);

	/* DQ Signal: OR in the DQS pull-down bits per PHY */
	val = readl(&phy0_ctrl->phy_con14);
	val |= mem->phy0_pulld_dqs;
	writel(val, &phy0_ctrl->phy_con14);
	val = readl(&phy1_ctrl->phy_con14);
	val |= mem->phy1_pulld_dqs;
	writel(val, &phy1_ctrl->phy_con14);

	/* Enable memory and PHY termination */
	val = MEM_TERM_EN | PHY_TERM_EN;
	writel(val, &drex0->phycontrol0);
	writel(val, &drex1->phycontrol0);

	/* Start DFI initialisation on both DREX controllers */
	writel(mem->concontrol |
	       (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
	       (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
	       &drex0->concontrol);
	writel(mem->concontrol |
	       (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
	       (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
	       &drex1->concontrol);

	/* Busy-wait until each PHY reports DFI init complete */
	do {
		val = readl(&drex0->phystatus);
	} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
	do {
		val = readl(&drex1->phystatus);
	} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);

	clrbits_le32(&drex0->concontrol, DFI_INIT_START);
	clrbits_le32(&drex1->concontrol, DFI_INIT_START);

	update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
	update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);

	/*
	 * Set Base Address:
	 * 0x2000_0000 ~ 0x5FFF_FFFF
	 * 0x6000_0000 ~ 0x9FFF_FFFF
	 */
	/* MEMBASECONFIG0 */
	val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
		DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
	writel(val, &tzasc0->membaseconfig0);
	writel(val, &tzasc1->membaseconfig0);

	/* MEMBASECONFIG1 */
	val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
		DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
	writel(val, &tzasc0->membaseconfig1);
	writel(val, &tzasc1->membaseconfig1);

	/*
	 * Memory Channel Interleaving Size
	 * Ares Channel interleaving = 128 bytes
	 */
	/* MEMCONFIG0/1 */
	writel(mem->memconfig, &tzasc0->memconfig0);
	writel(mem->memconfig, &tzasc1->memconfig0);
	writel(mem->memconfig, &tzasc0->memconfig1);
	writel(mem->memconfig, &tzasc1->memconfig1);

	/* Precharge Configuration */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &drex0->prechconfig0);
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &drex1->prechconfig0);

	/*
	 * TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &drex0->timingref);
	writel(mem->timing_ref, &drex1->timingref);
	writel(mem->timing_row, &drex0->timingrow0);
	writel(mem->timing_row, &drex1->timingrow0);
	writel(mem->timing_data, &drex0->timingdata0);
	writel(mem->timing_data, &drex1->timingdata0);
	writel(mem->timing_power, &drex0->timingpower0);
	writel(mem->timing_power, &drex1->timingpower0);

	if (reset) {
		/*
		 * Send NOP, MRS and ZQINIT commands
		 * Sending MRS command will reset the DRAM. We should not be
		 * resetting the DRAM after resume, this will lead to memory
		 * corruption as DRAM content is lost after DRAM reset
		 */
		dmc_config_mrs(mem, &drex0->directcmd);
		dmc_config_mrs(mem, &drex1->directcmd);
	}

	/*
	 * Get PHY_CON13 from both phys. Gate CLKM around reading since
	 * PHY_CON13 is glitchy when CLKM is running. We're paranoid and
	 * wait until we get a "fine lock", though a coarse lock is probably
	 * OK (we only use the coarse numbers below). We try to gate the
	 * clock for as short a time as possible in case SDRAM is somehow
	 * sensitive. sdelay(10) in the loop is arbitrary to make sure
	 * there is some time for PHY_CON13 to get updated. In practice
	 * no delay appears to be needed.
	 */
	val = readl(&clk->gate_bus_cdrex);
	while (true) {
		writel(val & ~0x1, &clk->gate_bus_cdrex);
		lock0_info = readl(&phy0_ctrl->phy_con13);
		writel(val, &clk->gate_bus_cdrex);
		if ((lock0_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
			break;
		sdelay(10);
	}
	while (true) {
		writel(val & ~0x2, &clk->gate_bus_cdrex);
		lock1_info = readl(&phy1_ctrl->phy_con13);
		writel(val, &clk->gate_bus_cdrex);
		if ((lock1_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
			break;
		sdelay(10);
	}

	if (!reset) {
		/*
		 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
		 * pad retention, CKE goes high. This causes memory contents
		 * not to be retained during DRAM initialization. Therefore,
		 * there is a new control register(0x100431e8[28]) which lets us
		 * release pad retention and retain the memory content until the
		 * initialization is complete.
		 */
		writel(PAD_RETENTION_DRAM_COREBLK_VAL,
		       &power->pad_retention_dram_coreblk_option);
		do {
			val = readl(&power->pad_retention_dram_status);
		} while (val != 0x1);

		/*
		 * CKE PAD retention disables DRAM self-refresh mode.
		 * Send auto refresh command for DRAM refresh.
		 */
		for (i = 0; i < 128; i++) {
			for (chip = 0; chip < mem->chips_to_configure; chip++) {
				writel(DIRECT_CMD_REFA |
				       (chip << DIRECT_CMD_CHIP_SHIFT),
				       &drex0->directcmd);
				writel(DIRECT_CMD_REFA |
				       (chip << DIRECT_CMD_CHIP_SHIFT),
				       &drex1->directcmd);
			}
		}
	}

	if (mem->gate_leveling_enable) {
		/* Reset PHY_CON0 and enable command-phase operation */
		writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
		writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);

		setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
		setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);

		/* Enable initial deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Set the read-leveling pass adjust value on both PHYs */
		val = readl(&phy0_ctrl->phy_con1);
		val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
		writel(val, &phy0_ctrl->phy_con1);

		val = readl(&phy1_ctrl->phy_con1);
		val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
		writel(val, &phy1_ctrl->phy_con1);

		/*
		 * Freeze the DLL: feed the coarse lock value back in as the
		 * forced value and clear the DLL-on bit for leveling.
		 */
		n_lock_w_phy0 = (lock0_info & CTRL_LOCK_COARSE_MASK) >> 2;
		n_lock_r = readl(&phy0_ctrl->phy_con12);
		n_lock_r &= ~CTRL_DLL_ON;
		n_lock_r |= n_lock_w_phy0;
		writel(n_lock_r, &phy0_ctrl->phy_con12);

		n_lock_w_phy1 = (lock1_info & CTRL_LOCK_COARSE_MASK) >> 2;
		n_lock_r = readl(&phy1_ctrl->phy_con12);
		n_lock_r &= ~CTRL_DLL_ON;
		n_lock_r |= n_lock_w_phy1;
		writel(n_lock_r, &phy1_ctrl->phy_con12);

		/* Direct command to each chip before leveling (bank 0x3) */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex0->directcmd);
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex1->directcmd);
		}

		setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
		setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);

		setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
		setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);

		/* Clear the gate duration adjust field on both PHYs */
		val = readl(&phy0_ctrl->phy_con1);
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);

		val = readl(&phy1_ctrl->phy_con1);
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy1_ctrl->phy_con1);

		/* Run gate leveling on DREX0 and poll for completion */
		writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
		i = TIMEOUT_US;
		while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
			RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);

		/* Repeat gate leveling for DREX1 */
		writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
		i = TIMEOUT_US;
		while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
			RDLVL_COMPLETE_CHO) && (i > 0)) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);

		/* Leveling done: release the DQS pull-downs */
		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		/* Direct command to each chip after leveling (bank 0x3) */
		val = (0x3 << DIRECT_CMD_BANK_SHIFT);
		for (chip = 0; chip < mem->chips_to_configure; chip++) {
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex0->directcmd);
			writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
			       &drex1->directcmd);
		}

		/* Common Settings for Leveling */
		val = PHY_CON12_RESET_VAL;
		writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
		writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);

		setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
		setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
	}

	/*
	 * Do software read leveling
	 *
	 * Do this before we turn on auto refresh since the auto refresh can
	 * be in conflict with the resync operation that's part of setting
	 * read leveling.
	 */
	if (!reset) {
		/* restore calibrated value after resume */
		dmc_set_read_offset_value(phy0_ctrl, readl(&pmu->pmu_spare1));
		dmc_set_read_offset_value(phy1_ctrl, readl(&pmu->pmu_spare2));
	} else {
		software_find_read_offset(phy0_ctrl, 0,
					  CTRL_LOCK_COARSE(lock0_info));
		software_find_read_offset(phy1_ctrl, 1,
					  CTRL_LOCK_COARSE(lock1_info));
		/* save calibrated value to restore after resume */
		writel(dmc_get_read_offset_value(phy0_ctrl), &pmu->pmu_spare1);
		writel(dmc_get_read_offset_value(phy1_ctrl), &pmu->pmu_spare2);
	}

	/* Send PALL command */
	dmc_config_prech(mem, &drex0->directcmd);
	dmc_config_prech(mem, &drex1->directcmd);

	writel(mem->memcontrol, &drex0->memcontrol);
	writel(mem->memcontrol, &drex1->memcontrol);

	/*
	 * Set DMC Concontrol: Enable auto-refresh counter, provide
	 * read data fetch cycles and enable DREX auto set powerdown
	 * for input buffer of I/O in none read memory state.
	 */
	writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
	       (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
	       DMC_CONCONTROL_IO_PD_CON(0x2),
	       &drex0->concontrol);
	writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
	       (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
	       DMC_CONCONTROL_IO_PD_CON(0x2),
	       &drex1->concontrol);

	/*
	 * Enable Clock Gating Control for DMC
	 * this saves around 25 mw dmc power as compared to the power
	 * consumption without these bits enabled
	 */
	setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
	setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);

	/*
	 * As per Exynos5800 UM ver 0.00 section 17.13.2.1
	 * CONCONTROL register bit 3 [update_mode], Exynos5800 does not
	 * support the PHY initiated update. And it is recommended to set
	 * this field to 1'b1 during initialization
	 *
	 * When we apply PHY-initiated mode, DLL lock value is determined
	 * once at DMC init time and not updated later when we change the MIF
	 * voltage based on ASV group in kernel. Applying MC-initiated mode
	 * makes sure that DLL tracing is ON so that silicon is able to
	 * compensate the voltage variation.
	 */
	val = readl(&drex0->concontrol);
	val |= CONCONTROL_UPDATE_MODE;
	writel(val, &drex0->concontrol);
	val = readl(&drex1->concontrol);
	val |= CONCONTROL_UPDATE_MODE;
	writel(val, &drex1->concontrol);

	return 0;
}
  741. #endif