/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#include "../../../../arch/arm/mach-mvebu/serdes/a38x/sys_env_lib.h"
static struct dlb_config ddr3_dlb_config_table[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};

static struct dlb_config ddr3_dlb_config_table_a0[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};
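/*
 * Editorial note: the two tables above currently hold identical values;
 * the _a0 variant is the one returned by sys_env_dlb_config_ptr_get() for
 * A0 (and A39x) silicon, so stepping-specific tuning can be applied there
 * without touching the Z1 table.
 */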
#if defined(CONFIG_ARMADA_38X)
struct dram_modes {
	char *mode_name;
	u8 cpu_freq;
	u8 fab_freq;
	u8 chip_id;
	u8 chip_board_rev;
	struct reg_data *regs;
};

struct dram_modes ddr_modes[] = {
#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/* Conf name, CPU freq, Fab freq, Chip ID, Chip/Board, MC regs */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	{"a38x_customer_0_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID0,
	 ddr3_customer_800},
	{"a38x_customer_1_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID1,
	 ddr3_customer_800},
#else
	{"a38x_533", DDR_FREQ_533, 0, 0x0, MARVELL_BOARD, ddr3_a38x_533},
	{"a38x_667", DDR_FREQ_667, 0, 0x0, MARVELL_BOARD, ddr3_a38x_667},
	{"a38x_800", DDR_FREQ_800, 0, 0x0, MARVELL_BOARD, ddr3_a38x_800},
	{"a38x_933", DDR_FREQ_933, 0, 0x0, MARVELL_BOARD, ddr3_a38x_933},
#endif
#endif
};
#endif /* defined(CONFIG_ARMADA_38X) */

/* Translates topology map definitions to real memory size in bits */
u32 mem_size[] = {
	ADDR_SIZE_512MB, ADDR_SIZE_1GB, ADDR_SIZE_2GB, ADDR_SIZE_4GB,
	ADDR_SIZE_8GB
};

static char *ddr_type = "DDR3";

/*
 * Set 1 to use dynamic DUNIT configuration;
 * set 0 (supported for A380 and AC3) to configure the DUNIT with the values
 * set by ddr3_tip_init_specific_reg_config
 */
u8 generic_init_controller = 1;

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
static u32 ddr3_get_static_ddr_mode(void);
#endif
static int ddr3_hws_tune_training_params(u8 dev_num);
static int ddr3_update_topology_map(struct hws_topology_map *topology_map);

/* device revision */
#define DEV_VERSION_ID_REG		0x1823c
#define REVISON_ID_OFFS			8
#define REVISON_ID_MASK			0xf00

/* A38x revisions */
#define MV_88F68XX_Z1_ID		0x0
#define MV_88F68XX_A0_ID		0x4
/* A39x revisions */
#define MV_88F69XX_Z1_ID		0x2
/*
 * sys_env_device_rev_get - Get Marvell controller device revision number
 *
 * DESCRIPTION:
 *	This function returns an 8-bit value describing the device revision
 *	as defined in the Revision ID Register.
 *
 * INPUT:
 *	None.
 *
 * OUTPUT:
 *	None.
 *
 * RETURN:
 *	8-bit value describing the Marvell controller revision number
 */
u8 sys_env_device_rev_get(void)
{
	u32 value;

	value = reg_read(DEV_VERSION_ID_REG);
	return (value & (REVISON_ID_MASK)) >> REVISON_ID_OFFS;
}
/*
 * sys_env_dlb_config_ptr_get
 *
 * DESCRIPTION: returns a pointer to the DLB configuration table
 *
 * INPUT: none
 *
 * OUTPUT: pointer to the DLB configuration table
 *
 * RETURN:
 *	pointer to the DLB configuration table matching the device revision
 */
struct dlb_config *sys_env_dlb_config_ptr_get(void)
{
#ifdef CONFIG_ARMADA_39X
	return &ddr3_dlb_config_table_a0[0];
#else
	if (sys_env_device_rev_get() == MV_88F68XX_A0_ID)
		return &ddr3_dlb_config_table_a0[0];
	else
		return &ddr3_dlb_config_table[0];
#endif
}
/*
 * sys_env_get_cs_ena_from_reg
 *
 * DESCRIPTION: Get bit mask of enabled CS
 *
 * INPUT: None
 *
 * OUTPUT: None
 *
 * RETURN:
 *	Bit mask of enabled CS, 1 if only CS0 enabled,
 *	3 if both CS0 and CS1 enabled
 */
u32 sys_env_get_cs_ena_from_reg(void)
{
	return reg_read(REG_DDR3_RANK_CTRL_ADDR) &
		REG_DDR3_RANK_CTRL_CS_ENA_MASK;
}
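/*
 * ddr3_restore_and_set_final_windows (summary added for readability):
 * restores the XBAR window (4-7 or 16-19) registers saved by
 * ddr3_save_and_set_training_windows() and then switches address decoding to
 * the DDR fast-path window(s) - either sized dynamically per CS when
 * DYNAMIC_CS_SIZE_CONFIG is defined, or as a single fixed 0.5 GB window.
 */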
static void ddr3_restore_and_set_final_windows(u32 *win)
{
	u32 win_ctrl_reg, num_of_win_regs;
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 ui;

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	num_of_win_regs = 16;

	/* Return XBAR windows 4-7 or 16-19 init configuration */
	for (ui = 0; ui < num_of_win_regs; ui++)
		reg_write((win_ctrl_reg + 0x4 * ui), win[ui]);

	printf("%s Training Sequence - Switching XBAR Window to FastPath Window\n",
	       ddr_type);

#if defined DYNAMIC_CS_SIZE_CONFIG
	if (ddr3_fast_path_dynamic_cs_size_config(cs_ena) != MV_OK)
		printf("ddr3_fast_path_dynamic_cs_size_config FAILED\n");
#else
	u32 reg, cs;
	reg = 0x1fffffe1;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= (cs << 2);
			break;
		}
	}
	/* Open fast path window to 0.5 GB */
	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, reg);
#endif
}
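/*
 * ddr3_save_and_set_training_windows (summary added for readability):
 * saves the current XBAR window (4-7 or 16-19) register values into win[],
 * optionally disables L2 address filtering, closes XBAR window 19 and opens
 * one SDRAM_CS_SIZE window per enabled CS for the training sequence.
 */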
static int ddr3_save_and_set_training_windows(u32 *win)
{
	u32 cs_ena;
	u32 reg, tmp_count, cs, ui;
	u32 win_ctrl_reg, win_base_reg, win_remap_reg;
	u32 num_of_win_regs, win_jump_index;
	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	win_base_reg = REG_XBAR_WIN_4_BASE_ADDR;
	win_remap_reg = REG_XBAR_WIN_4_REMAP_ADDR;
	win_jump_index = 0x10;
	num_of_win_regs = 16;
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifdef DISABLE_L2_FILTERING_DURING_DDR_TRAINING
	/*
	 * Disable L2 filtering during DDR training
	 * (when Cross Bar window is open)
	 */
	reg_write(ADDRESS_FILTERING_END_REGISTER, 0);
#endif

	cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask;

	/* Close XBAR Window 19 - Not needed */
	/* {0x000200e8} - Open Mbus Window - 2G */
	reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0);

	/* Save XBAR Windows 4-19 init configurations */
	for (ui = 0; ui < num_of_win_regs; ui++)
		win[ui] = reg_read(win_ctrl_reg + 0x4 * ui);

	/* Open XBAR Windows 4-7 or 16-19 for other CS */
	reg = 0;
	tmp_count = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			switch (cs) {
			case 0:
				reg = 0x0e00;
				break;
			case 1:
				reg = 0x0d00;
				break;
			case 2:
				reg = 0x0b00;
				break;
			case 3:
				reg = 0x0700;
				break;
			}
			reg |= (1 << 0);
			reg |= (SDRAM_CS_SIZE & 0xffff0000);

			reg_write(win_ctrl_reg + win_jump_index * tmp_count,
				  reg);
			reg = (((SDRAM_CS_SIZE + 1) * (tmp_count)) &
			       0xffff0000);
			reg_write(win_base_reg + win_jump_index * tmp_count,
				  reg);

			if (win_remap_reg <= REG_XBAR_WIN_7_REMAP_ADDR)
				reg_write(win_remap_reg +
					  win_jump_index * tmp_count, 0);

			tmp_count++;
		}
	}

	return MV_OK;
}
/*
 * Name:     ddr3_init - Main DDR3 Init function
 * Desc:     This routine initializes the DDR3 MC and runs HW training.
 * Args:     None.
 * Notes:
 * Returns:  MV_OK on success, error status otherwise.
 */
int ddr3_init(void)
{
	u32 reg = 0;
	u32 soc_num;
	int status;
	u32 win[16];

	/* SoC/Board special initializations */
	/* Get version from internal library */
	ddr3_print_version();

	/* Add sub_version string */
	DEBUG_INIT_C("", SUB_VERSION, 1);

	/* Switching CPU to MRVL ID */
	soc_num = (reg_read(REG_SAMPLE_RESET_HIGH_ADDR) & SAR1_CPU_CORE_MASK) >>
		SAR1_CPU_CORE_OFFSET;
	switch (soc_num) {
	case 0x3:
		reg_bit_set(CPU_CONFIGURATION_REG(3), CPU_MRVL_ID_OFFSET);
		reg_bit_set(CPU_CONFIGURATION_REG(2), CPU_MRVL_ID_OFFSET);
		/* fall-through: also set the lower cores */
	case 0x1:
		reg_bit_set(CPU_CONFIGURATION_REG(1), CPU_MRVL_ID_OFFSET);
		/* fall-through */
	case 0x0:
		reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET);
		/* fall-through */
	default:
		break;
	}

	/*
	 * Set DRAM Reset Mask in case detected GPIO indication of wakeup from
	 * suspend i.e. the DRAM values will not be overwritten / reset when
	 * waking from suspend
	 */
	if (sys_env_suspend_wakeup_check() ==
	    SUSPEND_WAKEUP_ENABLED_GPIO_DETECTED) {
		reg_bit_set(REG_SDRAM_INIT_CTRL_ADDR,
			    1 << REG_SDRAM_INIT_RESET_MASK_OFFS);
	}

	/*
	 * Stage 0 - Set board configuration
	 */

	/* Check if DRAM is already initialized */
	if (reg_read(REG_BOOTROM_ROUTINE_ADDR) &
	    (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS)) {
		printf("%s Training Sequence - 2nd boot - Skip\n", ddr_type);
		return MV_OK;
	}

	/*
	 * Stage 1 - Dunit Setup
	 */

	/* Fix read ready phases for all SoC in reg 0x15c8 */
	reg = reg_read(REG_TRAINING_DEBUG_3_ADDR);
	reg &= ~(REG_TRAINING_DEBUG_3_MASK);
	reg |= 0x4;					/* Phase 0 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << REG_TRAINING_DEBUG_3_OFFS);
	reg |= (0x4 << (1 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 1 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (3 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (3 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 3 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (4 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (4 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 4 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (5 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (5 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 5 */
	reg_write(REG_TRAINING_DEBUG_3_ADDR, reg);

	/*
	 * Axi_bresp_mode[8] = Compliant,
	 * Axi_addr_decode_cntrl[11] = Internal,
	 * Axi_data_bus_width[0] = 128bit
	 */
	/* 0x14a8 - AXI Control Register */
	reg_write(REG_DRAM_AXI_CTRL_ADDR, 0);

	/*
	 * Stage 2 - Training Values Setup
	 */
	/* Set X-BAR windows for the training sequence */
	ddr3_save_and_set_training_windows(win);

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/*
	 * Load static controller configuration (in case dynamic/generic init
	 * is not enabled)
	 */
	if (generic_init_controller == 0) {
		ddr3_tip_init_specific_reg_config(0,
						  ddr_modes
						  [ddr3_get_static_ddr_mode
						   ()].regs);
	}
#endif

	/* Load topology for New Training IP */
	status = ddr3_load_topology_map();
	if (MV_OK != status) {
		printf("%s Training Sequence topology load - FAILED\n",
		       ddr_type);
		return status;
	}

	/* Tune training algorithm parameters */
	status = ddr3_hws_tune_training_params(0);
	if (MV_OK != status)
		return status;

	/* Set log level for training lib */
	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_ERROR);

	/* Start New Training IP */
	status = ddr3_hws_hw_training();
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	/*
	 * Stage 3 - Finish
	 */
	/* Restore and set windows */
	ddr3_restore_and_set_final_windows(win);

	/* Update DRAM init indication in bootROM register */
	reg = reg_read(REG_BOOTROM_ROUTINE_ADDR);
	reg_write(REG_BOOTROM_ROUTINE_ADDR,
		  reg | (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS));

	/* DLB config */
	ddr3_new_tip_dlb_config();

#if defined(ECC_SUPPORT)
	if (ddr3_if_ecc_enabled())
		ddr3_new_tip_ecc_scrub();
#endif

	printf("%s Training Sequence - Ended Successfully\n", ddr_type);

	return MV_OK;
}
/*
 * Name:     ddr3_get_cpu_freq
 * Desc:     read S@R and return CPU frequency
 * Args:
 * Notes:
 * Returns:  required value
 */
u32 ddr3_get_cpu_freq(void)
{
	return ddr3_tip_get_init_freq();
}

/*
 * Name:     ddr3_get_fab_opt
 * Desc:     return the fabric option (always 0 - no fabric on this device)
 * Args:
 * Notes:
 * Returns:  required value
 */
u32 ddr3_get_fab_opt(void)
{
	return 0;	/* No fabric */
}
/*
 * Name:     ddr3_get_static_mc_value - read a static MC register value
 * Desc:     Read reg_addr and return the field selected by offset1/mask1,
 *           OR'ed with the field selected by offset2/mask2 (if mask2 != 0).
 * Args:     register address and two offset/mask pairs.
 * Notes:
 * Returns:  the extracted value.
 */
u32 ddr3_get_static_mc_value(u32 reg_addr, u32 offset1, u32 mask1,
			     u32 offset2, u32 mask2)
{
	u32 reg, temp;

	reg = reg_read(reg_addr);

	temp = (reg >> offset1) & mask1;

	if (mask2)
		temp |= (reg >> offset2) & mask2;

	return temp;
}
/*
 * Name:     ddr3_get_static_ddr_mode - select the static DDR configuration
 * Desc:     Use this routine to find the static configuration entry that
 *           matches the current CPU frequency, fabric option and board ID.
 *           The user must provide a compatible header file with register
 *           data.
 * Args:     None.
 * Notes:
 * Returns:  Index into ddr_modes[] (0 if no match was found).
 */
u32 ddr3_get_static_ddr_mode(void)
{
	u32 chip_board_rev, i;
	u32 size;

	/* Valid for A380 only; MSYS uses dynamic controller config */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	/*
	 * Customer boards select DDR mode according to
	 * board ID & Sample@Reset
	 */
	chip_board_rev = mv_board_id_get();
#else
	/* Marvell boards select DDR mode according to Sample@Reset only */
	chip_board_rev = MARVELL_BOARD;
#endif

	size = ARRAY_SIZE(ddr_modes);
	for (i = 0; i < size; i++) {
		if ((ddr3_get_cpu_freq() == ddr_modes[i].cpu_freq) &&
		    (ddr3_get_fab_opt() == ddr_modes[i].fab_freq) &&
		    (chip_board_rev == ddr_modes[i].chip_board_rev))
			return i;
	}

	DEBUG_INIT_S("\n*** Error: ddr3_get_static_ddr_mode: No match for requested DDR mode. ***\n\n");

	return 0;
}
/******************************************************************************
 * Name:     ddr3_get_cs_num_from_reg
 * Desc:     Count the enabled chip selects
 * Args:     None.
 * Notes:
 * Returns:  Number of enabled CS
 */
u32 ddr3_get_cs_num_from_reg(void)
{
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 cs_count = 0;
	u32 cs;

	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			cs_count++;
	}

	return cs_count;
}
/*
 * Name:     ddr3_load_topology_map
 * Desc:     Load the DDR3 topology map; update it from TWSI/S@R data when
 *           MV_DDR_TOPOLOGY_UPDATE_FROM_TWSI is enabled
 * Args:     None.
 * Notes:
 * Returns:  MV_OK
 */
int ddr3_load_topology_map(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

#if defined(MV_DDR_TOPOLOGY_UPDATE_FROM_TWSI)
	/* Update topology data */
	if (MV_OK != ddr3_update_topology_map(tm)) {
		DEBUG_INIT_FULL_S("Failed update of DDR3 Topology map\n");
	}
#endif

	return MV_OK;
}
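/*
 * get_target_freq (summary added for readability): translate the S@R
 * frequency mode field into a DDR frequency define and the HCLK period.
 * freq_mode 0x4 -> 200 MHz HCLK, 0x8 -> 333 MHz, 0xc -> 400 MHz;
 * *hclk_ps is returned as 1/HCLK in picoseconds, e.g. 400 MHz -> 2500 ps.
 */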
void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps)
{
	u32 tmp, hclk = 200;

	switch (freq_mode) {
	case 4:
		tmp = 1;		/* DDR_400; */
		hclk = 200;
		break;
	case 0x8:
		tmp = 1;		/* DDR_666; */
		hclk = 333;
		break;
	case 0xc:
		tmp = 1;		/* DDR_800; */
		hclk = 400;
		break;
	default:
		*ddr_freq = 0;
		*hclk_ps = 0;
		/* Unknown mode: return early so the zeroed outputs survive */
		return;
	}

	*ddr_freq = tmp;		/* DDR freq define */
	*hclk_ps = 1000000 / hclk;	/* values are 1/HCLK in ps */

	return;
}
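/*
 * ddr3_new_tip_dlb_config (summary added for readability): write the DLB
 * configuration table for the detected device revision, then set the DLB
 * enable / prefetch bits in REG_STATIC_DRAM_DLB_CONTROL.
 */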
void ddr3_new_tip_dlb_config(void)
{
	u32 reg, i = 0;
	struct dlb_config *config_table_ptr = sys_env_dlb_config_ptr_get();

	/* Write the configuration */
	while (config_table_ptr[i].reg_addr != 0) {
		reg_write(config_table_ptr[i].reg_addr,
			  config_table_ptr[i].reg_data);
		i++;
	}

	/* Enable DLB */
	reg = reg_read(REG_STATIC_DRAM_DLB_CONTROL);
	reg |= DLB_ENABLE | DLB_WRITE_COALESING | DLB_AXI_PREFETCH_EN |
		DLB_MBUS_PREFETCH_EN | PREFETCH_N_LN_SZ_TR;
	reg_write(REG_STATIC_DRAM_DLB_CONTROL, reg);
}
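/*
 * ddr3_fast_path_dynamic_cs_size_config (summary added for readability):
 * open one fast-path window per enabled CS, sized from the detected CS
 * memory size (optionally clamped by DEVICE_MAX_DRAM_ADDRESS_SIZE), and
 * program the L2 address filtering end register to the total memory size.
 * The overflow check on the running total is done on values shifted right
 * by 16 so the comparison still fits in a 32-bit word.
 */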
int ddr3_fast_path_dynamic_cs_size_config(u32 cs_ena)
{
	u32 reg, cs;
	u32 mem_total_size = 0;
	u32 cs_mem_size = 0;
	u32 mem_total_size_c, cs_mem_size_c;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
	struct hws_topology_map *tm = ddr3_get_topology_map();
#endif

	/* Open fast path windows */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			/* get CS size */
			if (ddr3_calc_mem_cs_size(cs, &cs_mem_size) != MV_OK)
				return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
			/*
			 * If the number of address pins doesn't allow using
			 * the max memory size defined in the topology, the
			 * memory size is limited by
			 * DEVICE_MAX_DRAM_ADDRESS_SIZE
			 */
			physical_mem_size = mem_size
				[tm->interface_params[0].memory_size];

			if (ddr3_get_device_width(cs) == 16) {
				/*
				 * A 16-bit memory device can be twice as
				 * large - the least significant address pin
				 * is not needed
				 */
				max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
			}

			if (physical_mem_size > max_mem_size) {
				cs_mem_size = max_mem_size *
					(ddr3_get_bus_width() /
					 ddr3_get_device_width(cs));
				printf("Updated physical memory size from 0x%x to 0x%x\n",
				       physical_mem_size,
				       DEVICE_MAX_DRAM_ADDRESS_SIZE);
			}
#endif

			/* set fast path window control for the cs */
			reg = 0xffffe1;
			reg |= (cs << 2);
			reg |= (cs_mem_size - 1) & 0xffff0000;
			/* Open fast path Window */
			reg_write(REG_FASTPATH_WIN_CTRL_ADDR(cs), reg);

			/* Set fast path window base address for the cs */
			reg = ((cs_mem_size) * cs) & 0xffff0000;
			/* Set base address */
			reg_write(REG_FASTPATH_WIN_BASE_ADDR(cs), reg);

			/*
			 * Since the memory size may be bigger than 4G, the
			 * sum may exceed a 32-bit word; to estimate the
			 * result, divide mem_total_size and cs_mem_size by
			 * 0x10000 (which is equal to >> 16)
			 */
			mem_total_size_c = mem_total_size >> 16;
			cs_mem_size_c = cs_mem_size >> 16;

			/* if the sum is less than 2 G - calculate the value */
			if (mem_total_size_c + cs_mem_size_c < 0x10000)
				mem_total_size += cs_mem_size;
			else	/* put max possible size */
				mem_total_size = L2_FILTER_FOR_MAX_MEMORY_SIZE;
		}
	}

	/* Set L2 filtering to Max Memory size */
	reg_write(ADDRESS_FILTERING_END_REGISTER, mem_total_size);

	return MV_OK;
}
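/*
 * ddr3_get_bus_width - return the active SDRAM bus width (16 or 32 bit),
 * taken from bit 15 of the SDRAM Configuration register.
 */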
u32 ddr3_get_bus_width(void)
{
	u32 bus_width;

	bus_width = (reg_read(REG_SDRAM_CONFIG_ADDR) & 0x8000) >>
		REG_SDRAM_CONFIG_WIDTH_OFFS;

	return (bus_width == 0) ? 16 : 32;
}
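/*
 * ddr3_get_device_width - return the DRAM device width (8 or 16 bit) for the
 * given CS, decoded from the SDRAM Address Control register.
 */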
u32 ddr3_get_device_width(u32 cs)
{
	u32 device_width;

	device_width = (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) &
			(0x3 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs))) >>
		(REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs);

	return (device_width == 0) ? 8 : 16;
}
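/*
 * ddr3_get_device_size - return the DRAM device density in Gbit (0.5, 1, 2,
 * 4 or 8) for the given CS, decoded from the size fields of the SDRAM
 * Address Control register. On an invalid encoding it returns a small dummy
 * value (0.01) so that ddr3_calc_mem_cs_size() below fails with an error.
 */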
float ddr3_get_device_size(u32 cs)
{
	u32 device_size_low, device_size_high, device_size;
	u32 data, cs_low_offset, cs_high_offset;

	cs_low_offset = REG_SDRAM_ADDRESS_SIZE_OFFS + cs * 4;
	cs_high_offset = REG_SDRAM_ADDRESS_SIZE_OFFS +
		REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs;

	data = reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR);
	device_size_low = (data >> cs_low_offset) & 0x3;
	device_size_high = (data >> cs_high_offset) & 0x1;

	device_size = device_size_low | (device_size_high << 2);

	switch (device_size) {
	case 0:
		return 2;
	case 2:
		return 0.5;
	case 3:
		return 1;
	case 4:
		return 4;
	case 5:
		return 8;
	case 1:
	default:
		DEBUG_INIT_C("Error: Wrong device size of Cs: ", cs, 1);
		/*
		 * Small value will give wrong emem size in
		 * ddr3_calc_mem_cs_size
		 */
		return 0.01;
	}
}
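/*
 * ddr3_calc_mem_cs_size (summary added for readability): compute the memory
 * size of a single CS. (bus width / device width) gives the number of
 * devices per rank, which is multiplied by the device density in Gbit and
 * divided by 8 to get GiB. Worked example: a 32-bit bus of 16-bit devices at
 * 4 Gbit density gives (32 / 16) * 4 / 8 = 1 GiB (assuming a controller bus
 * width multiplier of 1), so *cs_size is set to 1 << 30.
 */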
int ddr3_calc_mem_cs_size(u32 cs, u32 *cs_size)
{
	float cs_mem_size;

	/* Calculate in GiB */
	cs_mem_size = ((ddr3_get_bus_width() / ddr3_get_device_width(cs)) *
		       ddr3_get_device_size(cs)) / 8;

	/*
	 * Multiply by the controller bus width multiplier: 2x for 64 bit
	 * (the SoC controller may be 32 or 64 bit, so bit 15 in 0x1400,
	 * which says whether the whole bus or only half of it is used,
	 * has a different meaning)
	 */
	cs_mem_size *= DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER;

	if (cs_mem_size == 0.125) {
		*cs_size = 128 << 20;
	} else if (cs_mem_size == 0.25) {
		*cs_size = 256 << 20;
	} else if (cs_mem_size == 0.5) {
		*cs_size = 512 << 20;
	} else if (cs_mem_size == 1) {
		*cs_size = 1 << 30;
	} else if (cs_mem_size == 2) {
		*cs_size = 2 << 30;
	} else {
		DEBUG_INIT_C("Error: Wrong Memory size of Cs: ", cs, 1);
		return MV_BAD_VALUE;
	}

	return MV_OK;
}
#if defined(MV_DDR_TOPOLOGY_UPDATE_FROM_TWSI)
/*
 * Name:     ddr3_update_topology_map
 * Desc:
 * Args:
 * Notes:    Update topology map by Sat_r values
 * Returns:
 */
static int ddr3_update_topology_map(struct hws_topology_map *tm)
{
	struct topology_update_info topology_update_info;

	topology_update_info.update_width = 0;
	topology_update_info.update_ecc = 0;
	topology_update_info.update_ecc_pup3_mode = 0;
	sys_env_get_topology_update_info(&topology_update_info);
	if (topology_update_info.update_width) {
		tm->bus_act_mask &=
			~(TOPOLOGY_UPDATE_WIDTH_32BIT_MASK);
		if (topology_update_info.width == TOPOLOGY_UPDATE_WIDTH_16BIT)
			tm->bus_act_mask =
				TOPOLOGY_UPDATE_WIDTH_16BIT_MASK;
		else
			tm->bus_act_mask =
				TOPOLOGY_UPDATE_WIDTH_32BIT_MASK;
	}

	if (topology_update_info.update_ecc) {
		if (topology_update_info.ecc == TOPOLOGY_UPDATE_ECC_OFF) {
			tm->bus_act_mask &=
				~(1 << topology_update_info.ecc_pup_mode_offset);
		} else {
			tm->bus_act_mask |=
				topology_update_info.
				ecc << topology_update_info.ecc_pup_mode_offset;
		}
	}

	return MV_OK;
}
#endif
/*
 * Name:     ddr3_hws_tune_training_params
 * Desc:
 * Args:
 * Notes:    Tune internal training params
 * Returns:
 */
static int ddr3_hws_tune_training_params(u8 dev_num)
{
	struct tune_train_params params;
	int status;

	/* NOTE: do not remove any field initialization */
	params.ck_delay = TUNE_TRAINING_PARAMS_CK_DELAY;
	params.ck_delay_16 = TUNE_TRAINING_PARAMS_CK_DELAY_16;
	params.p_finger = TUNE_TRAINING_PARAMS_PFINGER;
	params.n_finger = TUNE_TRAINING_PARAMS_NFINGER;
	params.phy_reg3_val = TUNE_TRAINING_PARAMS_PHYREG3VAL;

	status = ddr3_tip_tune_training_params(dev_num, &params);
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	return MV_OK;
}