stm32mp1_ddr.c

// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#include <common.h>
#include <clk.h>
#include <ram.h>
#include <reset.h>
#include <timer.h>
#include <asm/io.h>
#include <asm/arch/ddr.h>
#include <linux/iopoll.h>
#include "stm32mp1_ddr.h"
#include "stm32mp1_ddr_regs.h"

#define RCC_DDRITFCR		0xD8

#define RCC_DDRITFCR_DDRCAPBRST		(BIT(14))
#define RCC_DDRITFCR_DDRCAXIRST		(BIT(15))
#define RCC_DDRITFCR_DDRCORERST		(BIT(16))
#define RCC_DDRITFCR_DPHYAPBRST		(BIT(17))
#define RCC_DDRITFCR_DPHYRST		(BIT(18))
#define RCC_DDRITFCR_DPHYCTLRST		(BIT(19))

struct reg_desc {
	const char *name;
	u16 offset;	/* offset for base address */
	u8 par_offset;	/* offset for parameter array */
};

#define INVALID_OFFSET	0xFF

#define DDRCTL_REG(x, y) \
	{#x, \
	 offsetof(struct stm32mp1_ddrctl, x), \
	 offsetof(struct y, x)}

#define DDRPHY_REG(x, y) \
	{#x, \
	 offsetof(struct stm32mp1_ddrphy, x), \
	 offsetof(struct y, x)}

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
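/*
 * Each descriptor pairs a controller/PHY register offset with the offset of
 * the matching field in the parameter structure, so set_reg() can copy the
 * configuration generically. For example, DDRCTL_REG_REG(mstr) expands to:
 *   { "mstr",
 *     offsetof(struct stm32mp1_ddrctl, mstr),
 *     offsetof(struct stm32mp1_ddrctrl_reg, mstr) }
 */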
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)

static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)

static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)

static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)

static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)

static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)

static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	u8 size;
	enum base_type base;
};
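/*
 * Registry of all register groups: each entry ties a descriptor array to its
 * element count and to the base (controller or PHY) used by set_reg().
 */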
const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		"static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE},
	[REG_TIMING] = {
		"timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE},
	[REG_PERF] = {
		"perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE},
	[REG_MAP] = {
		"map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE},
	[REGPHY_REG] = {
		"static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE},
	[REGPHY_TIMING] = {
		"timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE},
	[REGPHY_CAL] = {
		"cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE},
};

const char *base_name[] = {
	[DDR_BASE] = "ctl",
	[DDRPHY_BASE] = "phy",
};

static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE)
		return (u32)priv->phy;
	else
		return (u32)priv->ctl;
}
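/*
 * Program one group of registers: for every descriptor of the requested type,
 * read the value at par_offset in the caller-supplied parameter structure and
 * write it to the register at offset from the controller or PHY base.
 */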
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	u32 base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	debug("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			pr_err("invalid parameter offset for %s\n",
			       desc[i].name);
		} else {
			value = *((u32 *)((u32)param +
					  desc[i].par_offset));
			writel(value, ptr);
			debug("[0x%x] %s= 0x%08x\n",
			      (u32)ptr, desc[i].name, value);
		}
	}
}
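/*
 * Poll the PHY PGSR register until initialization is done (IDONE) or one of
 * the data-training/eval error flags is raised, with a 1 s timeout.
 */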
static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	u32 pgsr;
	int ret;

	ret = readl_poll_timeout(&phy->pgsr, pgsr,
				 pgsr & (DDRPHYC_PGSR_IDONE |
					 DDRPHYC_PGSR_DTERR |
					 DDRPHYC_PGSR_DTIERR |
					 DDRPHYC_PGSR_DFTERR |
					 DDRPHYC_PGSR_RVERR |
					 DDRPHYC_PGSR_RVEIRR),
				 1000000);
	debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
	      (u32)&phy->pgsr, pgsr, ret);
}

void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
{
	pir |= DDRPHYC_PIR_INIT;
	writel(pir, &phy->pir);
	debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
	      (u32)&phy->pir, pir, readl(&phy->pir));

	/* wait 10 configuration clock cycles before starting to poll */
	udelay(10);

	/* wait for DRAM initialization and gate training evaluation to complete */
	ddrphy_idone_wait(phy);
}
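/*
 * Quasi-dynamic controller registers must be changed between start_sw_done()
 * and wait_sw_done_ack(): clearing SWCTL.sw_done opens the programming window
 * and setting it back commits the update once SWSTAT.sw_done_ack is observed.
 */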
/* start quasi-dynamic register update */
static void start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
}

/* wait for completion of a quasi-dynamic register update */
static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	int ret;
	u32 swstat;

	setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);

	ret = readl_poll_timeout(&ctl->swstat, swstat,
				 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
				 1000000);
	if (ret)
		panic("Timeout initialising DRAM : DDR->swstat = %x\n",
		      swstat);

	debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
}
/* wait for the controller to reach the requested operating mode */
static void wait_operating_mode(struct ddr_info *priv, int mode)
{
	u32 stat, val, mask, val2 = 0, mask2 = 0;
	int ret;

	mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
	val = mode;
	/* self-refresh due to software => check also STAT.selfref_type */
	if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
		mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val |= DDRCTRL_STAT_SELFREF_TYPE_SR;
	} else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
		/* normal mode: handle also automatic self refresh */
		mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
			DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
		       DDRCTRL_STAT_SELFREF_TYPE_ASR;
	}

	ret = readl_poll_timeout(&priv->ctl->stat, stat,
				 ((stat & mask) == val) ||
				 (mask2 && ((stat & mask2) == val2)),
				 1000000);
	if (ret)
		panic("Timeout DRAM : DDR->stat = %x\n", stat);

	debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
}
void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	start_sw_done(ctl);
	/* quasi-dynamic register update */
	setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
			      u32 rfshctl3, u32 pwrctl)
{
	start_sw_done(ctl);
	if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
		clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

/* board-specific DDR power initializations */
__weak int board_ddr_power_init(void)
{
	return 0;
}
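/*
 * Full DDR bring-up sequence: reset the DDR subsystem, start its clock,
 * program the uMCTL2 controller (static, timing, map and perf groups),
 * program the PUBL PHY, run DRAM initialization and DQS gate training through
 * the PHY, then re-enable refresh/power-down and open the AXI ports.
 */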
__maybe_unused
void stm32mp1_ddr_init(struct ddr_info *priv,
		       const struct stm32mp1_ddr_config *config)
{
	u32 pir;
	int ret;

	ret = board_ddr_power_init();
	if (ret)
		panic("ddr power init failed\n");

	debug("name = %s\n", config->info.name);
	debug("speed = %d MHz\n", config->info.speed);
	debug("size = 0x%x\n", config->info.size);

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
	 */
	/* Assert all DDR part resets */
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
		panic("invalid DRAM clock : %d MHz\n",
		      config->info.speed);

	/* 1.3. deassert reset */
	/* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/* De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit
	 */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 4 cycles for synchronization */
	asm(" nop");
	asm(" nop");
	asm(" nop");
	asm(" nop");

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	debug("[0x%08x] dfimisc = 0x%08x\n",
	      (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);
	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* skip CTRL init, SDRAM init is done by the PHY PUBL */
	clrsetbits_le32(&priv->ctl->init0,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);

	set_reg(priv, REG_PERF, &config->c_perf);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/* 3. start PHY init by accessing relevant PUBL registers
	 * (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 * Perform DDR PHY DRAM initialization and gate training evaluation
	 */
	ddrphy_idone_wait(priv->phy);

	/* 5. Indicate to PUBL that the controller performs SDRAM initialization
	 * by setting PIR.INIT and PIR.CTLDINIT and polling PGSR.IDONE
	 * DRAM init is done by the PHY, init0.skip_dram_init = 1
	 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;
	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 6. SET DFIMISC.dfi_init_complete_en to 1 */
	/* Enable quasi-dynamic register programming */
	start_sw_done(priv->ctl);
	setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(priv->ctl);

	/* 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 * by monitoring the STAT.operating_mode signal
	 */
	/* wait until uMCTL2 is ready */
	wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	debug("DDR DQS training : ");

	/* 8. Disable auto refresh and power down by setting
	 * - RFSHCTL3.dis_auto_refresh = 1
	 * - PWRCTL.powerdown_en = 0
	 * - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/* 9. Program PUBL PGCR to enable refresh during training and rank to train
	 * not done => keep the programmed value in PGCR
	 */

	/* 10. configure PUBL PIR register to specify which training step to run */
	/* warning: RVTRN is not supported by this PUBL */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. monitor PUBL PGSR.IDONE to poll completion of the training sequence */
	ddrphy_idone_wait(priv->phy);

	/* 12. restore the registers changed in step 8 to their original values if desired */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* enable uMCTL2 AXI port 0 and 1 */
	setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
}
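/*
 * Typical usage (illustrative sketch, not part of this file): the caller is
 * expected to fill a struct ddr_info with the controller, PHY and RCC base
 * addresses used above, then pass the board DDR configuration, roughly:
 *
 *	struct ddr_info priv = {
 *		.ctl = (struct stm32mp1_ddrctl *)ctl_base,	// hypothetical base
 *		.phy = (struct stm32mp1_ddrphy *)phy_base,	// hypothetical base
 *		.rcc = rcc_base,				// hypothetical base
 *	};
 *
 *	stm32mp1_ddr_init(&priv, &config);
 *
 * The ctl_base/phy_base/rcc_base names and the exact shape of the config are
 * assumptions; see stm32mp1_ddr.h for the actual structure definitions.
 */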