/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */
10 #include <arch_helpers.h>
11 #include <common/debug.h>
12 #include <drivers/delay_timer.h>
13 #include <drivers/st/stm32mp1_clk.h>
14 #include <drivers/st/stm32mp1_ddr.h>
15 #include <drivers/st/stm32mp1_ddr_regs.h>
16 #include <drivers/st/stm32mp1_pmic.h>
17 #include <drivers/st/stm32mp1_pwr.h>
18 #include <drivers/st/stm32mp1_ram.h>
19 #include <drivers/st/stm32mp1_rcc.h>
20 #include <dt-bindings/clock/stm32mp1-clks.h>
22 #include <plat/common/platform.h>
24 #include <stm32mp1_def.h>
25 #include <stm32mp1_dt.h>
29 uint16_t offset
; /* Offset for base address */
30 uint8_t par_offset
; /* Offset for parameter array */
33 #define INVALID_OFFSET 0xFFU
35 #define TIMESLOT_1US (plat_get_syscnt_freq2() / 1000000U)
37 #define DDRCTL_REG(x, y) \
40 .offset = offsetof(struct stm32mp1_ddrctl, x), \
41 .par_offset = offsetof(struct y, x) \
44 #define DDRPHY_REG(x, y) \
47 .offset = offsetof(struct stm32mp1_ddrphy, x), \
48 .par_offset = offsetof(struct y, x) \
51 #define DDRCTL_REG_REG(x) DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
52 static const struct reg_desc ddr_reg
[] = {
54 DDRCTL_REG_REG(mrctrl0
),
55 DDRCTL_REG_REG(mrctrl1
),
56 DDRCTL_REG_REG(derateen
),
57 DDRCTL_REG_REG(derateint
),
58 DDRCTL_REG_REG(pwrctl
),
59 DDRCTL_REG_REG(pwrtmg
),
60 DDRCTL_REG_REG(hwlpctl
),
61 DDRCTL_REG_REG(rfshctl0
),
62 DDRCTL_REG_REG(rfshctl3
),
63 DDRCTL_REG_REG(crcparctl0
),
64 DDRCTL_REG_REG(zqctl0
),
65 DDRCTL_REG_REG(dfitmg0
),
66 DDRCTL_REG_REG(dfitmg1
),
67 DDRCTL_REG_REG(dfilpcfg0
),
68 DDRCTL_REG_REG(dfiupd0
),
69 DDRCTL_REG_REG(dfiupd1
),
70 DDRCTL_REG_REG(dfiupd2
),
71 DDRCTL_REG_REG(dfiphymstr
),
72 DDRCTL_REG_REG(odtmap
),
75 DDRCTL_REG_REG(dbgcmd
),
76 DDRCTL_REG_REG(poisoncfg
),
77 DDRCTL_REG_REG(pccfg
),
80 #define DDRCTL_REG_TIMING(x) DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
81 static const struct reg_desc ddr_timing
[] = {
82 DDRCTL_REG_TIMING(rfshtmg
),
83 DDRCTL_REG_TIMING(dramtmg0
),
84 DDRCTL_REG_TIMING(dramtmg1
),
85 DDRCTL_REG_TIMING(dramtmg2
),
86 DDRCTL_REG_TIMING(dramtmg3
),
87 DDRCTL_REG_TIMING(dramtmg4
),
88 DDRCTL_REG_TIMING(dramtmg5
),
89 DDRCTL_REG_TIMING(dramtmg6
),
90 DDRCTL_REG_TIMING(dramtmg7
),
91 DDRCTL_REG_TIMING(dramtmg8
),
92 DDRCTL_REG_TIMING(dramtmg14
),
93 DDRCTL_REG_TIMING(odtcfg
),
96 #define DDRCTL_REG_MAP(x) DDRCTL_REG(x, stm32mp1_ddrctrl_map)
97 static const struct reg_desc ddr_map
[] = {
98 DDRCTL_REG_MAP(addrmap1
),
99 DDRCTL_REG_MAP(addrmap2
),
100 DDRCTL_REG_MAP(addrmap3
),
101 DDRCTL_REG_MAP(addrmap4
),
102 DDRCTL_REG_MAP(addrmap5
),
103 DDRCTL_REG_MAP(addrmap6
),
104 DDRCTL_REG_MAP(addrmap9
),
105 DDRCTL_REG_MAP(addrmap10
),
106 DDRCTL_REG_MAP(addrmap11
),
109 #define DDRCTL_REG_PERF(x) DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
110 static const struct reg_desc ddr_perf
[] = {
111 DDRCTL_REG_PERF(sched
),
112 DDRCTL_REG_PERF(sched1
),
113 DDRCTL_REG_PERF(perfhpr1
),
114 DDRCTL_REG_PERF(perflpr1
),
115 DDRCTL_REG_PERF(perfwr1
),
116 DDRCTL_REG_PERF(pcfgr_0
),
117 DDRCTL_REG_PERF(pcfgw_0
),
118 DDRCTL_REG_PERF(pcfgqos0_0
),
119 DDRCTL_REG_PERF(pcfgqos1_0
),
120 DDRCTL_REG_PERF(pcfgwqos0_0
),
121 DDRCTL_REG_PERF(pcfgwqos1_0
),
122 DDRCTL_REG_PERF(pcfgr_1
),
123 DDRCTL_REG_PERF(pcfgw_1
),
124 DDRCTL_REG_PERF(pcfgqos0_1
),
125 DDRCTL_REG_PERF(pcfgqos1_1
),
126 DDRCTL_REG_PERF(pcfgwqos0_1
),
127 DDRCTL_REG_PERF(pcfgwqos1_1
),
130 #define DDRPHY_REG_REG(x) DDRPHY_REG(x, stm32mp1_ddrphy_reg)
131 static const struct reg_desc ddrphy_reg
[] = {
132 DDRPHY_REG_REG(pgcr
),
133 DDRPHY_REG_REG(aciocr
),
134 DDRPHY_REG_REG(dxccr
),
135 DDRPHY_REG_REG(dsgcr
),
137 DDRPHY_REG_REG(odtcr
),
138 DDRPHY_REG_REG(zq0cr1
),
139 DDRPHY_REG_REG(dx0gcr
),
140 DDRPHY_REG_REG(dx1gcr
),
141 DDRPHY_REG_REG(dx2gcr
),
142 DDRPHY_REG_REG(dx3gcr
),
145 #define DDRPHY_REG_TIMING(x) DDRPHY_REG(x, stm32mp1_ddrphy_timing)
146 static const struct reg_desc ddrphy_timing
[] = {
147 DDRPHY_REG_TIMING(ptr0
),
148 DDRPHY_REG_TIMING(ptr1
),
149 DDRPHY_REG_TIMING(ptr2
),
150 DDRPHY_REG_TIMING(dtpr0
),
151 DDRPHY_REG_TIMING(dtpr1
),
152 DDRPHY_REG_TIMING(dtpr2
),
153 DDRPHY_REG_TIMING(mr0
),
154 DDRPHY_REG_TIMING(mr1
),
155 DDRPHY_REG_TIMING(mr2
),
156 DDRPHY_REG_TIMING(mr3
),
159 #define DDRPHY_REG_CAL(x) DDRPHY_REG(x, stm32mp1_ddrphy_cal)
160 static const struct reg_desc ddrphy_cal
[] = {
161 DDRPHY_REG_CAL(dx0dllcr
),
162 DDRPHY_REG_CAL(dx0dqtr
),
163 DDRPHY_REG_CAL(dx0dqstr
),
164 DDRPHY_REG_CAL(dx1dllcr
),
165 DDRPHY_REG_CAL(dx1dqtr
),
166 DDRPHY_REG_CAL(dx1dqstr
),
167 DDRPHY_REG_CAL(dx2dllcr
),
168 DDRPHY_REG_CAL(dx2dqtr
),
169 DDRPHY_REG_CAL(dx2dqstr
),
170 DDRPHY_REG_CAL(dx3dllcr
),
171 DDRPHY_REG_CAL(dx3dqtr
),
172 DDRPHY_REG_CAL(dx3dqstr
),
175 #define DDR_REG_DYN(x) \
178 .offset = offsetof(struct stm32mp1_ddrctl, x), \
179 .par_offset = INVALID_OFFSET \
182 static const struct reg_desc ddr_dyn
[] = {
185 DDR_REG_DYN(dfimisc
),
186 DDR_REG_DYN(dfistat
),
189 DDR_REG_DYN(pctrl_0
),
190 DDR_REG_DYN(pctrl_1
),
193 #define DDRPHY_REG_DYN(x) \
196 .offset = offsetof(struct stm32mp1_ddrphy, x), \
197 .par_offset = INVALID_OFFSET \
200 static const struct reg_desc ddrphy_dyn
[] = {
202 DDRPHY_REG_DYN(pgsr
),
214 * Dynamic registers => managed in driver or not changed,
215 * can be dumped in interactive mode.
228 struct ddr_reg_info
{
230 const struct reg_desc
*desc
;
235 static const struct ddr_reg_info ddr_registers
[REG_TYPE_NB
] = {
237 "static", ddr_reg
, ARRAY_SIZE(ddr_reg
), DDR_BASE
240 "timing", ddr_timing
, ARRAY_SIZE(ddr_timing
), DDR_BASE
243 "perf", ddr_perf
, ARRAY_SIZE(ddr_perf
), DDR_BASE
246 "map", ddr_map
, ARRAY_SIZE(ddr_map
), DDR_BASE
249 "static", ddrphy_reg
, ARRAY_SIZE(ddrphy_reg
), DDRPHY_BASE
252 "timing", ddrphy_timing
, ARRAY_SIZE(ddrphy_timing
), DDRPHY_BASE
255 "cal", ddrphy_cal
, ARRAY_SIZE(ddrphy_cal
), DDRPHY_BASE
258 "dyn", ddr_dyn
, ARRAY_SIZE(ddr_dyn
), DDR_BASE
261 "dyn", ddrphy_dyn
, ARRAY_SIZE(ddrphy_dyn
), DDRPHY_BASE
265 static uint32_t get_base_addr(const struct ddr_info
*priv
, enum base_type base
)
267 if (base
== DDRPHY_BASE
) {
268 return (uint32_t)priv
->phy
;
270 return (uint32_t)priv
->ctl
;
274 static void set_reg(const struct ddr_info
*priv
,
279 unsigned int *ptr
, value
;
280 enum base_type base
= ddr_registers
[type
].base
;
281 uint32_t base_addr
= get_base_addr(priv
, base
);
282 const struct reg_desc
*desc
= ddr_registers
[type
].desc
;
284 VERBOSE("init %s\n", ddr_registers
[type
].name
);
285 for (i
= 0; i
< ddr_registers
[type
].size
; i
++) {
286 ptr
= (unsigned int *)(base_addr
+ desc
[i
].offset
);
287 if (desc
[i
].par_offset
== INVALID_OFFSET
) {
288 ERROR("invalid parameter offset for %s", desc
[i
].name
);
291 value
= *((uint32_t *)((uint32_t)param
+
292 desc
[i
].par_offset
));
293 mmio_write_32((uint32_t)ptr
, value
);
298 static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy
*phy
)
303 unsigned long time0
, time
;
305 start
= get_timer(0);
309 pgsr
= mmio_read_32((uint32_t)&phy
->pgsr
);
310 time
= get_timer(start
);
312 VERBOSE(" > [0x%x] pgsr = 0x%x &\n",
313 (uint32_t)&phy
->pgsr
, pgsr
);
314 VERBOSE(" [0x%x] pir = 0x%x (time=%x)\n",
316 mmio_read_32((uint32_t)&phy
->pir
),
321 if (time
> plat_get_syscnt_freq2()) {
324 if ((pgsr
& DDRPHYC_PGSR_DTERR
) != 0U) {
325 VERBOSE("DQS Gate Trainig Error\n");
328 if ((pgsr
& DDRPHYC_PGSR_DTIERR
) != 0U) {
329 VERBOSE("DQS Gate Trainig Intermittent Error\n");
332 if ((pgsr
& DDRPHYC_PGSR_DFTERR
) != 0U) {
333 VERBOSE("DQS Drift Error\n");
336 if ((pgsr
& DDRPHYC_PGSR_RVERR
) != 0U) {
337 VERBOSE("Read Valid Training Error\n");
340 if ((pgsr
& DDRPHYC_PGSR_RVEIRR
) != 0U) {
341 VERBOSE("Read Valid Training Intermittent Error\n");
344 } while ((pgsr
& DDRPHYC_PGSR_IDONE
) == 0U && error
== 0);
345 VERBOSE("\n[0x%x] pgsr = 0x%x\n",
346 (uint32_t)&phy
->pgsr
, pgsr
);
349 static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy
*phy
, uint32_t pir
)
351 uint32_t pir_init
= pir
| DDRPHYC_PIR_INIT
;
353 mmio_write_32((uint32_t)&phy
->pir
, pir_init
);
354 VERBOSE("[0x%x] pir = 0x%x -> 0x%x\n",
355 (uint32_t)&phy
->pir
, pir_init
,
356 mmio_read_32((uint32_t)&phy
->pir
));
358 /* Need to wait 10 configuration clock before start polling */
361 /* Wait DRAM initialization and Gate Training Evaluation complete */
362 stm32mp1_ddrphy_idone_wait(phy
);
365 /* Start quasi dynamic register update */
366 static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl
*ctl
)
368 mmio_clrbits_32((uint32_t)&ctl
->swctl
, DDRCTRL_SWCTL_SW_DONE
);
369 VERBOSE("[0x%x] swctl = 0x%x\n",
370 (uint32_t)&ctl
->swctl
, mmio_read_32((uint32_t)&ctl
->swctl
));
373 /* Wait quasi dynamic register update */
374 static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl
*ctl
)
379 mmio_setbits_32((uint32_t)&ctl
->swctl
, DDRCTRL_SWCTL_SW_DONE
);
380 VERBOSE("[0x%x] swctl = 0x%x\n",
381 (uint32_t)&ctl
->swctl
, mmio_read_32((uint32_t)&ctl
->swctl
));
383 start
= get_timer(0);
385 swstat
= mmio_read_32((uint32_t)&ctl
->swstat
);
386 VERBOSE("[0x%x] swstat = 0x%x ",
387 (uint32_t)&ctl
->swstat
, swstat
);
388 VERBOSE("timer in ms 0x%x = start 0x%lx\r",
389 get_timer(0), start
);
390 if (get_timer(start
) > plat_get_syscnt_freq2()) {
393 } while ((swstat
& DDRCTRL_SWSTAT_SW_DONE_ACK
) == 0U);
395 VERBOSE("[0x%x] swstat = 0x%x\n",
396 (uint32_t)&ctl
->swstat
, swstat
);
399 /* Wait quasi dynamic register update */
400 static void stm32mp1_wait_operating_mode(struct ddr_info
*priv
, uint32_t mode
)
404 uint32_t operating_mode
;
405 uint32_t selref_type
;
408 start
= get_timer(0);
410 stat
= mmio_read_32((uint32_t)&priv
->ctl
->stat
);
411 operating_mode
= stat
& DDRCTRL_STAT_OPERATING_MODE_MASK
;
412 selref_type
= stat
& DDRCTRL_STAT_SELFREF_TYPE_MASK
;
413 VERBOSE("[0x%x] stat = 0x%x\n",
414 (uint32_t)&priv
->ctl
->stat
, stat
);
415 VERBOSE("timer in ms 0x%x = start 0x%lx\r",
416 get_timer(0), start
);
417 if (get_timer(start
) > plat_get_syscnt_freq2()) {
421 if (mode
== DDRCTRL_STAT_OPERATING_MODE_SR
) {
423 * Self-refresh due to software
424 * => checking also STAT.selfref_type.
426 if ((operating_mode
==
427 DDRCTRL_STAT_OPERATING_MODE_SR
) &&
428 (selref_type
== DDRCTRL_STAT_SELFREF_TYPE_SR
)) {
431 } else if (operating_mode
== mode
) {
433 } else if ((mode
== DDRCTRL_STAT_OPERATING_MODE_NORMAL
) &&
434 (operating_mode
== DDRCTRL_STAT_OPERATING_MODE_SR
) &&
435 (selref_type
== DDRCTRL_STAT_SELFREF_TYPE_ASR
)) {
436 /* Normal mode: handle also automatic self refresh */
440 if (break_loop
== 1) {
445 VERBOSE("[0x%x] stat = 0x%x\n",
446 (uint32_t)&priv
->ctl
->stat
, stat
);
449 /* Mode Register Writes (MRW or MRS) */
450 static void stm32mp1_mode_register_write(struct ddr_info
*priv
, uint8_t addr
,
455 VERBOSE("MRS: %d = %x\n", addr
, data
);
458 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
459 * This checks that there is no outstanding MR transaction.
460 * No write should be performed to MRCTRL0 and MRCTRL1
461 * if MRSTAT.mr_wr_busy = 1.
463 while ((mmio_read_32((uint32_t)&priv
->ctl
->mrstat
) &
464 DDRCTRL_MRSTAT_MR_WR_BUSY
) != 0U) {
469 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
470 * and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
472 mrctrl0
= DDRCTRL_MRCTRL0_MR_TYPE_WRITE
|
473 DDRCTRL_MRCTRL0_MR_RANK_ALL
|
474 (((uint32_t)addr
<< DDRCTRL_MRCTRL0_MR_ADDR_SHIFT
) &
475 DDRCTRL_MRCTRL0_MR_ADDR_MASK
);
476 mmio_write_32((uint32_t)&priv
->ctl
->mrctrl0
, mrctrl0
);
477 VERBOSE("[0x%x] mrctrl0 = 0x%x (0x%x)\n",
478 (uint32_t)&priv
->ctl
->mrctrl0
,
479 mmio_read_32((uint32_t)&priv
->ctl
->mrctrl0
), mrctrl0
);
480 mmio_write_32((uint32_t)&priv
->ctl
->mrctrl1
, data
);
481 VERBOSE("[0x%x] mrctrl1 = 0x%x\n",
482 (uint32_t)&priv
->ctl
->mrctrl1
,
483 mmio_read_32((uint32_t)&priv
->ctl
->mrctrl1
));
486 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
487 * bit is self-clearing, and triggers the MR transaction.
488 * The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
489 * the MR transaction to SDRAM, and no further access can be
490 * initiated until it is deasserted.
492 mrctrl0
|= DDRCTRL_MRCTRL0_MR_WR
;
493 mmio_write_32((uint32_t)&priv
->ctl
->mrctrl0
, mrctrl0
);
495 while ((mmio_read_32((uint32_t)&priv
->ctl
->mrstat
) &
496 DDRCTRL_MRSTAT_MR_WR_BUSY
) != 0U) {
500 VERBOSE("[0x%x] mrctrl0 = 0x%x\n",
501 (uint32_t)&priv
->ctl
->mrctrl0
, mrctrl0
);
504 /* Switch DDR3 from DLL-on to DLL-off */
505 static void stm32mp1_ddr3_dll_off(struct ddr_info
*priv
)
507 uint32_t mr1
= mmio_read_32((uint32_t)&priv
->phy
->mr1
);
508 uint32_t mr2
= mmio_read_32((uint32_t)&priv
->phy
->mr2
);
511 VERBOSE("mr1: 0x%x\n", mr1
);
512 VERBOSE("mr2: 0x%x\n", mr2
);
515 * 1. Set the DBG1.dis_hif = 1.
516 * This prevents further reads/writes being received on the HIF.
518 mmio_setbits_32((uint32_t)&priv
->ctl
->dbg1
, DDRCTRL_DBG1_DIS_HIF
);
519 VERBOSE("[0x%x] dbg1 = 0x%x\n",
520 (uint32_t)&priv
->ctl
->dbg1
,
521 mmio_read_32((uint32_t)&priv
->ctl
->dbg1
));
524 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
525 * DBGCAM.wr_data_pipeline_empty = 1,
526 * DBGCAM.rd_data_pipeline_empty = 1,
527 * DBGCAM.dbg_wr_q_depth = 0 ,
528 * DBGCAM.dbg_lpr_q_depth = 0, and
529 * DBGCAM.dbg_hpr_q_depth = 0.
532 dbgcam
= mmio_read_32((uint32_t)&priv
->ctl
->dbgcam
);
533 VERBOSE("[0x%x] dbgcam = 0x%x\n",
534 (uint32_t)&priv
->ctl
->dbgcam
, dbgcam
);
535 } while ((((dbgcam
& DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY
) ==
536 DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY
)) &&
537 ((dbgcam
& DDRCTRL_DBGCAM_DBG_Q_DEPTH
) == 0U));
540 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
541 * to disable RTT_NOM:
542 * a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
543 * b. DDR4: Write to MR1[10:8]
545 mr1
&= ~(BIT(9) | BIT(6) | BIT(2));
546 stm32mp1_mode_register_write(priv
, 1, mr1
);
549 * 4. For DDR4 only: Perform an MRS command
550 * (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
551 * to disable RTT_PARK
555 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
556 * to write to MR2[10:9], to disable RTT_WR
557 * (and therefore disable dynamic ODT).
558 * This applies for both DDR3 and DDR4.
560 mr2
&= ~GENMASK(10, 9);
561 stm32mp1_mode_register_write(priv
, 2, mr2
);
564 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
565 * to disable the DLL. The timing of this MRS is automatically
566 * handled by the uMCTL2.
567 * a. DDR3: Write to MR1[0]
568 * b. DDR4: Write to MR1[0]
571 stm32mp1_mode_register_write(priv
, 1, mr1
);
574 * 7. Put the SDRAM into self-refresh mode by setting
575 * PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
576 * the DDRC has entered self-refresh.
578 mmio_setbits_32((uint32_t)&priv
->ctl
->pwrctl
,
579 DDRCTRL_PWRCTL_SELFREF_SW
);
580 VERBOSE("[0x%x] pwrctl = 0x%x\n",
581 (uint32_t)&priv
->ctl
->pwrctl
,
582 mmio_read_32((uint32_t)&priv
->ctl
->pwrctl
));
585 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
586 * DWC_ddr_umctl2 core is in self-refresh mode.
587 * Ensure transition to self-refresh was due to software
588 * by checking that STAT.selfref_type[1:0]=2.
590 stm32mp1_wait_operating_mode(priv
, DDRCTRL_STAT_OPERATING_MODE_SR
);
593 * 9. Set the MSTR.dll_off_mode = 1.
594 * warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
596 stm32mp1_start_sw_done(priv
->ctl
);
598 mmio_setbits_32((uint32_t)&priv
->ctl
->mstr
, DDRCTRL_MSTR_DLL_OFF_MODE
);
599 VERBOSE("[0x%x] mstr = 0x%x\n",
600 (uint32_t)&priv
->ctl
->mstr
,
601 mmio_read_32((uint32_t)&priv
->ctl
->mstr
));
603 stm32mp1_wait_sw_done_ack(priv
->ctl
);
605 /* 10. Change the clock frequency to the desired value. */
608 * 11. Update any registers which may be required to change for the new
609 * frequency. This includes static and dynamic registers.
610 * This includes both uMCTL2 registers and PHY registers.
613 /* Change Bypass Mode Frequency Range */
614 if (stm32mp1_clk_get_rate(DDRPHYC
) < 100000000U) {
615 mmio_clrbits_32((uint32_t)&priv
->phy
->dllgcr
,
616 DDRPHYC_DLLGCR_BPS200
);
618 mmio_setbits_32((uint32_t)&priv
->phy
->dllgcr
,
619 DDRPHYC_DLLGCR_BPS200
);
622 mmio_setbits_32((uint32_t)&priv
->phy
->acdllcr
, DDRPHYC_ACDLLCR_DLLDIS
);
624 mmio_setbits_32((uint32_t)&priv
->phy
->dx0dllcr
,
625 DDRPHYC_DXNDLLCR_DLLDIS
);
626 mmio_setbits_32((uint32_t)&priv
->phy
->dx1dllcr
,
627 DDRPHYC_DXNDLLCR_DLLDIS
);
628 mmio_setbits_32((uint32_t)&priv
->phy
->dx2dllcr
,
629 DDRPHYC_DXNDLLCR_DLLDIS
);
630 mmio_setbits_32((uint32_t)&priv
->phy
->dx3dllcr
,
631 DDRPHYC_DXNDLLCR_DLLDIS
);
633 /* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
634 mmio_clrbits_32((uint32_t)&priv
->ctl
->pwrctl
,
635 DDRCTRL_PWRCTL_SELFREF_SW
);
636 stm32mp1_wait_operating_mode(priv
, DDRCTRL_STAT_OPERATING_MODE_NORMAL
);
639 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
644 * 14. Perform MRS commands as required to re-program timing registers
645 * in the SDRAM for the new frequency
646 * (in particular, CL, CWL and WR may need to be changed).
649 /* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
650 mmio_clrbits_32((uint32_t)&priv
->ctl
->dbg1
, DDRCTRL_DBG1_DIS_HIF
);
651 VERBOSE("[0x%x] dbg1 = 0x%x\n",
652 (uint32_t)&priv
->ctl
->dbg1
,
653 mmio_read_32((uint32_t)&priv
->ctl
->dbg1
));
656 static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl
*ctl
)
658 stm32mp1_start_sw_done(ctl
);
659 /* Quasi-dynamic register update*/
660 mmio_setbits_32((uint32_t)&ctl
->rfshctl3
,
661 DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH
);
662 mmio_clrbits_32((uint32_t)&ctl
->pwrctl
, DDRCTRL_PWRCTL_POWERDOWN_EN
);
663 mmio_clrbits_32((uint32_t)&ctl
->dfimisc
,
664 DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN
);
665 stm32mp1_wait_sw_done_ack(ctl
);
668 static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl
*ctl
,
669 uint32_t rfshctl3
, uint32_t pwrctl
)
671 stm32mp1_start_sw_done(ctl
);
672 if ((rfshctl3
& DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH
) == 0U) {
673 mmio_clrbits_32((uint32_t)&ctl
->rfshctl3
,
674 DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH
);
676 if ((pwrctl
& DDRCTRL_PWRCTL_POWERDOWN_EN
) != 0U) {
677 mmio_setbits_32((uint32_t)&ctl
->pwrctl
,
678 DDRCTRL_PWRCTL_POWERDOWN_EN
);
680 mmio_setbits_32((uint32_t)&ctl
->dfimisc
,
681 DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN
);
682 stm32mp1_wait_sw_done_ack(ctl
);
685 static int board_ddr_power_init(enum ddr_type ddr_type
)
687 if (dt_check_pmic()) {
688 return pmic_ddr_power_init(ddr_type
);
694 void stm32mp1_ddr_init(struct ddr_info
*priv
,
695 struct stm32mp1_ddr_config
*config
)
700 if ((config
->c_reg
.mstr
& DDRCTRL_MSTR_DDR3
) != 0U) {
701 ret
= board_ddr_power_init(STM32MP_DDR3
);
703 ret
= board_ddr_power_init(STM32MP_LPDDR2
);
710 VERBOSE("name = %s\n", config
->info
.name
);
711 VERBOSE("speed = %d MHz\n", config
->info
.speed
);
712 VERBOSE("size = 0x%x\n", config
->info
.size
);
714 /* DDR INIT SEQUENCE */
717 * 1. Program the DWC_ddr_umctl2 registers
718 * nota: check DFIMISC.dfi_init_complete = 0
721 /* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
722 mmio_setbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DDRCAPBRST
);
723 mmio_setbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DDRCAXIRST
);
724 mmio_setbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DDRCORERST
);
725 mmio_setbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DPHYAPBRST
);
726 mmio_setbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DPHYRST
);
727 mmio_setbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DPHYCTLRST
);
729 /* 1.2. start CLOCK */
730 if (stm32mp1_ddr_clk_enable(priv
, config
->info
.speed
) != 0) {
734 /* 1.3. deassert reset */
735 /* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
736 mmio_clrbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DPHYRST
);
737 mmio_clrbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DPHYCTLRST
);
739 * De-assert presetn once the clocks are active
740 * and stable via DDRCAPBRST bit.
742 mmio_clrbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DDRCAPBRST
);
744 /* 1.4. wait 128 cycles to permit initialization of end logic */
746 /* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */
748 /* 1.5. initialize registers ddr_umctl2 */
749 /* Stop uMCTL2 before PHY is ready */
750 mmio_clrbits_32((uint32_t)&priv
->ctl
->dfimisc
,
751 DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN
);
752 VERBOSE("[0x%x] dfimisc = 0x%x\n",
753 (uint32_t)&priv
->ctl
->dfimisc
,
754 mmio_read_32((uint32_t)&priv
->ctl
->dfimisc
));
756 set_reg(priv
, REG_REG
, &config
->c_reg
);
758 /* DDR3 = don't set DLLOFF for init mode */
759 if ((config
->c_reg
.mstr
&
760 (DDRCTRL_MSTR_DDR3
| DDRCTRL_MSTR_DLL_OFF_MODE
))
761 == (DDRCTRL_MSTR_DDR3
| DDRCTRL_MSTR_DLL_OFF_MODE
)) {
762 VERBOSE("deactivate DLL OFF in mstr\n");
763 mmio_clrbits_32((uint32_t)&priv
->ctl
->mstr
,
764 DDRCTRL_MSTR_DLL_OFF_MODE
);
765 VERBOSE("[0x%x] mstr = 0x%x\n",
766 (uint32_t)&priv
->ctl
->mstr
,
767 mmio_read_32((uint32_t)&priv
->ctl
->mstr
));
770 set_reg(priv
, REG_TIMING
, &config
->c_timing
);
771 set_reg(priv
, REG_MAP
, &config
->c_map
);
773 /* Skip CTRL init, SDRAM init is done by PHY PUBL */
774 mmio_clrsetbits_32((uint32_t)&priv
->ctl
->init0
,
775 DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK
,
776 DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL
);
777 VERBOSE("[0x%x] init0 = 0x%x\n",
778 (uint32_t)&priv
->ctl
->init0
,
779 mmio_read_32((uint32_t)&priv
->ctl
->init0
));
781 set_reg(priv
, REG_PERF
, &config
->c_perf
);
783 /* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
784 mmio_clrbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DDRCORERST
);
785 mmio_clrbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DDRCAXIRST
);
786 mmio_clrbits_32(priv
->rcc
+ RCC_DDRITFCR
, RCC_DDRITFCR_DPHYAPBRST
);
789 * 3. start PHY init by accessing relevant PUBL registers
790 * (DXGCR, DCR, PTR*, MR*, DTPR*)
792 set_reg(priv
, REGPHY_REG
, &config
->p_reg
);
793 set_reg(priv
, REGPHY_TIMING
, &config
->p_timing
);
794 set_reg(priv
, REGPHY_CAL
, &config
->p_cal
);
796 /* DDR3 = don't set DLLOFF for init mode */
797 if ((config
->c_reg
.mstr
&
798 (DDRCTRL_MSTR_DDR3
| DDRCTRL_MSTR_DLL_OFF_MODE
))
799 == (DDRCTRL_MSTR_DDR3
| DDRCTRL_MSTR_DLL_OFF_MODE
)) {
800 VERBOSE("deactivate DLL OFF in mr1\n");
801 mmio_clrbits_32((uint32_t)&priv
->phy
->mr1
, BIT(0));
802 VERBOSE("[0x%x] mr1 = 0x%x\n",
803 (uint32_t)&priv
->phy
->mr1
,
804 mmio_read_32((uint32_t)&priv
->phy
->mr1
));
808 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
809 * Perform DDR PHY DRAM initialization and Gate Training Evaluation
811 stm32mp1_ddrphy_idone_wait(priv
->phy
);
814 * 5. Indicate to PUBL that controller performs SDRAM initialization
815 * by setting PIR.INIT and PIR CTLDINIT and pool PGSR.IDONE
816 * DRAM init is done by PHY, init0.skip_dram.init = 1
819 pir
= DDRPHYC_PIR_DLLSRST
| DDRPHYC_PIR_DLLLOCK
| DDRPHYC_PIR_ZCAL
|
820 DDRPHYC_PIR_ITMSRST
| DDRPHYC_PIR_DRAMINIT
| DDRPHYC_PIR_ICPC
;
822 if ((config
->c_reg
.mstr
& DDRCTRL_MSTR_DDR3
) != 0U) {
823 pir
|= DDRPHYC_PIR_DRAMRST
; /* Only for DDR3 */
826 stm32mp1_ddrphy_init(priv
->phy
, pir
);
829 * 6. SET DFIMISC.dfi_init_complete_en to 1
830 * Enable quasi-dynamic register programming.
832 stm32mp1_start_sw_done(priv
->ctl
);
834 mmio_setbits_32((uint32_t)&priv
->ctl
->dfimisc
,
835 DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN
);
836 VERBOSE("[0x%x] dfimisc = 0x%x\n",
837 (uint32_t)&priv
->ctl
->dfimisc
,
838 mmio_read_32((uint32_t)&priv
->ctl
->dfimisc
));
840 stm32mp1_wait_sw_done_ack(priv
->ctl
);
843 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
844 * by monitoring STAT.operating_mode signal
847 /* Wait uMCTL2 ready */
848 stm32mp1_wait_operating_mode(priv
, DDRCTRL_STAT_OPERATING_MODE_NORMAL
);
850 /* Switch to DLL OFF mode */
851 if ((config
->c_reg
.mstr
& DDRCTRL_MSTR_DLL_OFF_MODE
) != 0U) {
852 stm32mp1_ddr3_dll_off(priv
);
855 VERBOSE("DDR DQS training : ");
858 * 8. Disable Auto refresh and power down by setting
859 * - RFSHCTL3.dis_au_refresh = 1
860 * - PWRCTL.powerdown_en = 0
861 * - DFIMISC.dfiinit_complete_en = 0
863 stm32mp1_refresh_disable(priv
->ctl
);
866 * 9. Program PUBL PGCR to enable refresh during training
868 * not done => keep the programed value in PGCR
872 * 10. configure PUBL PIR register to specify which training step
874 * Warning : RVTRN is not supported by this PUBL
876 stm32mp1_ddrphy_init(priv
->phy
, DDRPHYC_PIR_QSTRN
);
878 /* 11. monitor PUB PGSR.IDONE to poll cpmpletion of training sequence */
879 stm32mp1_ddrphy_idone_wait(priv
->phy
);
882 * 12. set back registers in step 8 to the orginal values if desidered
884 stm32mp1_refresh_restore(priv
->ctl
, config
->c_reg
.rfshctl3
,
885 config
->c_reg
.pwrctl
);
887 /* Enable uMCTL2 AXI port 0 */
888 mmio_setbits_32((uint32_t)&priv
->ctl
->pctrl_0
, DDRCTRL_PCTRL_N_PORT_EN
);
889 VERBOSE("[0x%x] pctrl_0 = 0x%x\n",
890 (uint32_t)&priv
->ctl
->pctrl_0
,
891 mmio_read_32((uint32_t)&priv
->ctl
->pctrl_0
));
893 /* Enable uMCTL2 AXI port 1 */
894 mmio_setbits_32((uint32_t)&priv
->ctl
->pctrl_1
, DDRCTRL_PCTRL_N_PORT_EN
);
895 VERBOSE("[0x%x] pctrl_1 = 0x%x\n",
896 (uint32_t)&priv
->ctl
->pctrl_1
,
897 mmio_read_32((uint32_t)&priv
->ctl
->pctrl_1
));