Sanitise includes across codebase
[project/bcm63xx/atf.git] / drivers / st / ddr / stm32mp1_ddr.c
1 /*
2 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
3 *
4 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
5 */
6
7 #include <stddef.h>
8
9 #include <arch.h>
10 #include <arch_helpers.h>
11 #include <common/debug.h>
12 #include <drivers/delay_timer.h>
13 #include <drivers/st/stm32mp1_clk.h>
14 #include <drivers/st/stm32mp1_ddr.h>
15 #include <drivers/st/stm32mp1_ddr_regs.h>
16 #include <drivers/st/stm32mp1_pmic.h>
17 #include <drivers/st/stm32mp1_pwr.h>
18 #include <drivers/st/stm32mp1_ram.h>
19 #include <drivers/st/stm32mp1_rcc.h>
20 #include <dt-bindings/clock/stm32mp1-clks.h>
21 #include <lib/mmio.h>
22 #include <plat/common/platform.h>
23
24 #include <stm32mp1_def.h>
25 #include <stm32mp1_dt.h>
26
/*
 * Descriptor tying a register name to two byte offsets: where the
 * register lives relative to the controller/PHY base, and where its
 * value lives inside the matching parameter structure.
 */
struct reg_desc {
	const char *name;	/* Register name, used in traces only */
	uint16_t offset;	/* Offset for base address */
	uint8_t par_offset;	/* Offset for parameter array */
};
32
/* par_offset value for registers with no slot in the parameter structures */
#define INVALID_OFFSET	0xFFU

/* Generic timer ticks per microsecond */
#define TIMESLOT_1US	(plat_get_syscnt_freq2() / 1000000U)

/*
 * Build a reg_desc entry for DDR controller register x, whose
 * parameter value is field x of struct y.
 */
#define DDRCTL_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrctl, x), \
		.par_offset = offsetof(struct y, x) \
	}

/* Same as DDRCTL_REG, but for a DDR PHY register */
#define DDRPHY_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrphy, x), \
		.par_offset = offsetof(struct y, x) \
	}
50
/* Controller static configuration registers, fed from stm32mp1_ddrctrl_reg */
#define DDRCTL_REG_REG(x) DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

/* Controller timing registers, fed from stm32mp1_ddrctrl_timing */
#define DDRCTL_REG_TIMING(x) DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

/* Controller address-mapping registers, fed from stm32mp1_ddrctrl_map */
#define DDRCTL_REG_MAP(x) DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

/* Controller scheduling/QoS registers, fed from stm32mp1_ddrctrl_perf */
#define DDRCTL_REG_PERF(x) DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};
129
/* PHY static configuration registers, fed from stm32mp1_ddrphy_reg */
#define DDRPHY_REG_REG(x) DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

/* PHY timing and mode registers, fed from stm32mp1_ddrphy_timing */
#define DDRPHY_REG_TIMING(x) DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

/* PHY per-byte-lane calibration registers, fed from stm32mp1_ddrphy_cal */
#define DDRPHY_REG_CAL(x) DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};
174
/*
 * Controller registers managed directly by the driver (no parameter
 * slot): par_offset is INVALID_OFFSET so set_reg() refuses them.
 */
#define DDR_REG_DYN(x) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrctl, x), \
		.par_offset = INVALID_OFFSET \
	}

static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

/* Same idea for PHY registers driven directly by the init sequence */
#define DDRPHY_REG_DYN(x) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrphy, x), \
		.par_offset = INVALID_OFFSET \
	}

static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};
204
/* Index into ddr_registers[], one entry per register family */
enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	/*
	 * Dynamic registers => managed in driver or not changed,
	 * can be dumped in interactive mode.
	 */
	REG_DYN,
	REGPHY_DYN,
	REG_TYPE_NB
};

/* Which base address a register family is offset from */
enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

/* One register family: its descriptor table, entry count and base */
struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	uint8_t size;
	enum base_type base;
};

/* Master table: maps each reg_type to its descriptors and base */
static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		"static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE
	},
	[REG_TIMING] = {
		"timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE
	},
	[REG_PERF] = {
		"perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE
	},
	[REG_MAP] = {
		"map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE
	},
	[REGPHY_REG] = {
		"static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		"timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		"cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE
	},
	[REG_DYN] = {
		"dyn", ddr_dyn, ARRAY_SIZE(ddr_dyn), DDR_BASE
	},
	[REGPHY_DYN] = {
		"dyn", ddrphy_dyn, ARRAY_SIZE(ddrphy_dyn), DDRPHY_BASE
	},
};
264
265 static uint32_t get_base_addr(const struct ddr_info *priv, enum base_type base)
266 {
267 if (base == DDRPHY_BASE) {
268 return (uint32_t)priv->phy;
269 } else {
270 return (uint32_t)priv->ctl;
271 }
272 }
273
274 static void set_reg(const struct ddr_info *priv,
275 enum reg_type type,
276 const void *param)
277 {
278 unsigned int i;
279 unsigned int *ptr, value;
280 enum base_type base = ddr_registers[type].base;
281 uint32_t base_addr = get_base_addr(priv, base);
282 const struct reg_desc *desc = ddr_registers[type].desc;
283
284 VERBOSE("init %s\n", ddr_registers[type].name);
285 for (i = 0; i < ddr_registers[type].size; i++) {
286 ptr = (unsigned int *)(base_addr + desc[i].offset);
287 if (desc[i].par_offset == INVALID_OFFSET) {
288 ERROR("invalid parameter offset for %s", desc[i].name);
289 panic();
290 } else {
291 value = *((uint32_t *)((uint32_t)param +
292 desc[i].par_offset));
293 mmio_write_32((uint32_t)ptr, value);
294 }
295 }
296 }
297
298 static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
299 {
300 uint32_t pgsr;
301 int error = 0;
302 unsigned long start;
303 unsigned long time0, time;
304
305 start = get_timer(0);
306 time0 = start;
307
308 do {
309 pgsr = mmio_read_32((uint32_t)&phy->pgsr);
310 time = get_timer(start);
311 if (time != time0) {
312 VERBOSE(" > [0x%x] pgsr = 0x%x &\n",
313 (uint32_t)&phy->pgsr, pgsr);
314 VERBOSE(" [0x%x] pir = 0x%x (time=%x)\n",
315 (uint32_t)&phy->pir,
316 mmio_read_32((uint32_t)&phy->pir),
317 (uint32_t)time);
318 }
319
320 time0 = time;
321 if (time > plat_get_syscnt_freq2()) {
322 panic();
323 }
324 if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
325 VERBOSE("DQS Gate Trainig Error\n");
326 error++;
327 }
328 if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
329 VERBOSE("DQS Gate Trainig Intermittent Error\n");
330 error++;
331 }
332 if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
333 VERBOSE("DQS Drift Error\n");
334 error++;
335 }
336 if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
337 VERBOSE("Read Valid Training Error\n");
338 error++;
339 }
340 if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
341 VERBOSE("Read Valid Training Intermittent Error\n");
342 error++;
343 }
344 } while ((pgsr & DDRPHYC_PGSR_IDONE) == 0U && error == 0);
345 VERBOSE("\n[0x%x] pgsr = 0x%x\n",
346 (uint32_t)&phy->pgsr, pgsr);
347 }
348
349 static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
350 {
351 uint32_t pir_init = pir | DDRPHYC_PIR_INIT;
352
353 mmio_write_32((uint32_t)&phy->pir, pir_init);
354 VERBOSE("[0x%x] pir = 0x%x -> 0x%x\n",
355 (uint32_t)&phy->pir, pir_init,
356 mmio_read_32((uint32_t)&phy->pir));
357
358 /* Need to wait 10 configuration clock before start polling */
359 udelay(10);
360
361 /* Wait DRAM initialization and Gate Training Evaluation complete */
362 stm32mp1_ddrphy_idone_wait(phy);
363 }
364
365 /* Start quasi dynamic register update */
366 static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
367 {
368 mmio_clrbits_32((uint32_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
369 VERBOSE("[0x%x] swctl = 0x%x\n",
370 (uint32_t)&ctl->swctl, mmio_read_32((uint32_t)&ctl->swctl));
371 }
372
373 /* Wait quasi dynamic register update */
374 static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
375 {
376 unsigned long start;
377 uint32_t swstat;
378
379 mmio_setbits_32((uint32_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
380 VERBOSE("[0x%x] swctl = 0x%x\n",
381 (uint32_t)&ctl->swctl, mmio_read_32((uint32_t)&ctl->swctl));
382
383 start = get_timer(0);
384 do {
385 swstat = mmio_read_32((uint32_t)&ctl->swstat);
386 VERBOSE("[0x%x] swstat = 0x%x ",
387 (uint32_t)&ctl->swstat, swstat);
388 VERBOSE("timer in ms 0x%x = start 0x%lx\r",
389 get_timer(0), start);
390 if (get_timer(start) > plat_get_syscnt_freq2()) {
391 panic();
392 }
393 } while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);
394
395 VERBOSE("[0x%x] swstat = 0x%x\n",
396 (uint32_t)&ctl->swstat, swstat);
397 }
398
/*
 * Poll STAT until the controller reaches operating mode @mode.
 * For self-refresh, additionally require STAT.selfref_type to report a
 * software-requested entry; when waiting for normal mode, also accept
 * automatic self-refresh.  Panics on polling timeout.
 * (Previous header comment was a copy-paste of the sw_done one.)
 */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	unsigned long start;
	uint32_t stat;
	uint32_t operating_mode;
	uint32_t selref_type;
	int break_loop = 0;

	start = get_timer(0);
	for ( ; ; ) {
		stat = mmio_read_32((uint32_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%x] stat = 0x%x\n",
			(uint32_t)&priv->ctl->stat, stat);
		VERBOSE("timer in ms 0x%x = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => checking also STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: handle also automatic self refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%x] stat = 0x%x\n",
		(uint32_t)&priv->ctl->stat, stat);
}
448
/*
 * Mode Register Writes (MRW or MRS).
 * Issue a mode-register write to SDRAM mode register @addr with value
 * @data, following the uMCTL2 MRCTRL0/MRCTRL1/MRSTAT handshake.
 * Blocks (no timeout) while MRSTAT.mr_wr_busy is set, before and after
 * triggering the transaction.
 */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uint32_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uint32_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%x] mrctrl0 = 0x%x (0x%x)\n",
		(uint32_t)&priv->ctl->mrctrl0,
		mmio_read_32((uint32_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uint32_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%x] mrctrl1 = 0x%x\n",
		(uint32_t)&priv->ctl->mrctrl1,
		mmio_read_32((uint32_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uint32_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uint32_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%x] mrctrl0 = 0x%x\n",
		(uint32_t)&priv->ctl->mrctrl0, mrctrl0);
}
503
/*
 * Switch DDR3 from DLL-on to DLL-off.
 * Follows the uMCTL2 DLL-off sequence: block the HIF, flush the CAM,
 * disable RTT and the DRAM DLL via MRS, enter software self-refresh,
 * flip MSTR.dll_off_mode and the PHY DLL controls, then exit
 * self-refresh and re-enable the HIF.
 */
static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
{
	uint32_t mr1 = mmio_read_32((uint32_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uint32_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uint32_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%x] dbg1 = 0x%x\n",
		(uint32_t)&priv->ctl->dbg1,
		mmio_read_32((uint32_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0 ,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 *
	 * NOTE(review): the loop below keeps polling WHILE the pipelines are
	 * empty and the queue depths are 0, i.e. it exits as soon as the CAM
	 * is NOT flushed — the opposite of what this comment describes.
	 * Looks inverted; confirm against the uMCTL2 databook before relying
	 * on this flush.
	 */
	do {
		dbgcam = mmio_read_32((uint32_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%x] dbgcam = 0x%x\n",
			(uint32_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uint32_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%x] pwrctl = 0x%x\n",
		(uint32_t)&priv->ctl->pwrctl,
		mmio_read_32((uint32_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uint32_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%x] mstr = 0x%x\n",
		(uint32_t)&priv->ctl->mstr,
		mmio_read_32((uint32_t)&priv->ctl->mstr));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (stm32mp1_clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uint32_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uint32_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uint32_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uint32_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uint32_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uint32_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uint32_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uint32_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uint32_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%x] dbg1 = 0x%x\n",
		(uint32_t)&priv->ctl->dbg1,
		mmio_read_32((uint32_t)&priv->ctl->dbg1));
}
655
656 static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
657 {
658 stm32mp1_start_sw_done(ctl);
659 /* Quasi-dynamic register update*/
660 mmio_setbits_32((uint32_t)&ctl->rfshctl3,
661 DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
662 mmio_clrbits_32((uint32_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
663 mmio_clrbits_32((uint32_t)&ctl->dfimisc,
664 DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
665 stm32mp1_wait_sw_done_ack(ctl);
666 }
667
668 static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
669 uint32_t rfshctl3, uint32_t pwrctl)
670 {
671 stm32mp1_start_sw_done(ctl);
672 if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
673 mmio_clrbits_32((uint32_t)&ctl->rfshctl3,
674 DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
675 }
676 if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
677 mmio_setbits_32((uint32_t)&ctl->pwrctl,
678 DDRCTRL_PWRCTL_POWERDOWN_EN);
679 }
680 mmio_setbits_32((uint32_t)&ctl->dfimisc,
681 DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
682 stm32mp1_wait_sw_done_ack(ctl);
683 }
684
685 static int board_ddr_power_init(enum ddr_type ddr_type)
686 {
687 if (dt_check_pmic()) {
688 return pmic_ddr_power_init(ddr_type);
689 }
690
691 return 0;
692 }
693
/*
 * Full DDR initialization entry point: power the rails, reset and
 * clock the controller/PHY, program all register families from
 * @config, run PHY init and DQS training, then open both AXI ports.
 * Panics on any failure (power init, clock enable, polling timeouts).
 */
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret;

	/* Pick the supply setup matching the memory type in MSTR */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %d MHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uint32_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%x] dfimisc = 0x%x\n",
		(uint32_t)&priv->ctl->dfimisc,
		mmio_read_32((uint32_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uint32_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%x] mstr = 0x%x\n",
			(uint32_t)&priv->ctl->mstr,
			mmio_read_32((uint32_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uint32_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%x] init0 = 0x%x\n",
		(uint32_t)&priv->ctl->init0,
		mmio_read_32((uint32_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uint32_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%x] mr1 = 0x%x\n",
			(uint32_t)&priv->phy->mr1,
			mmio_read_32((uint32_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to PUBL that controller performs SDRAM initialization
	 *    by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
	 *    DRAM init is done by PHY, init0.skip_dram.init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. SET DFIMISC.dfi_init_complete_en to 1
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uint32_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%x] dfimisc = 0x%x\n",
		(uint32_t)&priv->ctl->dfimisc,
		mmio_read_32((uint32_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *    by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_au_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfiinit_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 *     to run
	 *     Warning : RVTRN is not supported by this PUBL
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uint32_t)&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%x] pctrl_0 = 0x%x\n",
		(uint32_t)&priv->ctl->pctrl_0,
		mmio_read_32((uint32_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uint32_t)&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%x] pctrl_1 = 0x%x\n",
		(uint32_t)&priv->ctl->pctrl_1,
		mmio_read_32((uint32_t)&priv->ctl->pctrl_1));
}