/*
 * Copyright (C) 2018-2022, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <errno.h>
#include <stddef.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/clk.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <drivers/st/stm32mp_ddr.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

#include <platform_def.h>

#define DDRCTL_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

#define DDRPHY_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

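/*
 * For reference, DDRCTL_REG_REG(mstr) below expands to:
 *
 *	{
 *		.name = "mstr",
 *		.offset = offsetof(struct stm32mp_ddrctl, mstr),
 *		.par_offset = offsetof(struct stm32mp1_ddrctrl_reg, mstr)
 *	}
 *
 * i.e. each descriptor pairs a controller (or PHY) register offset with
 * the offset of the matching parameter in the device tree settings
 * structure, so the same tables serve both DT parsing and register dump.
 */
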
/*
 * PARAMETERS: values retrieved from the device tree.
 *             Size and order must stay aligned with the bindings;
 *             modification is NOT allowed!
 */
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */
#else
#define DDRCTL_REG_PERF_SIZE	11	/* st,ctl-perf */
#endif
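/*
 * With the second AXI port enabled, six more port-1 registers (pcfgr_1,
 * pcfgw_1 and the port-1 QoS registers) are appended to ddr_perf[] below,
 * hence 17 entries instead of 11.
 */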

#if STM32MP_DDR_32BIT_INTERFACE
#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#else
#define DDRPHY_REG_REG_SIZE	9	/* st,phy-reg */
#endif
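/*
 * The 32-bit interface uses four DX byte lanes instead of two, which adds
 * dx2gcr and dx3gcr to ddrphy_reg[] below (11 entries instead of 9).
 */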
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
#endif
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
#if STM32MP_DDR_32BIT_INTERFACE
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
#endif
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

/*
 * REGISTERS ARRAY: used to parse the device tree and for interactive mode
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};

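/*
 * Poll PGSR until the PHY reports initialization done (PGSR.IDONE).
 * Any training error flag stops the polling early, and a 1 s timeout
 * triggers a panic.
 */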
static void stm32mp1_ddrphy_idone_wait(struct stm32mp_ddrphy *phy)
{
	uint32_t pgsr;
	int error = 0;
	uint64_t timeout = timeout_init_us(TIMEOUT_US_1S);

	do {
		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);

		VERBOSE("  > [0x%lx] pgsr = 0x%x\n",
			(uintptr_t)&phy->pgsr, pgsr);

		if (timeout_elapsed(timeout)) {
			panic();
		}

		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
			VERBOSE("DQS Gate Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
			VERBOSE("DQS Gate Training Intermittent Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
			VERBOSE("DQS Drift Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
			VERBOSE("Read Valid Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
			VERBOSE("Read Valid Training Intermittent Error\n");
			error++;
		}
	} while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
		(uintptr_t)&phy->pgsr, pgsr);
}

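/*
 * Kick off a PHY initialization sequence: the requested PIR step mask is
 * written with PIR.INIT set, then completion is awaited via PGSR.IDONE.
 */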
static void stm32mp1_ddrphy_init(struct stm32mp_ddrphy *phy, uint32_t pir)
{
	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;

	mmio_write_32((uintptr_t)&phy->pir, pir_init);
	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
		(uintptr_t)&phy->pir, pir_init,
		mmio_read_32((uintptr_t)&phy->pir));

	/* Need to wait 10 configuration clock cycles before starting to poll */
	udelay(10);

	/* Wait for DRAM initialization and gate training evaluation to complete */
	stm32mp1_ddrphy_idone_wait(phy);
}

/* Wait for the controller to reach the requested operating mode */
static void stm32mp1_wait_operating_mode(struct stm32mp_ddr_priv *priv, uint32_t mode)
{
	uint64_t timeout;
	uint32_t stat;
	int break_loop = 0;

	timeout = timeout_init_us(TIMEOUT_US_1S);
	for ( ; ; ) {
		uint32_t operating_mode;
		uint32_t selref_type;

		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%lx] stat = 0x%x\n",
			(uintptr_t)&priv->ctl->stat, stat);
		if (timeout_elapsed(timeout)) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => also check STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: also accept automatic self-refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}

/* Mode Register Writes (MRW or MRS) */
static void stm32mp1_mode_register_write(struct stm32mp_ddr_priv *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
	       DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}

/* Switch DDR3 from DLL-on to DLL-off */
static void stm32mp1_ddr3_dll_off(struct stm32mp_ddr_priv *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while (!(((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		    DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) &&
		   ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U)));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */
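	/* No code needed: DDR4 is not supported by this driver (step kept for reference). */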

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0] == 11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure the transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0] = 2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set MSTR.dll_off_mode = 1.
	 *    Warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers,
	 *     both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#if STM32MP_DDR_32BIT_INTERFACE
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#endif

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}

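/*
 * Disable automatic refresh and power-down, and mask dfi_init_complete_en,
 * before launching a PHY training sequence.
 */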
static void stm32mp1_refresh_disable(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}

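/*
 * Restore the refresh and power-down settings saved by the caller before
 * training (the configured rfshctl3/pwrctl values) and re-enable
 * dfi_init_complete_en.
 */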
static void stm32mp1_refresh_restore(struct stm32mp_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}

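/*
 * Full cold-boot initialization of the DDR subsystem: board power-up for
 * the detected DDR type, reset/clock sequencing, uMCTL2 and PUBL register
 * programming from the device tree settings, PHY initialization, DQS
 * training, and finally enabling the AXI port.
 */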
void stm32mp1_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size  = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *    note: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 us allows a lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);

	/* DDR3: do not set DLL-off for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);

	/* Skip CTRL init, SDRAM init is done by the PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	stm32mp_ddr_set_reg(priv, REGPHY_REG, &config->p_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REGPHY_TIMING, &config->p_timing, ddr_registers);

	/* DDR3: do not set DLL-off for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE.
	 *    Perform DDR PHY DRAM initialization and gate training evaluation.
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to the PUBL that the controller performs SDRAM
	 *    initialization by setting PIR.INIT and PIR.CTLDINIT, and poll
	 *    PGSR.IDONE. DRAM init is done by the PHY, INIT0.skip_dram_init = 1.
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. Set DFIMISC.dfi_init_complete_en to 1.
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operating mode
	 *    by monitoring the STAT.operating_mode signal.
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL-off mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable auto refresh and power-down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and the rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. Configure the PUBL PIR register to specify which training steps
	 *     to run.
	 *     RVTRN is executed only on LPDDR2/LPDDR3.
	 */
	pir = DDRPHYC_PIR_QSTRN;
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) == 0U) {
		pir |= DDRPHYC_PIR_RVTRN;
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. Monitor PUBL PGSR.IDONE to poll completion of the training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. Set back the registers of step 8 to their original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	stm32mp_ddr_enable_axi_port(priv->ctl);
}
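
/*
 * Typical call flow (a sketch, assuming the BL2 RAM driver in
 * stm32mp1_ram.c): the driver fills a struct stm32mp_ddr_config from the
 * "st,ctl-*" and "st,phy-*" device tree properties, then runs:
 *
 *	stm32mp1_ddr_init(priv, &config);
 *
 * after which the DRAM is initialized and trained, and the AXI port is
 * enabled for normal traffic.
 */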