/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * Taken and adapted from U-Boot.
 */

#include "sdram.h"
#include <types.h>
#include <device/mmio.h>
#include <delay.h>
#include "clock.h"

static struct vtp_reg *vtpreg[2] = {(struct vtp_reg *)VTP0_CTRL_ADDR,
				    (struct vtp_reg *)VTP1_CTRL_ADDR};

/**
 * Base addresses for EMIF instances
 */
static struct emif_reg_struct *emif_reg[2] = {(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
					      (struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
					      (struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
						(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};

/**
 * Base address for the DDR I/O control instance
 */
static struct ddr_cmdtctrl *ioctrl_reg = (struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR;

struct ctrl_stat *cstat = (struct ctrl_stat *)CTRL_BASE;

static struct ddr_ctrl *ddrctrl = (struct ddr_ctrl *)DDR_CTRL_ADDR;


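/**
 * Enable the VTP (voltage/temperature/process) compensation cell for the
 * DDR I/Os, restart its calibration and wait for it to report ready.
 */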
static void config_vtp(int nr)
{
	write32(&vtpreg[nr]->vtp0ctrlreg, read32(&vtpreg[nr]->vtp0ctrlreg) | VTP_CTRL_ENABLE);
	write32(&vtpreg[nr]->vtp0ctrlreg,
		read32(&vtpreg[nr]->vtp0ctrlreg) & (~VTP_CTRL_START_EN));
	write32(&vtpreg[nr]->vtp0ctrlreg, read32(&vtpreg[nr]->vtp0ctrlreg) | VTP_CTRL_START_EN);

	/* Poll for READY */
	while ((read32(&vtpreg[nr]->vtp0ctrlreg) & VTP_CTRL_READY) != VTP_CTRL_READY)
		;
}

/**
 * Configure SDRAM
 */
static void config_sdram(const struct emif_regs *regs, int nr)
{
	if (regs->zq_config) {
		write32(&emif_reg[nr]->emif_zq_config, regs->zq_config);
		write32(&cstat->secure_emif_sdram_config, regs->sdram_config);
		write32(&emif_reg[nr]->emif_sdram_config, regs->sdram_config);

		/* Trigger initialization */
		write32(&emif_reg[nr]->emif_sdram_ref_ctrl, 0x00003100);
		/* Wait 1ms to avoid an L3 timeout error */
		udelay(1000);

		/* Write the proper sdram_ref_ctrl value */
		write32(&emif_reg[nr]->emif_sdram_ref_ctrl, regs->ref_ctrl);
		write32(&emif_reg[nr]->emif_sdram_ref_ctrl_shdw, regs->ref_ctrl);
	}
	write32(&emif_reg[nr]->emif_sdram_ref_ctrl, regs->ref_ctrl);
	write32(&emif_reg[nr]->emif_sdram_ref_ctrl_shdw, regs->ref_ctrl);
	write32(&emif_reg[nr]->emif_sdram_config, regs->sdram_config);

	/* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
	if (regs->ocp_config)
		write32(&emif_reg[nr]->emif_l3_config, regs->ocp_config);
}

/**
 * Configure DDR DATA registers
 */
static void config_ddr_data(const struct ddr_data *data, int nr)
{
	int i;

	if (!data)
		return;

	for (i = 0; i < DDR_DATA_REGS_NR; i++) {
		write32(&(ddr_data_reg[nr] + i)->dt0rdsratio0, data->datardsratio0);
		write32(&(ddr_data_reg[nr] + i)->dt0wdsratio0, data->datawdsratio0);
		write32(&(ddr_data_reg[nr] + i)->dt0wiratio0, data->datawiratio0);
		write32(&(ddr_data_reg[nr] + i)->dt0giratio0, data->datagiratio0);
		write32(&(ddr_data_reg[nr] + i)->dt0fwsratio0, data->datafwsratio0);
		write32(&(ddr_data_reg[nr] + i)->dt0wrsratio0, data->datawrsratio0);
	}
}

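/**
 * Configure the DDR I/O control registers for the command and data macros
 */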
static void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
	if (!ioregs)
		return;

	write32(&ioctrl_reg->cm0ioctl, ioregs->cm0ioctl);
	write32(&ioctrl_reg->cm1ioctl, ioregs->cm1ioctl);
	write32(&ioctrl_reg->cm2ioctl, ioregs->cm2ioctl);
	write32(&ioctrl_reg->dt0ioctl, ioregs->dt0ioctl);
	write32(&ioctrl_reg->dt1ioctl, ioregs->dt1ioctl);
}


/**
 * Configure DDR CMD control registers
 */
static void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
{
	if (!cmd)
		return;

	write32(&ddr_cmd_reg[nr]->cm0csratio, cmd->cmd0csratio);
	write32(&ddr_cmd_reg[nr]->cm0iclkout, cmd->cmd0iclkout);

	write32(&ddr_cmd_reg[nr]->cm1csratio, cmd->cmd1csratio);
	write32(&ddr_cmd_reg[nr]->cm1iclkout, cmd->cmd1iclkout);

	write32(&ddr_cmd_reg[nr]->cm2csratio, cmd->cmd2csratio);
	write32(&ddr_cmd_reg[nr]->cm2iclkout, cmd->cmd2iclkout);
}

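/* Read the major revision field from the EMIF module ID register */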
static inline uint32_t get_emif_rev(uint32_t base)
{
	struct emif_reg_struct *emif = (struct emif_reg_struct *)base;

	return (read32(&emif->emif_mod_id_rev) & EMIF_REG_MAJOR_REVISION_MASK)
	       >> EMIF_REG_MAJOR_REVISION_SHIFT;
}

/*
 * Get the SDRAM type connected to the EMIF. Assuming similar SDRAM parts
 * are connected to both EMIFs, which is typically the case, it is
 * sufficient to read the SDRAM type from EMIF1.
 */
static inline uint32_t emif_sdram_type(uint32_t sdram_config)
{
	return (sdram_config & EMIF_REG_SDRAM_TYPE_MASK) >> EMIF_REG_SDRAM_TYPE_SHIFT;
}

/*
 * Configure EXT PHY registers for software leveling
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	uint32_t *ext_phy_ctrl_base = 0;
	uint32_t *emif_ext_phy_ctrl_base = 0;
	uint32_t i = 0;

	ext_phy_ctrl_base = (uint32_t *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base = (uint32_t *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/*
	 * Configure the external PHY control timing registers; in the EMIF
	 * each of these registers is immediately followed by its shadow
	 * copy, so both get the same value.
	 */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		write32(emif_ext_phy_ctrl_base++, *ext_phy_ctrl_base);
		/* Update shadow registers */
		write32(emif_ext_phy_ctrl_base++, *ext_phy_ctrl_base++);
	}
}

/*
 * Configure EXT PHY registers for hardware leveling
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF.  For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	if (regs->emif_ddr_phy_ctlr_1 & 0x00040000) {
		/* PHY_INVERT_CLKOUT = 1 */
		write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_1, 0x00040100);
		write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw, 0x00040100);
	} else {
		/* PHY_INVERT_CLKOUT = 0 */
		write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_1, 0x08020080);
		write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw, 0x08020080);
	}

	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_22, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_23, 0x00600020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw, 0x00600020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_24, 0x40010080);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw, 0x40010080);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_25, 0x08102040);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw, 0x08102040);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_26, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_27, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_28, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_29, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_30, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw, 0x00200020);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_31, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_32, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_33, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_34, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_35, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw, 0x00000000);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36, 0x00000077);
	write32(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw, 0x00000077);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	write32(&emif_reg[nr]->emif_iodft_tlgc, 0x2011);
	write32(&emif_reg[nr]->emif_iodft_tlgc, 0x2411);
	write32(&emif_reg[nr]->emif_iodft_tlgc, 0x2011);
}


/**
 * Configure DDR PHY
 */
static void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we finish
	 * programming EMIF regs and set time between rising edge of
	 * DDR_RESET to rising edge of DDR_CKE to > 500us per memory spec.
	 * We currently hardcode a value based on a max expected frequency
	 * of 400MHz.
	 */
	write32(&emif_reg[nr]->emif_sdram_ref_ctrl, EMIF_REG_INITREF_DIS_MASK | 0x3100);

	write32(&emif_reg[nr]->emif_ddr_phy_ctrl_1, regs->emif_ddr_phy_ctlr_1);
	write32(&emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw, regs->emif_ddr_phy_ctlr_1);

	if (get_emif_rev((uint32_t)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}

/**
 * Set SDRAM timings
 */
static void set_sdram_timings(const struct emif_regs *regs, int nr)
{
	write32(&emif_reg[nr]->emif_sdram_tim_1, regs->sdram_tim1);
	write32(&emif_reg[nr]->emif_sdram_tim_1_shdw, regs->sdram_tim1);
	write32(&emif_reg[nr]->emif_sdram_tim_2, regs->sdram_tim2);
	write32(&emif_reg[nr]->emif_sdram_tim_2_shdw, regs->sdram_tim2);
	write32(&emif_reg[nr]->emif_sdram_tim_3, regs->sdram_tim3);
	write32(&emif_reg[nr]->emif_sdram_tim_3_shdw, regs->sdram_tim3);
}

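/**
 * Relock the DDR DPLL at the requested multiplier: switch the PLL to
 * bypass, program the new multiplier and dividers, then re-enable it and
 * wait for lock.
 */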
static void ddr_pll_config(uint32_t ddrpll_m)
{
	uint32_t clkmode, clksel, div_m2;

	clkmode = read32(&am335x_cm_wkup->clkmode_dpll_ddr);
	clksel = read32(&am335x_cm_wkup->clksel_dpll_ddr);
	div_m2 = read32(&am335x_cm_wkup->div_m2_dpll_ddr);

	/* Set the PLL to bypass mode */
	clkmode = (clkmode & CLK_MODE_MASK) | PLL_BYPASS_MODE;
	write32(&am335x_cm_wkup->clkmode_dpll_ddr, clkmode);

	/* Wait until bypass mode is enabled */
	while ((read32(&am335x_cm_wkup->idlest_dpll_ddr) & ST_MN_BYPASS) != ST_MN_BYPASS)
		;

	clksel = clksel & (~CLK_SEL_MASK);
	clksel = clksel | ((ddrpll_m << CLK_SEL_SHIFT) | DDRPLL_N);
	write32(&am335x_cm_wkup->clksel_dpll_ddr, clksel);

	div_m2 = div_m2 & CLK_DIV_SEL;
	div_m2 = div_m2 | DDRPLL_M2;
	write32(&am335x_cm_wkup->div_m2_dpll_ddr, div_m2);

	clkmode = (clkmode & CLK_MODE_MASK) | CLK_MODE_SEL;
	write32(&am335x_cm_wkup->clkmode_dpll_ddr, clkmode);

	/* Wait until the DPLL is locked */
	while ((read32(&am335x_cm_wkup->idlest_dpll_ddr) & ST_DPLL_CLK) != ST_DPLL_CLK)
		;
}


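/**
 * Enable the EMIF0 module clock and wait until the module reports itself
 * functional.
 */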
static void enable_emif_clocks(void)
{
	/* Enable EMIF0 clock */
	write32(&am335x_cm_per->emif, CM_MODULEMODE_ENABLED);
	/* Poll until the module is functional */
	while ((read32(&am335x_cm_per->emif)) != CM_MODULEMODE_ENABLED)
		;
}

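/**
 * Bring up SDRAM on EMIF instance 'nr': enable the EMIF clock, lock the
 * DDR PLL, run VTP compensation, program the PHY command/data macros and
 * DDR I/O controls, then configure the EMIF PHY, timings and SDRAM config.
 */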
void config_ddr(uint32_t pll, const struct ctrl_ioregs *ioregs, const struct ddr_data *data,
		const struct cmd_control *ctrl, const struct emif_regs *regs, int nr)
{
	enable_emif_clocks();
	ddr_pll_config(pll);
	config_vtp(nr);
	config_cmd_ctrl(ctrl, nr);
	config_ddr_data(data, nr);
	config_io_ctrl(ioregs);

	/* Set CKE to be controlled by EMIF/DDR PHY */
	write32(&ddrctrl->ddrckectrl, DDR_CKE_CTRL_NORMAL);

	/* Program EMIF instance */
	config_ddr_phy(regs, nr);

	set_sdram_timings(regs, nr);
	config_sdram(regs, nr);
}
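
/*
 * Usage sketch (illustrative only, not part of the original file): a board's
 * early init would call config_ddr() with its own DDR geometry and timing
 * structures. The struct names, function name and the 400 MHz DDR3 figure
 * below are assumptions for illustration; real values come from the memory
 * part's datasheet and TI's EMIF register calculation tools.
 *
 *	static const struct ctrl_ioregs board_ioregs    = { ... };
 *	static const struct ddr_data board_ddr3_data    = { ... };
 *	static const struct cmd_control board_ddr3_cmd  = { ... };
 *	static const struct emif_regs board_ddr3_emif   = { ... };
 *
 *	void board_sdram_init(void)
 *	{
 *		// 400 == DDR PLL multiplier for a 400 MHz DDR3 clock
 *		config_ddr(400, &board_ioregs, &board_ddr3_data,
 *			   &board_ddr3_cmd, &board_ddr3_emif, 0);
 *	}
 */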