/* SPDX-License-Identifier: GPL-2.0-only */

#include <commonlib/bsd/clamp.h>
#include <console/console.h>
#include <console/usb.h>
#include <cpu/intel/model_206ax/model_206ax.h>
#include <delay.h>
#include <device/device.h>
#include <device/pci_def.h>
#include <device/pci_ops.h>
#include <northbridge/intel/sandybridge/chip.h>
#include <stdbool.h>
#include <stdint.h>

#include "sandybridge.h"
#include "raminit_common.h"
#include "raminit_tables.h"

#define SNB_MIN_DCLK_133_MULT	3
#define SNB_MAX_DCLK_133_MULT	8
#define IVB_MIN_DCLK_133_MULT	3
#define IVB_MAX_DCLK_133_MULT	10
#define IVB_MIN_DCLK_100_MULT	7
#define IVB_MAX_DCLK_100_MULT	12

/* Frequency multiplier */
static u32 get_FRQ(const ramctr_timing *ctrl)
{
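	/*
	 * tCK is in 1/256 ns units, so 256000 / tCK is the DRAM frequency in MHz;
	 * dividing by the reference clock (100 or 133 MHz) yields the DCLK multiplier,
	 * which is then clamped to the per-generation limits defined above.
	 */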
	const u32 FRQ = 256000 / (ctrl->tCK * ctrl->base_freq);

	if (IS_IVY_CPU(ctrl->cpu)) {
		if (ctrl->base_freq == 100)
			return clamp_u32(IVB_MIN_DCLK_100_MULT, FRQ, IVB_MAX_DCLK_100_MULT);

		if (ctrl->base_freq == 133)
			return clamp_u32(IVB_MIN_DCLK_133_MULT, FRQ, IVB_MAX_DCLK_133_MULT);

	} else if (IS_SANDY_CPU(ctrl->cpu)) {
		if (ctrl->base_freq == 133)
			return clamp_u32(SNB_MIN_DCLK_133_MULT, FRQ, SNB_MAX_DCLK_133_MULT);
	}

	die("Unsupported CPU or base frequency.");
}

/* CAS write latency. To be programmed in MR2. See DDR3 SPEC for MR2 documentation. */
static u8 get_CWL(u32 tCK)
{
	/* Get CWL based on tCK using the following rule */
	switch (tCK) {
	case TCK_1333MHZ:
		return 12;

	case TCK_1200MHZ:
	case TCK_1100MHZ:
		return 11;

	case TCK_1066MHZ:
	case TCK_1000MHZ:
		return 10;

	case TCK_933MHZ:
	case TCK_900MHZ:
		return 9;

	case TCK_800MHZ:
	case TCK_700MHZ:
		return 8;

	case TCK_666MHZ:
		return 7;

	case TCK_533MHZ:
		return 6;

	default:
		return 5;
	}
}

/* Get REFI based on frequency index, tREFI = 7.8usec */
static u32 get_REFI(u32 FRQ, u8 base_freq)
{
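	/*
	 * The frq_*_map tables are indexed from the minimum DCLK multiplier:
	 * 7 with a 100 MHz reference clock, 3 with a 133 MHz one.
	 */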
	if (base_freq == 100)
		return frq_refi_map[1][FRQ - 7];

	else
		return frq_refi_map[0][FRQ - 3];
}

/* Get XSOffset based on frequency index, tXS-Offset: tXS = tRFC + 10ns */
static u8 get_XSOffset(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_xs_map[1][FRQ - 7];

	else
		return frq_xs_map[0][FRQ - 3];
}

/* Get MOD based on frequency index */
static u8 get_MOD(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_mod_map[1][FRQ - 7];

	else
		return frq_mod_map[0][FRQ - 3];
}

/* Get Write Leveling Output delay based on frequency index */
static u8 get_WLO(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_wlo_map[1][FRQ - 7];

	else
		return frq_wlo_map[0][FRQ - 3];
}

/* Get CKE based on frequency index */
static u8 get_CKE(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_cke_map[1][FRQ - 7];

	else
		return frq_cke_map[0][FRQ - 3];
}

/* Get XPDLL based on frequency index */
static u8 get_XPDLL(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_xpdll_map[1][FRQ - 7];

	else
		return frq_xpdll_map[0][FRQ - 3];
}

/* Get XP based on frequency index */
static u8 get_XP(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_xp_map[1][FRQ - 7];

	else
		return frq_xp_map[0][FRQ - 3];
}

/* Get AONPD based on frequency index */
static u8 get_AONPD(u32 FRQ, u8 base_freq)
{
	if (base_freq == 100)
		return frq_aonpd_map[1][FRQ - 7];

	else
		return frq_aonpd_map[0][FRQ - 3];
}

/* Get COMP2 based on CPU generation and clock speed */
static u32 get_COMP2(const ramctr_timing *ctrl)
{
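	/*
	 * A smaller tCK means a higher frequency, so the first match below
	 * selects the fastest applicable band.
	 */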
	const bool is_ivybridge = IS_IVY_CPU(ctrl->cpu);

	if (ctrl->tCK <= TCK_1066MHZ)
		return is_ivybridge ? 0x0C235924 : 0x0C21410C;
	else if (ctrl->tCK <= TCK_933MHZ)
		return is_ivybridge ? 0x0C446964 : 0x0C42514C;
	else if (ctrl->tCK <= TCK_800MHZ)
		return is_ivybridge ? 0x0C6671E4 : 0x0C6369CC;
	else if (ctrl->tCK <= TCK_666MHZ)
		return is_ivybridge ? 0x0CA8C264 : 0x0CA57A4C;
	else if (ctrl->tCK <= TCK_533MHZ)
		return is_ivybridge ? 0x0CEBDB64 : 0x0CE7C34C;
	else
		return is_ivybridge ? 0x0D6FF5E4 : 0x0D6BEDCC;
}

/* Get updated COMP1 based on CPU generation and stepping */
static u32 get_COMP1(ramctr_timing *ctrl, const int channel)
{
	const union comp_ofst_1_reg orig_comp = {
		.raw = mchbar_read32(CRCOMPOFST1_ch(channel)),
	};

	if (IS_SANDY_CPU(ctrl->cpu) && !IS_SANDY_CPU_D2(ctrl->cpu)) {
		union comp_ofst_1_reg comp_ofst_1 = orig_comp;

		comp_ofst_1.clk_odt_up = 1;
		comp_ofst_1.clk_drv_up = 1;
		comp_ofst_1.ctl_drv_up = 1;

		return comp_ofst_1.raw;
	}

	/* Fix PCODE COMP offset bug: revert to default values */
	union comp_ofst_1_reg comp_ofst_1 = {
		.dq_odt_down  = 4,
		.dq_odt_up    = 4,
		.clk_odt_down = 4,
		.clk_odt_up   = orig_comp.clk_odt_up,
		.dq_drv_down  = 4,
		.dq_drv_up    = orig_comp.dq_drv_up,
		.clk_drv_down = 4,
		.clk_drv_up   = orig_comp.clk_drv_up,
		.ctl_drv_down = 4,
		.ctl_drv_up   = orig_comp.ctl_drv_up,
	};

	if (IS_IVY_CPU(ctrl->cpu))
		comp_ofst_1.dq_drv_up = 2;	/* 28p6 ohms */

	return comp_ofst_1.raw;
}

static void normalize_tclk(ramctr_timing *ctrl, bool ref_100mhz_support)
{
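	/*
	 * tCK is expressed in 1/256 ns units, so a larger tCK means a lower frequency.
	 * Round tCK up to the nearest supported speed bin and record its reference clock.
	 */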
	if (ctrl->tCK <= TCK_1200MHZ) {
		ctrl->tCK = TCK_1200MHZ;
		ctrl->base_freq = 133;
	} else if (ctrl->tCK <= TCK_1100MHZ) {
		ctrl->tCK = TCK_1100MHZ;
		ctrl->base_freq = 100;
	} else if (ctrl->tCK <= TCK_1066MHZ) {
		ctrl->tCK = TCK_1066MHZ;
		ctrl->base_freq = 133;
	} else if (ctrl->tCK <= TCK_1000MHZ) {
		ctrl->tCK = TCK_1000MHZ;
		ctrl->base_freq = 100;
	} else if (ctrl->tCK <= TCK_933MHZ) {
		ctrl->tCK = TCK_933MHZ;
		ctrl->base_freq = 133;
	} else if (ctrl->tCK <= TCK_900MHZ) {
		ctrl->tCK = TCK_900MHZ;
		ctrl->base_freq = 100;
	} else if (ctrl->tCK <= TCK_800MHZ) {
		ctrl->tCK = TCK_800MHZ;
		ctrl->base_freq = 133;
	} else if (ctrl->tCK <= TCK_700MHZ) {
		ctrl->tCK = TCK_700MHZ;
		ctrl->base_freq = 100;
	} else if (ctrl->tCK <= TCK_666MHZ) {
		ctrl->tCK = TCK_666MHZ;
		ctrl->base_freq = 133;
	} else if (ctrl->tCK <= TCK_533MHZ) {
		ctrl->tCK = TCK_533MHZ;
		ctrl->base_freq = 133;
	} else if (ctrl->tCK <= TCK_400MHZ) {
		ctrl->tCK = TCK_400MHZ;
		ctrl->base_freq = 133;
	} else {
		ctrl->tCK = 0;
		return;
	}

	if (!ref_100mhz_support && ctrl->base_freq == 100) {
		/* Skip unsupported frequency */
		ctrl->tCK++;
		normalize_tclk(ctrl, ref_100mhz_support);
	}
}

#define DEFAULT_TCK	TCK_800MHZ

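/*
 * Return the minimum tCK (i.e. the highest DRAM frequency) allowed by either
 * the devicetree limit or the DMFC capability fuses.
 */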
static unsigned int get_mem_min_tck(void)
{
	u32 reg32;
	u8 rev;
	const struct northbridge_intel_sandybridge_config *cfg = NULL;

	/* Actually, config of MCH or Host Bridge */
	cfg = config_of_soc();

	/* If non-zero, it was set in the devicetree */
	if (cfg->max_mem_clock_mhz) {
		if (cfg->max_mem_clock_mhz >= 1066)
			return TCK_1066MHZ;

		else if (cfg->max_mem_clock_mhz >= 933)
			return TCK_933MHZ;

		else if (cfg->max_mem_clock_mhz >= 800)
			return TCK_800MHZ;

		else if (cfg->max_mem_clock_mhz >= 666)
			return TCK_666MHZ;

		else if (cfg->max_mem_clock_mhz >= 533)
			return TCK_533MHZ;

		else
			return TCK_400MHZ;
	}

	if (CONFIG(NATIVE_RAMINIT_IGNORE_MAX_MEM_FUSES))
		return TCK_1333MHZ;

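	/* The host bridge device ID's low byte distinguishes Sandy Bridge from Ivy Bridge */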
	rev = pci_read_config8(HOST_BRIDGE, PCI_DEVICE_ID);

	if ((rev & BASE_REV_MASK) == BASE_REV_SNB) {
		/* Read Capabilities A Register DMFC bits */
		reg32 = pci_read_config32(HOST_BRIDGE, CAPID0_A);
		reg32 &= 0x7;

		switch (reg32) {
		case 7: return TCK_533MHZ;
		case 6: return TCK_666MHZ;
		case 5: return TCK_800MHZ;
		/* Reserved */
		default:
			break;
		}
	} else {
		/* Read Capabilities B Register DMFC bits */
		reg32 = pci_read_config32(HOST_BRIDGE, CAPID0_B);
		reg32 = (reg32 >> 4) & 0x7;

		switch (reg32) {
		case 7: return TCK_533MHZ;
		case 6: return TCK_666MHZ;
		case 5: return TCK_800MHZ;
		case 4: return TCK_933MHZ;
		case 3: return TCK_1066MHZ;
		case 2: return TCK_1200MHZ;
		case 1: return TCK_1333MHZ;
		/* Reserved */
		default:
			break;
		}
	}
	return DEFAULT_TCK;
}

static void find_cas_tck(ramctr_timing *ctrl)
{
	u8 val;
	u32 reg32;
	u8 ref_100mhz_support;

	/* 100 MHz reference clock supported */
	reg32 = pci_read_config32(HOST_BRIDGE, CAPID0_B);
	ref_100mhz_support = (reg32 >> 21) & 0x7;
	printk(BIOS_DEBUG, "100MHz reference clock support: %s\n", ref_100mhz_support ? "yes"
										      : "no");

	printk(BIOS_DEBUG, "PLL_REF100_CFG value: 0x%x\n", ref_100mhz_support);

	ctrl->tCK = get_mem_min_tck();

	/* Find CAS latency */
	while (1) {
		/*
		 * Normalising tCK before computing clock could potentially
		 * result in a lower selected CAS, which is desired.
		 */
		normalize_tclk(ctrl, ref_100mhz_support);
		if (!(ctrl->tCK))
			die("Couldn't find compatible clock / CAS settings\n");

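		/*
		 * tAA / tCK, rounded up, is the lowest CAS that satisfies the DIMMs' tAA;
		 * scan the cas_supported bitmap (bit 0 = MIN_CAS) for the first supported
		 * latency at or above it.
		 */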
		val = DIV_ROUND_UP(ctrl->tAA, ctrl->tCK);
		printk(BIOS_DEBUG, "Trying CAS %u, tCK %u.\n", val, ctrl->tCK);
		for (; val <= MAX_CAS; val++)
			if ((ctrl->cas_supported >> (val - MIN_CAS)) & 1)
				break;

		if (val == (MAX_CAS + 1)) {
			ctrl->tCK++;
			continue;
		} else {
			printk(BIOS_DEBUG, "Found compatible clock, CAS pair.\n");
			break;
		}
	}

	/* Frequency multiplier */
	ctrl->FRQ = get_FRQ(ctrl);

	printk(BIOS_DEBUG, "Selected DRAM frequency: %u MHz\n", NS2MHZ_DIV256 / ctrl->tCK);
	printk(BIOS_DEBUG, "Selected CAS latency   : %uT\n", val);
	ctrl->CAS = val;
}

static void dram_timing(ramctr_timing *ctrl)
{
	/*
	 * On Sandy Bridge, the maximum supported DDR3 frequency is 1066MHz (DDR3 2133).
	 * Cap it for faster DIMMs, and align it to the closest JEDEC standard frequency.
	 */
	/*
	 * On Ivy Bridge, the maximum supported DDR3 frequency is 1400MHz (DDR3 2800).
	 * Cap it at 1200MHz (DDR3 2400), and align it to the closest JEDEC standard frequency.
	 */
	if (ctrl->tCK == TCK_1200MHZ) {
		ctrl->edge_offset[0] = 18; //XXX: guessed
		ctrl->edge_offset[1] = 8;
		ctrl->edge_offset[2] = 8;
		ctrl->tx_dq_offset[0] = 20; //XXX: guessed
		ctrl->tx_dq_offset[1] = 8;
		ctrl->tx_dq_offset[2] = 8;
		ctrl->pi_coding_threshold = 10;

	} else if (ctrl->tCK == TCK_1100MHZ) {
		ctrl->edge_offset[0] = 17; //XXX: guessed
		ctrl->edge_offset[1] = 7;
		ctrl->edge_offset[2] = 7;
		ctrl->tx_dq_offset[0] = 19; //XXX: guessed
		ctrl->tx_dq_offset[1] = 7;
		ctrl->tx_dq_offset[2] = 7;
		ctrl->pi_coding_threshold = 13;

	} else if (ctrl->tCK == TCK_1066MHZ) {
		ctrl->edge_offset[0] = 16;
		ctrl->edge_offset[1] = 7;
		ctrl->edge_offset[2] = 7;
		ctrl->tx_dq_offset[0] = 18;
		ctrl->tx_dq_offset[1] = 7;
		ctrl->tx_dq_offset[2] = 7;
		ctrl->pi_coding_threshold = 13;

	} else if (ctrl->tCK == TCK_1000MHZ) {
		ctrl->edge_offset[0] = 15; //XXX: guessed
		ctrl->edge_offset[1] = 6;
		ctrl->edge_offset[2] = 6;
		ctrl->tx_dq_offset[0] = 17; //XXX: guessed
		ctrl->tx_dq_offset[1] = 6;
		ctrl->tx_dq_offset[2] = 6;
		ctrl->pi_coding_threshold = 13;

	} else if (ctrl->tCK == TCK_933MHZ) {
		ctrl->edge_offset[0] = 14;
		ctrl->edge_offset[1] = 6;
		ctrl->edge_offset[2] = 6;
		ctrl->tx_dq_offset[0] = 15;
		ctrl->tx_dq_offset[1] = 6;
		ctrl->tx_dq_offset[2] = 6;
		ctrl->pi_coding_threshold = 15;

	} else if (ctrl->tCK == TCK_900MHZ) {
		ctrl->edge_offset[0] = 14; //XXX: guessed
		ctrl->edge_offset[1] = 6;
		ctrl->edge_offset[2] = 6;
		ctrl->tx_dq_offset[0] = 15; //XXX: guessed
		ctrl->tx_dq_offset[1] = 6;
		ctrl->tx_dq_offset[2] = 6;
		ctrl->pi_coding_threshold = 12;

	} else if (ctrl->tCK == TCK_800MHZ) {
		ctrl->edge_offset[0] = 13;
		ctrl->edge_offset[1] = 5;
		ctrl->edge_offset[2] = 5;
		ctrl->tx_dq_offset[0] = 14;
		ctrl->tx_dq_offset[1] = 5;
		ctrl->tx_dq_offset[2] = 5;
		ctrl->pi_coding_threshold = 15;

	} else if (ctrl->tCK == TCK_700MHZ) {
		ctrl->edge_offset[0] = 13; //XXX: guessed
		ctrl->edge_offset[1] = 5;
		ctrl->edge_offset[2] = 5;
		ctrl->tx_dq_offset[0] = 14; //XXX: guessed
		ctrl->tx_dq_offset[1] = 5;
		ctrl->tx_dq_offset[2] = 5;
		ctrl->pi_coding_threshold = 16;

	} else if (ctrl->tCK == TCK_666MHZ) {
		ctrl->edge_offset[0] = 10;
		ctrl->edge_offset[1] = 4;
		ctrl->edge_offset[2] = 4;
		ctrl->tx_dq_offset[0] = 11;
		ctrl->tx_dq_offset[1] = 4;
		ctrl->tx_dq_offset[2] = 4;
		ctrl->pi_coding_threshold = 16;

	} else if (ctrl->tCK == TCK_533MHZ) {
		ctrl->edge_offset[0] = 8;
		ctrl->edge_offset[1] = 3;
		ctrl->edge_offset[2] = 3;
		ctrl->tx_dq_offset[0] = 9;
		ctrl->tx_dq_offset[1] = 3;
		ctrl->tx_dq_offset[2] = 3;
		ctrl->pi_coding_threshold = 17;

	} else { /* TCK_400MHZ */
		ctrl->edge_offset[0] = 6;
		ctrl->edge_offset[1] = 2;
		ctrl->edge_offset[2] = 2;
		ctrl->tx_dq_offset[0] = 6;
		ctrl->tx_dq_offset[1] = 2;
		ctrl->tx_dq_offset[2] = 2;
		ctrl->pi_coding_threshold = 17;
	}

	/* Initial phase between CLK/CMD pins */
	ctrl->pi_code_offset = (256000 / ctrl->tCK) / 66;

	/* DLL_CONFIG_MDLL_W_TIMER */
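	/* 128000 / tCK is roughly the number of DCLK cycles in 500 ns */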
	ctrl->mdll_wake_delay = (128000 / ctrl->tCK) + 3;

	if (ctrl->tCWL)
		ctrl->CWL = DIV_ROUND_UP(ctrl->tCWL, ctrl->tCK);
	else
		ctrl->CWL = get_CWL(ctrl->tCK);

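	/* Convert the DIMM timings from time units (1/256 ns) to DCLK cycles, rounding up */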
	ctrl->tRCD = DIV_ROUND_UP(ctrl->tRCD, ctrl->tCK);
	ctrl->tRP  = DIV_ROUND_UP(ctrl->tRP,  ctrl->tCK);
	ctrl->tRAS = DIV_ROUND_UP(ctrl->tRAS, ctrl->tCK);
	ctrl->tWR  = DIV_ROUND_UP(ctrl->tWR,  ctrl->tCK);
	ctrl->tFAW = DIV_ROUND_UP(ctrl->tFAW, ctrl->tCK);
	ctrl->tRRD = DIV_ROUND_UP(ctrl->tRRD, ctrl->tCK);
	ctrl->tRTP = DIV_ROUND_UP(ctrl->tRTP, ctrl->tCK);
	ctrl->tWTR = DIV_ROUND_UP(ctrl->tWTR, ctrl->tCK);
	ctrl->tRFC = DIV_ROUND_UP(ctrl->tRFC, ctrl->tCK);

	ctrl->tREFI     =     get_REFI(ctrl->FRQ, ctrl->base_freq);
	ctrl->tMOD      =      get_MOD(ctrl->FRQ, ctrl->base_freq);
	ctrl->tXSOffset = get_XSOffset(ctrl->FRQ, ctrl->base_freq);
	ctrl->tWLO      =      get_WLO(ctrl->FRQ, ctrl->base_freq);
	ctrl->tCKE      =      get_CKE(ctrl->FRQ, ctrl->base_freq);
	ctrl->tXPDLL    =    get_XPDLL(ctrl->FRQ, ctrl->base_freq);
	ctrl->tXP       =       get_XP(ctrl->FRQ, ctrl->base_freq);
	ctrl->tAONPD    =    get_AONPD(ctrl->FRQ, ctrl->base_freq);

	printk(BIOS_DEBUG, "Selected CWL latency   : %uT\n", ctrl->CWL);
	printk(BIOS_DEBUG, "Selected tRCD          : %uT\n", ctrl->tRCD);
	printk(BIOS_DEBUG, "Selected tRP           : %uT\n", ctrl->tRP);
	printk(BIOS_DEBUG, "Selected tRAS          : %uT\n", ctrl->tRAS);
	printk(BIOS_DEBUG, "Selected tWR           : %uT\n", ctrl->tWR);
	printk(BIOS_DEBUG, "Selected tFAW          : %uT\n", ctrl->tFAW);
	printk(BIOS_DEBUG, "Selected tRRD          : %uT\n", ctrl->tRRD);
	printk(BIOS_DEBUG, "Selected tRTP          : %uT\n", ctrl->tRTP);
	printk(BIOS_DEBUG, "Selected tWTR          : %uT\n", ctrl->tWTR);
	printk(BIOS_DEBUG, "Selected tRFC          : %uT\n", ctrl->tRFC);
}

static void dram_freq(ramctr_timing *ctrl)
{
	if (ctrl->tCK > TCK_400MHZ) {
		printk(BIOS_ERR,
			"DRAM frequency is under lowest supported frequency (400 MHz). "
			"Increasing to 400 MHz as last resort.\n");
		ctrl->tCK = TCK_400MHZ;
	}

	while (1) {
		u8 val2;
		u32 reg1 = 0;

		/* Step 1 - Determine target MPLL frequency */
		find_cas_tck(ctrl);

		/*
		 * The MPLL will never lock if the requested frequency is already set.
		 * Exit early to prevent a system hang.
		 */
		reg1 = mchbar_read32(MC_BIOS_DATA);
		val2 = (u8)reg1;
		if (val2)
			return;

		/* Step 2 - Request MPLL frequency through the PCU */
		reg1 = ctrl->FRQ;
		if (ctrl->base_freq == 100)
			reg1 |= (1 << 8);	/* Use 100MHz reference clock */

		reg1 |= (1 << 31);	/* Set running bit */
		mchbar_write32(MC_BIOS_REQ, reg1);
		int i = 0;
		printk(BIOS_DEBUG, "MPLL busy... ");
		while (reg1 & (1 << 31)) {
			udelay(10);
			i++;
			reg1 = mchbar_read32(MC_BIOS_REQ);
		}
		printk(BIOS_DEBUG, "done in %d us\n", i * 10);

		/* Step 3 - Verify lock frequency */
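		/* The low byte of MC_BIOS_DATA reports the currently running multiplier */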
		reg1 = mchbar_read32(MC_BIOS_DATA);
		val2 = (u8)reg1;
		if (val2 >= ctrl->FRQ) {
			printk(BIOS_DEBUG, "MPLL frequency is set at : %d MHz\n",
			       (1000 << 8) / ctrl->tCK);
			return;
		}
		printk(BIOS_DEBUG, "MPLL didn't lock. Retrying at lower frequency\n");
		ctrl->tCK++;
	}
}

static void dram_ioregs(ramctr_timing *ctrl)
{
	int channel;

	/* IO clock */
	FOR_ALL_CHANNELS {
		mchbar_write32(GDCRCLKRANKSUSED_ch(channel), ctrl->rankmap[channel]);
	}

	/* IO command */
	FOR_ALL_CHANNELS {
		mchbar_write32(GDCRCTLRANKSUSED_ch(channel), ctrl->rankmap[channel]);
	}

	/* IO control */
	FOR_ALL_POPULATED_CHANNELS {
		program_timings(ctrl, channel);
	}

	/* Perform RCOMP */
	printram("RCOMP...");
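	/* Wait for the initial RCOMP cycle to complete (RCOMP_TIMER bit 16) */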
	while (!(mchbar_read32(RCOMP_TIMER) & (1 << 16)))
		;

	printram("done\n");

	/* Set COMP2 */
	mchbar_write32(CRCOMPOFST2, get_COMP2(ctrl));
	printram("COMP2 done\n");

	/* Set COMP1 */
	FOR_ALL_POPULATED_CHANNELS {
		mchbar_write32(CRCOMPOFST1_ch(channel), get_COMP1(ctrl, channel));
	}
	printram("COMP1 done\n");

	printram("FORCE RCOMP and wait 20us...");
	mchbar_setbits32(M_COMP, 1 << 8);
	udelay(20);
	printram("done\n");
}

int try_init_dram_ddr3(ramctr_timing *ctrl, int fast_boot, int s3resume, int me_uma_size)
{
	int err;

	printk(BIOS_DEBUG, "Starting %s Bridge RAM training (%s).\n",
			IS_SANDY_CPU(ctrl->cpu) ? "Sandy" : "Ivy",
			fast_boot ? "fast boot" : "full initialization");

	if (!fast_boot) {
		/* Find fastest common supported parameters */
		dram_find_common_params(ctrl);

		dram_dimm_mapping(ctrl);
	}

	/* Set MPLL frequency */
	dram_freq(ctrl);

	if (!fast_boot) {
		/* Calculate timings */
		dram_timing(ctrl);
	}

	/* Set version register */
	mchbar_write32(MRC_REVISION, 0xc04eb002);

	/* Enable crossover */
	dram_xover(ctrl);

	/* Set timing and refresh registers */
	dram_timing_regs(ctrl);

	/* Power mode preset */
	mchbar_write32(PM_THML_STAT, 0x5500);

	/* Set scheduler chicken bits */
	mchbar_write32(SCHED_CBIT, 0x10100005);

	/* Set up watermarks and starvation counter */
	set_wmm_behavior(ctrl->cpu);

	/* Clear IO reset bit */
	mchbar_clrbits32(MC_INIT_STATE_G, 1 << 5);

	/* Set MAD-DIMM registers */
	dram_dimm_set_mapping(ctrl, 1);
	printk(BIOS_DEBUG, "Done dimm mapping\n");

	/* Zone config */
	dram_zones(ctrl, 1);

	/* Set memory map */
	dram_memorymap(ctrl, me_uma_size);
	printk(BIOS_DEBUG, "Done memory map\n");

	/* Set IO registers */
	dram_ioregs(ctrl);
	printk(BIOS_DEBUG, "Done io registers\n");

	udelay(1);

	if (fast_boot) {
		restore_timings(ctrl);
	} else {
		/* Do JEDEC DDR3 reset sequence */
		dram_jedecreset(ctrl);
		printk(BIOS_DEBUG, "Done jedec reset\n");

		/* MRS commands */
		dram_mrscommands(ctrl);
		printk(BIOS_DEBUG, "Done MRS commands\n");

		/* Prepare for memory training */
		prepare_training(ctrl);

		err = receive_enable_calibration(ctrl);
		if (err)
			return err;

		err = read_mpr_training(ctrl);
		if (err)
			return err;

		err = write_training(ctrl);
		if (err)
			return err;

		printram("CP5a\n");

		printram("CP5b\n");

		err = command_training(ctrl);
		if (err)
			return err;

		printram("CP5c\n");

		err = aggressive_read_training(ctrl);
		if (err)
			return err;

		err = aggressive_write_training(ctrl);
		if (err)
			return err;

		normalize_training(ctrl);
	}

	set_read_write_timings(ctrl);

	if (!s3resume) {
		err = channel_test(ctrl);
		if (err)
			return err;
	}

	/* Set MAD-DIMM registers */
	dram_dimm_set_mapping(ctrl, 0);

	return 0;
}