xref: /aosp_15_r20/external/coreboot/src/cpu/intel/car/p4-netburst/cache_as_ram.S (revision b9411a12aaaa7e1e6a6fb7c5e057f44ee179a49c)
/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/intel/post_codes.h>
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>
#include <cpu/x86/lapic_def.h>

/* Macro to access Local APIC registers at default base. */
#define LAPIC(x)		$(LAPIC_DEFAULT_BASE | LAPIC_ ## x)
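/* Example: "movl LAPIC(ICR), %edi" loads the MMIO address of the
 * interrupt command register, $(LAPIC_DEFAULT_BASE | LAPIC_ICR),
 * into %edi as an immediate.
 */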

.section .init
.global bootblock_pre_c_entry

#include <cpu/intel/car/cache_as_ram_symbols.inc>
#include <cpu/x86/64bit/entry64.inc>

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:

cache_as_ram:
	post_code(POSTCODE_BOOTBLOCK_CAR)

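	/* IA32_APIC_BASE MSR (0x1b): the BSP flag (bit 8) is set only
	 * on the bootstrap processor; APs branch off to ap_init.
	 */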
	movl	$LAPIC_BASE_MSR, %ecx
	rdmsr
	andl	$LAPIC_BASE_MSR_BOOTSTRAP_PROCESSOR, %eax
	jz	ap_init

	/* Clear/disable fixed MTRRs */
	mov	$fixed_mtrr_list, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

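	/* Each list entry below is a 16-bit MSR number; %eax:%edx are
	 * already zero, so each wrmsr clears one fixed-range MTRR.
	 */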
clear_fixed_mtrr:
	movzwl	(%ebx), %ecx
	wrmsr
	add	$2, %ebx
	cmp	$fixed_mtrr_list_end, %ebx
	jl	clear_fixed_mtrr

	/* Figure out how many variable MTRRs we have, and clear them out. */
	mov	$MTRR_CAP_MSR, %ecx
	rdmsr
	movzb	%al, %ebx		/* MTRRcap bits 7:0: number of variable MTRRs */
	mov	$MTRR_PHYS_BASE(0), %ecx
	xor	%eax, %eax
	xor	%edx, %edx

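	/* Variable MTRRs are PHYS_BASE/PHYS_MASK pairs at consecutive
	 * MSR addresses, so the loop below zeroes two MSRs per MTRR.
	 */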
clear_var_mtrr:
	wrmsr
	inc	%ecx
	wrmsr
	inc	%ecx
	dec	%ebx
	jnz	clear_var_mtrr

	post_code(POSTCODE_SOC_SET_DEF_MTRR_TYPE)

	/* Disable MTRRs and set the default memory type to uncacheable:
	 * clear the default type (bits 7:0) and the FE/E enable bits
	 * (bits 10 and 11).
	 */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax
	wrmsr

	post_code(POSTCODE_SOC_DETERMINE_CPU_ADDR_BITS)

	/* Determine CPU_ADDR_BITS and load PHYSMASK high
	 * word to %edx.
	 */
	movl	$0x80000000, %eax
	cpuid
	cmpl	$0x80000008, %eax
	jc	addrsize_no_MSR
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx
	jmp	addrsize_set_high
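	/* Example: a CPU reporting 36 physical address bits yields
	 * %cl = 36 - 32 = 4 and %edx = (1 << 4) - 1 = 0xf, the
	 * PHYSMASK bits 63:32.
	 */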
addrsize_no_MSR:
	movl	$1, %eax
	cpuid
	andl	$(1 << 6 | 1 << 17), %edx	/* PAE or PSE36 */
	jz	addrsize_set_high
	movl	$0x0f, %edx

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1 and enable local APIC at default base.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr
	movl	$LAPIC_BASE_MSR, %ecx
	not	%edx
	movl	%edx, %ebx
	rdmsr
	andl	%ebx, %edx
	andl	$(~LAPIC_BASE_MSR_ADDR_MASK), %eax
	orl	$(LAPIC_DEFAULT_BASE | LAPIC_BASE_MSR_ENABLE), %eax
	wrmsr

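	/* The local APIC is now enabled at LAPIC_DEFAULT_BASE, so the
	 * MMIO accesses through LAPIC(ICR) below will work.
	 */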
bsp_init:

	post_code(POSTCODE_SOC_BSP_INIT)

	/* Send INIT IPI to all excluding ourself. The loop below
	 * rewrites the ICR and polls its delivery-status bit until
	 * the IPI is no longer pending.
	 */
	movl	LAPIC(ICR), %edi
	movl	$(LAPIC_DEST_ALLBUT | LAPIC_INT_ASSERT | LAPIC_DM_INIT), %eax
1:	movl	%eax, (%edi)
	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	movl	(%edi), %ecx
	andl	$LAPIC_ICR_BUSY, %ecx
	jnz	1b

	post_code(POSTCODE_SOC_COUNT_CORES)

	movl	$1, %eax
	cpuid
	btl	$28, %edx		/* Hyper-Threading supported? */
	jnc	sipi_complete
	bswapl	%ebx
	movzx	%bh, %edi		/* EBX[23:16]: logical processor count */
	cmpb	$1, %bh
	jbe	sipi_complete	/* only one LAPIC ID in package */

	movl	$0, %eax
	cpuid
	movb	$1, %bl			/* Assume one core if CPUID leaf 4 is absent. */
	cmpl	$4, %eax
	jb	cores_counted
	movl	$4, %eax
	movl	$0, %ecx
	cpuid
	shr	$26, %eax
	movb	%al, %bl
	inc	%bl			/* EAX[31:26] + 1: cores per package */

cores_counted:
	movl	%edi, %eax
	divb	%bl			/* logical CPUs / cores = threads per core */
	cmpb	$1, %al
	jbe	sipi_complete	/* only LAPIC ID of a core */

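	/* Example: a single-core Pentium 4 with Hyper-Threading reports
	 * 2 logical CPUs and 1 core per package, so %al = 2 and we fall
	 * through to wake the sibling.
	 */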
	/* For a hyper-threading processor, cache must not be disabled
	 * on an AP on the same physical package with the BSP.
	 */

hyper_threading_cpu:

	post_code(POSTCODE_SOC_CPU_HYPER_THREADING)

	/* Send Start IPI to all excluding ourself. The vector field
	 * is the 4 KiB page index of the real-mode entry point,
	 * ap_sipi_vector_in_rom.
	 */
	movl	LAPIC(ICR), %edi
	movl	$(LAPIC_DEST_ALLBUT | LAPIC_DM_STARTUP), %eax
	orl	$ap_sipi_vector_in_rom, %eax
1:	movl	%eax, (%edi)
	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	movl	(%edi), %ecx
	andl	$LAPIC_ICR_BUSY, %ecx
	jnz	1b

	post_code(POSTCODE_SOC_CPU_SIBLING_DELAY)

	/* Wait for the sibling CPU to start: ap_init below writes a
	 * non-zero value into MTRR_PHYS_BASE(0), which is shared
	 * between HT siblings.
	 */
1:	movl	$(MTRR_PHYS_BASE(0)), %ecx
	rdmsr
	andl	%eax, %eax
	jnz	sipi_complete

	movl	$0x30, %ecx
2:	pause
	dec	%ecx
	jnz	2b
	jmp	1b


ap_init:
	post_code(POSTCODE_SOC_CPU_AP_INIT)

	/* Do not disable cache (so the BSP can enable it). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	post_code(POSTCODE_SOC_SET_MTRR_BASE)

	/* MTRR registers are shared between HT siblings. Writing a
	 * non-zero base here releases the BSP from its wait loop above.
	 */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(1 << 12), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POSTCODE_SOC_AP_HALT)

ap_halt:
	cli
1:	hlt
	jmp	1b


sipi_complete:

	post_code(POSTCODE_SOC_SET_CAR_BASE)

	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
	movl	car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

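	/* Hypothetical example: a 16 KiB CAR region at 0xfef00000 would
	 * give PHYS_BASE = 0xfef00000 | MTRR_TYPE_WRBACK and a mask of
	 * ~(16 KiB - 1) | MTRR_PHYS_MASK_VALID; the real values come
	 * from car_mtrr_start/car_mtrr_mask in cache_as_ram_symbols.inc.
	 */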
	post_code(POSTCODE_SOC_ENABLE_MTRRS)

	/* Enable MTRRs. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable L2 cache Write-Back (WBINVD and FLUSH#).
	 *
	 * MSR is set when DisplayFamily_DisplayModel is one of:
	 * 06_0x, 06_17, 06_1C
	 *
	 * Description says this bit enables use of WBINVD and FLUSH#.
	 * Should this be set only after the system bus and/or memory
	 * controller can successfully handle write cycles?
	 */

#define EAX_FAMILY(a)	(a << 8)	/* for family <= 0fH */
#define EAX_MODEL(a)	(((a & 0xf0) << 12) | ((a & 0xf) << 4))

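	/* Example: EAX_MODEL(0x17) = (0x10 << 12) | (0x7 << 4) = 0x10070,
	 * matching CPUID.1 EAX extended-model bits 19:16 and model
	 * bits 7:4.
	 */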
	movl	$1, %eax
	cpuid
	movl	%eax, %ebx
	andl	$EAX_FAMILY(0x0f), %eax
	cmpl	$EAX_FAMILY(0x06), %eax
	jne	no_msr_11e
	movl	%ebx, %eax
	andl	$EAX_MODEL(0xff), %eax
	cmpl	$EAX_MODEL(0x17), %eax
	je	has_msr_11e
	cmpl	$EAX_MODEL(0x1c), %eax
	je	has_msr_11e
	andl	$EAX_MODEL(0xf0), %eax
	cmpl	$EAX_MODEL(0x00), %eax	/* 06_0x? */
	jne	no_msr_11e
has_msr_11e:
	movl	$0x11e, %ecx
	rdmsr
	orl	$(1 << 8), %eax
	wrmsr
no_msr_11e:

	post_code(POSTCODE_SOC_ENABLE_CACHE)

	/* Cache the whole ROM to fetch microcode updates. */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	movl	rom_mtrr_base, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl	rom_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* There is no stack yet, so pass the return address in %esp;
	 * update_bsp_microcode returns by jumping through it.
	 */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
	post_code(POSTCODE_SOC_DISABLE_CACHE)
	/* Disable caching to change MTRRs. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/*
	 * An unidentified combination of speculative reads and branch
	 * predictions inside WRPROT-cacheable memory can cause invalidation
	 * of cachelines and loss of stack on models based on the NetBurst
	 * microarchitecture. Therefore disable the WRPROT region entirely
	 * for all family F models.
	 */
	movl	$1, %eax
	cpuid
	cmp	$0xf, %ah		/* family F? (EAX[11:8]) */
	jne	cache_rom

disable_cache_rom:
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr
	jmp	fill_cache

cache_rom:
	/* Enable cache for our code in flash, because we execute in
	 * place (XIP) here.
	 */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	movl	$_program, %eax
	andl	xip_mtrr_mask, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	movl	xip_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

fill_cache:
	post_code(POSTCODE_SOC_FILL_CACHE)
	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx		/* region size in dwords */
	rep	stosl

	/* Set up the stack. */
	mov	$_ecar_stack, %esp

	/* The stack must be 16-byte aligned at the call instruction:
	 * align down to 16 bytes, then drop 4 more so the three 4-byte
	 * pushes below land %esp back on a 16-byte boundary.
	 */
	andl	$0xfffffff0, %esp
	subl	$4, %esp

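	/* bootblock_c_entry_bist() takes the 64-bit early timestamp and
	 * the 32-bit BIST result as its two arguments.
	 */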
#if ENV_X86_64
	setup_longmode $PM4LE

	/* SysV AMD64 calling convention: %rdi = full TSC, %rsi = BIST
	 * (%mm0 = BIST, %mm1 = tsc[31:0], %mm2 = tsc[63:32], as in the
	 * 32-bit path below).
	 */
	movd	%mm2, %rdi
	shlq	$32, %rdi	/* tsc[63:32] */
	movd	%mm1, %rsi
	or	%rsi, %rdi	/* tsc[63:32] | tsc[31:0] */
	movd	%mm0, %rsi	/* BIST */

#else
	/* Push TSC and BIST to the stack. */
	movd	%mm0, %eax
	pushl	%eax	/* BIST */
	movd	%mm2, %eax
	pushl	%eax	/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax	/* tsc[31:0] */
#endif

	/* Copy .data section content to Cache-As-Ram. */
#include <cpu/x86/copy_data_section.inc>

before_c_entry:
	post_code(POSTCODE_BOOTBLOCK_BEFORE_C_ENTRY)
	call	bootblock_c_entry_bist

	/* Should never see this postcode. */
	post_code(POSTCODE_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_end:

_cache_as_ram_setup_end: