1 /* SPDX-License-Identifier: GPL-2.0-only */
3 #include <cpu/intel/post_codes.h>
4 #include <cpu/x86/mtrr.h>
5 #include <cpu/x86/cache.h>
6 #include <cpu/x86/post_code.h>
7 #include <cpu/x86/lapic_def.h>
9 /* Macro to access Local APIC registers at default base. */
/* Expands to an absolute-address operand: LAPIC(ICR) ->
 * $(LAPIC_DEFAULT_BASE | LAPIC_ICR). */
10 #define LAPIC(x) $(LAPIC_DEFAULT_BASE | LAPIC_ ## x)
13 .global bootblock_pre_c_entry
15 #include <cpu/intel/car/cache_as_ram_symbols.inc>
16 #include <cpu/x86/64bit/entry64.inc>
/*
 * bootblock_pre_c_entry — BSP pre-C entry: record BSP status, clear
 * the fixed MTRRs, count and clear the variable MTRRs, and set the
 * default memory type to UC ahead of Cache-as-RAM configuration.
 *
 * NOTE(review): the embedded line numbers have gaps — rdmsr/wrmsr
 * pairs, loop bodies and branches between these lines are not visible
 * in this excerpt.  Comments state only what the visible lines show;
 * do not assemble this excerpt as-is.
 */
21 bootblock_pre_c_entry:

24 post_code(POSTCODE_BOOTBLOCK_CAR)

/* Select IA32_APIC_BASE; the BSP flag (bit 8) is isolated from %eax
 * below — the intervening rdmsr is not visible in this excerpt. */
26 movl $LAPIC_BASE_MSR, %ecx
28 andl $LAPIC_BASE_MSR_BOOTSTRAP_PROCESSOR, %eax

31 /* Clear/disable fixed MTRRs */
/* %ebx walks the .word table of fixed-MTRR MSR indices at the end of
 * this file; the loop body (load index, zero the MSR) is not visible
 * in this excerpt. */
32 mov $fixed_mtrr_list, %ebx
40 cmp $fixed_mtrr_list_end, %ebx

43 /* Figure out how many MTRRs we have, and clear them out */
/* MTRRcap[7:0] = VCNT, the variable-range MTRR count. */
44 mov $MTRR_CAP_MSR, %ecx
46 movzb %al, %ebx /* Number of variable MTRRs */
47 mov $MTRR_PHYS_BASE(0), %ecx

58 post_code(POSTCODE_SOC_SET_DEF_MTRR_TYPE)

60 /* Configure the default memory type to uncacheable. */
/* Clear MTRRdefType enable bits (11:10) and default-type field (7:0). */
61 movl $MTRR_DEF_TYPE_MSR, %ecx
63 andl $(~0x00000cff), %eax
66 post_code(POSTCODE_SOC_DETERMINE_CPU_ADDR_BITS)

68 /* Determine CPU_ADDR_BITS and load PHYSMASK high
/* CPUID leaf 0x80000000 reports the max extended leaf; when >=
 * 0x80000008, leaf 0x80000008 EAX[7:0] gives physical address width.
 * The cpuid instructions and branches between these lines are not
 * visible in this excerpt. */
71 movl $0x80000000, %eax
73 cmpl $0x80000008, %eax
75 movl $0x80000008, %eax

/* Fallback path: probe CPUID.1 EDX feature bits for >32-bit
 * physical addressing capability. */
86 andl $(1 << 6 | 1 << 17), %edx /* PAE or PSE36 */

90 /* Preload high word of address mask (in %edx) for Variable
91 * MTRRs 0 and 1 and enable local APIC at default base.
95 movl $MTRR_PHYS_MASK(0), %ecx
97 movl $MTRR_PHYS_MASK(1), %ecx

/* Re-enable the local APIC at LAPIC_DEFAULT_BASE via IA32_APIC_BASE
 * (read-modify-write; rdmsr/wrmsr not visible in this excerpt). */
99 movl $LAPIC_BASE_MSR, %ecx
104 andl $(~LAPIC_BASE_MSR_ADDR_MASK), %eax
105 orl $(LAPIC_DEFAULT_BASE | LAPIC_BASE_MSR_ENABLE), %eax

110 post_code(POSTCODE_SOC_BSP_INIT)

112 /* Send INIT IPI to all excluding ourself. */
113 movl LAPIC(ICR), %edi
114 movl $(LAPIC_DEST_ALLBUT | LAPIC_INT_ASSERT | LAPIC_DM_INIT), %eax
/* Poll ICR delivery status until the INIT IPI is no longer pending. */
121 andl $LAPIC_ICR_BUSY, %ecx

124 post_code(POSTCODE_SOC_COUNT_CORES)
/* Topology checks: skip the SIPI when the package reports a single
 * LAPIC ID / a single core.  The CPUID reads and compare setup for
 * these branches are not visible in this excerpt. */
133 jbe sipi_complete /* only one LAPIC ID in package */
151 jbe sipi_complete /* only LAPIC ID of a core */

153 /* For a hyper-threading processor, cache must not be disabled
154 * on an AP on the same physical package with the BSP.

159 post_code(POSTCODE_SOC_CPU_HYPER_THREADING)

161 /* Send Start IPI to all excluding ourself. */
/* SIPI vector = ap_sipi_vector_in_rom (the AP startup code location
 * expressed as a SIPI vector page number — defined outside this
 * excerpt). */
162 movl LAPIC(ICR), %edi
163 movl $(LAPIC_DEST_ALLBUT | LAPIC_DM_STARTUP), %eax
164 orl $ap_sipi_vector_in_rom, %eax
/* Poll ICR delivery status until the SIPI is no longer pending. */
171 andl $LAPIC_ICR_BUSY, %ecx

174 post_code(POSTCODE_SOC_CPU_SIBLING_DELAY)

176 /* Wait for sibling CPU to start. */
/* BSP polls MTRR_PHYS_BASE(0); the AP path below writes bit 12 there,
 * which appears to be the start handshake (the rdmsr/test body of
 * this loop is not visible in this excerpt). */
177 1: movl $(MTRR_PHYS_BASE(0)), %ecx

/* ---- AP (HT sibling) path ---- */
190 post_code(POSTCODE_SOC_CPU_AP_INIT)

192 /* Do not disable cache (so BSP can enable it). */
194 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax

197 post_code(POSTCODE_SOC_SET_MTRR_BASE)

199 /* MTRR registers are shared between HT siblings. */
/* Setting bit 12 in the shared MTRR_PHYS_BASE(0) is what the BSP's
 * wait loop above observes. */
200 movl $(MTRR_PHYS_BASE(0)), %ecx
201 movl $(1 << 12), %eax

205 post_code(POSTCODE_SOC_AP_HALT)
216 post_code(POSTCODE_SOC_SET_CAR_BASE)

218 /* Set Cache-as-RAM base address. */
/* car_mtrr_start / car_mtrr_mask are provided by
 * cache_as_ram_symbols.inc (included at the top of the file). */
219 movl $(MTRR_PHYS_BASE(0)), %ecx
220 movl car_mtrr_start, %eax
221 orl $MTRR_TYPE_WRBACK, %eax

225 /* Set Cache-as-RAM mask. */
226 movl $(MTRR_PHYS_MASK(0)), %ecx
228 movl car_mtrr_mask, %eax
229 orl $MTRR_PHYS_MASK_VALID, %eax

232 post_code(POSTCODE_SOC_ENABLE_MTRRS)

/* Turn the variable MTRRs on (MTRRdefType.E). */
235 movl $MTRR_DEF_TYPE_MSR, %ecx
237 orl $MTRR_DEF_TYPE_EN, %eax

240 /* Enable L2 cache Write-Back (WBINVD and FLUSH#).
242 * MSR is set when DisplayFamily_DisplayModel is one of:
243 * 06_0x, 06_17, 06_1C
245 * Description says this bit enables use of WBINVD and FLUSH#.
246 * Should this be set only after the system bus and/or memory
247 * controller can successfully handle write cycles?

/* Helpers to build comparable family/model fields from CPUID.1 EAX. */
250 #define EAX_FAMILY(a) (a << 8) /* for family <= 0fH */
251 #define EAX_MODEL(a) (((a & 0xf0) << 12) | ((a & 0xf) << 4))

/* Family must be 6, then model is matched against 0x17, 0x1c and the
 * 06_0x group; the cpuid and the conditional branches between these
 * compares are not visible in this excerpt. */
256 andl $EAX_FAMILY(0x0f), %eax
257 cmpl $EAX_FAMILY(0x06), %eax
260 andl $EAX_MODEL(0xff), %eax
261 cmpl $EAX_MODEL(0x17), %eax
263 cmpl $EAX_MODEL(0x1c), %eax
265 andl $EAX_MODEL(0xf0), %eax
266 cmpl $EAX_MODEL(0x00), %eax
275 post_code(POSTCODE_SOC_ENABLE_CACHE)

277 /* Cache the whole rom to fetch microcode updates */
/* Variable MTRR 1 covers the ROM as write-protect (cacheable reads,
 * writes pass through); rom_mtrr_base / rom_mtrr_mask come from
 * cache_as_ram_symbols.inc. */
278 movl $MTRR_PHYS_BASE(1), %ecx
280 movl rom_mtrr_base, %eax
281 orl $MTRR_TYPE_WRPROT, %eax
283 movl $MTRR_PHYS_MASK(1), %ecx
285 movl rom_mtrr_mask, %eax
286 orl $MTRR_PHYS_MASK_VALID, %eax

289 /* Enable cache (CR0.CD = 0, CR0.NW = 0). */
291 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax

295 #if CONFIG(MICROCODE_UPDATE_PRE_RAM)
297 /* put the return address in %esp */
/* No stack exists yet: the callee is expected to return with an
 * indirect jump through %esp rather than ret — TODO confirm against
 * update_bsp_microcode (defined outside this excerpt). */
298 movl $end_microcode_update, %esp
299 jmp update_bsp_microcode
300 end_microcode_update:

302 post_code(POSTCODE_SOC_DISABLE_CACHE)
303 /* Disable caching to change MTRR's. */
305 orl $CR0_CacheDisable, %eax

309 * An unidentified combination of speculative reads and branch
310 * predictions inside WRPROT-cacheable memory can cause invalidation
311 * of cachelines and loss of stack on models based on NetBurst
312 * microarchitecture. Therefore disable WRPROT region entirely for
313 * all family F models.
/* Invalidate variable MTRR 1 by clearing its mask-valid bit. */
321 movl $MTRR_PHYS_MASK(1), %ecx
323 andl $(~MTRR_PHYS_MASK_VALID), %eax

328 /* Enable cache for our code in Flash because we do XIP here */
/* Re-point variable MTRR 1 at the execute-in-place window; presumably
 * %eax holds the XIP base at the andl — the preceding load is not
 * visible in this excerpt. */
329 movl $MTRR_PHYS_BASE(1), %ecx
332 andl xip_mtrr_mask, %eax
333 orl $MTRR_TYPE_WRPROT, %eax
335 movl $MTRR_PHYS_MASK(1), %ecx
337 movl xip_mtrr_mask, %eax
338 orl $MTRR_PHYS_MASK_VALID, %eax

342 post_code(POSTCODE_SOC_FILL_CACHE)
/* Re-enable cache before touching the CAR region. */
345 andl $(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax

349 /* Clear the cache memory region. This will also fill up the cache. */
/* %edi/%ecx set up a fill over [_car_mtrr_start, +_car_mtrr_size);
 * the store loop itself is not visible in this excerpt. */
352 movl $_car_mtrr_start, %edi
353 movl $_car_mtrr_size, %ecx

357 /* Setup the stack. */
358 mov $_ecar_stack, %esp

360 /* Need to align stack to 16 bytes at call instruction. Account for
362 andl $0xfffffff0, %esp

/* Enter long mode via the entry64.inc macro. */
366 setup_longmode $PM4LE

/* 64-bit path: pack BIST and the 64-bit TSC for the C entry.  The
 * staging of these values (and the #if separating this from the
 * 32-bit pushl path below) is not visible in this excerpt. */
369 shlq $32, %rdi /* BIST */
371 or %rsi, %rdi /* tsc[63:32] */
372 movd %mm0, %rsi /* tsc[31:0] */

375 /* push TSC and BIST to stack */
377 pushl %eax /* BIST */
379 pushl %eax /* tsc[63:32] */
381 pushl %eax /* tsc[31:0] */

384 /* Copy .data section content to Cache-As-Ram */
385 #include <cpu/x86/copy_data_section.inc>

388 post_code(POSTCODE_BOOTBLOCK_BEFORE_C_ENTRY)
/* Hand off to C with BIST/TSC as arguments; never returns. */
389 call bootblock_c_entry_bist

391 /* Should never see this postcode */
392 post_code(POSTCODE_DEAD_CODE)
/* Table of fixed-range MTRR MSR indices consumed by the clear loop at
 * the top of bootblock_pre_c_entry (fixed_mtrr_list ..
 * fixed_mtrr_list_end).  NOTE(review): the fixed_mtrr_list label line
 * itself is not visible in this excerpt. */
399 .word MTRR_FIX_64K_00000
400 .word MTRR_FIX_16K_80000
401 .word MTRR_FIX_16K_A0000
402 .word MTRR_FIX_4K_C0000
403 .word MTRR_FIX_4K_C8000
404 .word MTRR_FIX_4K_D0000
405 .word MTRR_FIX_4K_D8000
406 .word MTRR_FIX_4K_E0000
407 .word MTRR_FIX_4K_E8000
408 .word MTRR_FIX_4K_F0000
409 .word MTRR_FIX_4K_F8000

412 _cache_as_ram_setup_end: