/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/intel/post_codes.h>
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

#define NoEvictMod_MSR 0x2e0
#define BBL_CR_CTL3_MSR 0x11e
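
/*
 * NoEvictMod_MSR (0x2e0) controls no-eviction mode: bit 0 arms NEM
 * setup and bit 1 starts the NEM run, matching the 'orl $1' and
 * 'orl $3' sequences below. BBL_CR_CTL3_MSR (0x11e) gates the L2
 * cache on socketed parts; bit 8 is the L2 enable used below.
 */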

.section .init
.global bootblock_pre_c_entry

#include <cpu/intel/car/cache_as_ram_symbols.inc>

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:
	movl	$cache_as_ram, %esp /* return address */
	jmp	check_mtrr /* Check if CPU properly reset */

cache_as_ram:
	post_code(POST_BOOTBLOCK_CAR)

	/* Send INIT IPI to all excluding ourself. */
	movl	$0x000C4500, %eax
	movl	$0xFEE00300, %esi
	movl	%eax, (%esi)
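
	/*
	 * 0xFEE00300 is the local APIC ICR (low dword); 0x000C4500
	 * encodes delivery mode INIT, level assert, destination
	 * shorthand "all excluding self".
	 */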

	/* All CPUs need to be in Wait for SIPI state */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax	/* ICR delivery status */
	jc	wait_for_sipi

	post_code(POST_SOC_SET_DEF_MTRR_TYPE)

	/* Clean-up MTRR_DEF_TYPE_MSR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POST_SOC_CLEAR_FIXED_MTRRS)

	/* Clear/disable fixed MTRRs */
	mov	$fixed_mtrr_list, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	movzwl	(%ebx), %ecx	/* next fixed-range MTRR MSR number */
	wrmsr
	add	$2, %ebx
	cmp	$fixed_mtrr_list_end, %ebx
	jl	clear_fixed_mtrr
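
	/*
	 * Each entry in fixed_mtrr_list (at the end of this file) is a
	 * 16-bit MSR number, so the loop advances %ebx by 2 and writes
	 * zero to every fixed-range MTRR.
	 */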

	/* Zero out all variable range MTRRs. */
	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
	andl	$0xff, %eax	/* VCNT: number of variable MTRRs */
	shl	$1, %eax	/* two MSRs (base + mask) per MTRR */
	movl	%eax, %edi
	movl	$MTRR_PHYS_BASE(0), %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
clear_var_mtrr:
	wrmsr
	add	$1, %ecx
	dec	%edi
	jnz	clear_var_mtrr

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl	/* physical address width */
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx	/* %edx = (1 << (addr_bits - 32)) - 1 */

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr
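
	/*
	 * Example: with 36 physical address bits, %cl = 4, so
	 * %edx = (1 << 4) - 1 = 0xf, which covers PHYSMASK bits 35:32.
	 */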

	post_code(POST_SOC_SET_MTRR_BASE)

	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POST_SOC_SET_MTRR_MASK)

	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr	/* keep the preloaded high word in %edx */
	mov	car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr
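
	/*
	 * car_mtrr_start/car_mtrr_mask (and rom_mtrr_base/rom_mtrr_mask
	 * below) come from cache_as_ram_symbols.inc, included above,
	 * which derives them from the linker-provided CAR and XIP ROM
	 * boundaries.
	 */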

	/* Enable cache for our code in Flash because we do XIP here */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	mov	rom_mtrr_base, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	mov	rom_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr
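
	/*
	 * Write-protect lets instruction fetches from flash be cached
	 * without ever creating dirty cache lines, which matters once
	 * no-eviction mode is armed below.
	 */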

	post_code(POST_SOC_ENABLE_MTRRS)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this
	 * only affects socketed CPUs, for which this is always valid,
	 * hence the static preprocessor check.
	 */
	movl	$BBL_CR_CTL3_MSR, %ecx
	rdmsr
	orl	$0x100, %eax	/* bit 8: L2 enabled */
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
update_microcode:
	/* put the return address in %esp */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif
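
	/*
	 * No stack exists yet, so the "call" is open-coded: the return
	 * target is parked in %esp and update_bsp_microcode jumps back
	 * through it, the same trick used at bootblock_pre_c_entry.
	 */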

	/* Disable caching to change MTRR's. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Clear the mask valid to disable the MTRR */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

	/* enable the 'no eviction' mode */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$1, %eax	/* NEM setup */
	andl	$~2, %eax	/* NEM run still off */
	wrmsr

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx	/* convert byte count to dword count */
	rep	stosl
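
	/*
	 * In NEM setup mode every line of the WRBACK CAR region is
	 * touched once, so the whole region is resident in cache before
	 * eviction is locked out.
	 */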

	/* enable the 'no eviction run' state */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$3, %eax	/* NEM setup + run */
	wrmsr

	post_code(POST_SOC_DISABLE_CACHE)

	/* Enable Cache-as-RAM mode by disabling cache. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Enable cache for our code in Flash because we do XIP here */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(POST_SOC_ENABLE_CACHE)

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0
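
	/*
	 * With CR0.CD set, cache hits are still served but no new fills
	 * occur, so the CAR contents stay intact while the flash MTRR is
	 * re-validated; caching is then turned back on for speed.
	 */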

	/* Setup the stack. */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
	andl	$0xfffffff0, %esp
	subl	$4, %esp
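
	/*
	 * After the andl, %esp is 16-byte aligned; subl $4 plus the
	 * three 4-byte pushes below leave it 16-byte aligned again at
	 * the call instruction, as the ABI expects.
	 */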

#if ENV_X86_64
	#include <cpu/x86/64bit/entry64.inc>

	movd	%mm2, %rdi
	shlq	$32, %rdi
	movd	%mm1, %rsi
	or	%rsi, %rdi	/* %rdi = TSC */
	movd	%mm0, %rsi	/* %rsi = BIST */
#else
	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax	/* BIST */
	movd	%mm2, %eax
	pushl	%eax	/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax	/* tsc[31:0] */
#endif
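
	/*
	 * %mm0 holds the BIST result and %mm2:%mm1 the boot TSC value,
	 * stashed there by the reset vector entry code before any cache
	 * or stack existed.
	 */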

	post_code(POST_BOOTBLOCK_BEFORE_C_ENTRY)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_end:

_cache_as_ram_setup_end: