/* SPDX-License-Identifier: GPL-2.0-only */

#include <cpu/intel/post_codes.h>
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>
#include <cpu/x86/64bit/entry64.inc>

#define NoEvictMod_MSR 0x2e0
#define BBL_CR_CTL3_MSR 0x11e
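
/*
 * MSR 0x2e0 is the no-eviction mode (NEM) control used to run
 * Cache-as-RAM; BBL_CR_CTL3 (0x11e) is the L2 cache control MSR
 * of older socketed CPUs.
 */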

.global bootblock_pre_c_entry

#include <cpu/intel/car/cache_as_ram_symbols.inc>
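
/*
 * cache_as_ram_symbols.inc provides the linker-derived values used
 * below: car_mtrr_start, car_mtrr_mask, rom_mtrr_base and rom_mtrr_mask.
 */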

bootblock_pre_c_entry:
	movl	$cache_as_ram, %esp	/* return address */
	jmp	check_mtrr		/* Check if CPU properly reset */
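	/*
	 * No stack exists at this point, so the "return address" is
	 * parked in %esp and the callee returns with a jmp through it.
	 */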

cache_as_ram:
	post_code(POSTCODE_BOOTBLOCK_CAR)

	/* Send INIT IPI to all excluding ourself. */
	movl	$0x000C4500, %eax
	movl	$0xFEE00300, %esi
	movl	%eax, (%esi)
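
	/*
	 * 0xFEE00300 is the local APIC ICR (low dword); 0x000C4500
	 * encodes an INIT IPI using the "all excluding self" destination
	 * shorthand.
	 */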

	/* All CPUs need to be in Wait for SIPI state */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax		/* delivery status */
	jc	wait_for_sipi

	post_code(POSTCODE_SOC_SET_DEF_MTRR_TYPE)

	/* Clean up MTRR_DEF_TYPE_MSR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POSTCODE_SOC_CLEAR_FIXED_MTRRS)

	/* Clear/disable fixed MTRRs */
	mov	$fixed_mtrr_list, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	movzwl	(%ebx), %ecx		/* next MSR index from the table */
	wrmsr
	add	$2, %ebx
	cmp	$fixed_mtrr_list_end, %ebx
	jl	clear_fixed_mtrr

	/* Zero out all variable range MTRRs. */
	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
	andl	$0xff, %eax		/* VCNT: number of variable MTRR pairs */
	shl	$1, %eax		/* two MSRs (base + mask) per pair */
	movl	%eax, %edi
	movl	$MTRR_PHYS_BASE(0), %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx

clear_var_mtrr:
	wrmsr
	add	$1, %ecx
	dec	%edi
	jnz	clear_var_mtrr

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
	movl	$0x80000008, %eax
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx		/* PHYSMASK bits [CPU_ADDR_BITS-1:32] */

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1.
	 */
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr

	post_code(POSTCODE_SOC_SET_MTRR_BASE)

	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(POSTCODE_SOC_SET_MTRR_MASK)

	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr				/* restore the preloaded high mask word in %edx */
	mov	car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	/* Enable cache for our code in Flash because we do XIP here */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	mov	rom_mtrr_base, %eax
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr
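
	/*
	 * Write-protect caching lets reads of the execute-in-place code
	 * in flash be cached while writes are never cached, which is what
	 * read-only flash needs.
	 */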

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr				/* keep the preloaded high mask word */
	mov	rom_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(POSTCODE_SOC_ENABLE_MTRRS)

	/* Enable MTRRs. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr
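
	/*
	 * The default memory type was cleared to 0 (uncacheable) above,
	 * so only the two variable ranges programmed here are cacheable.
	 */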

#if CONFIG(CPU_HAS_L2_ENABLE_MSR)
	/*
	 * Enable the L2 cache. Currently this assumes that this only
	 * affects socketed CPUs, for which this is always valid, hence
	 * the static preprocessor check.
	 */
	movl	$BBL_CR_CTL3_MSR, %ecx
	rdmsr
	orl	$0x100, %eax		/* L2 enabled bit */
	wrmsr
#endif

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd
	movl	%eax, %cr0

#if CONFIG(MICROCODE_UPDATE_PRE_RAM)
	/* put the return address in %esp */
	movl	$end_microcode_update, %esp
	jmp	update_bsp_microcode
end_microcode_update:
#endif

	/* Disable caching to change MTRRs. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Clear the mask valid bit to disable the flash MTRR. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	andl	$(~MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable cache again. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Enable the 'no eviction' mode (setup). */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$1, %eax		/* bit 0: NEM setup */
	andl	$~2, %eax		/* bit 1: NEM run, still off */
	wrmsr
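
	/*
	 * In no-eviction mode the lines backing the CAR region stay in
	 * cache and are never evicted to (nonexistent) DRAM, so the
	 * region can be used as temporary RAM.
	 */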

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx		/* we store 4 bytes at a time */
	rep	stosl

	/* Enable the 'no eviction run' state. */
	movl	$NoEvictMod_MSR, %ecx
	rdmsr
	orl	$3, %eax		/* bits 0+1: setup and run */
	wrmsr

	post_code(POSTCODE_SOC_DISABLE_CACHE)

	/* Enable Cache-as-RAM mode by disabling cache. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Enable the flash (XIP) MTRR again. */
	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(POSTCODE_SOC_ENABLE_CACHE)

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Setup the stack. */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account
	 * for the pushes below.
	 */
	andl	$0xfffffff0, %esp

#if ENV_X86_64
	setup_longmode $PM4LE

	/* Pass the timestamp and BIST in registers instead of on the stack. */
	movq	%mm2, %rdi
	shlq	$32, %rdi
	movq	%mm1, %rsi
	or	%rsi, %rdi		/* first arg: 64-bit timestamp */
	movq	%mm0, %rsi		/* second arg: BIST */
#else
	subl	$4, %esp		/* keep 16-byte alignment at the call */

	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax			/* BIST */
	movd	%mm2, %eax
	pushl	%eax			/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax			/* tsc[31:0] */
#endif
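
	/*
	 * In the 32-bit path the 4-byte pad plus three 4-byte pushes keep
	 * %esp 16-byte aligned at the call below.
	 */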

	/* Copy .data section content to Cache-As-Ram */
#include <cpu/x86/copy_data_section.inc>

	post_code(POSTCODE_BOOTBLOCK_BEFORE_C_ENTRY)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POSTCODE_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_end:
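
/* The table above lists the fixed-range MTRR MSRs covering 0 - 1MiB. */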

_cache_as_ram_setup_end: