1 #ifndef _ASM_X86_MICROCODE_H
2 #define _ASM_X86_MICROCODE_H
/*
 * native_rdmsr() - read MSR @msr and split the 64-bit result into
 * @val1 (low 32 bits) and @val2 (high 32 bits).
 *
 * Wrapped in do { } while (0) so the multi-statement body acts as a
 * single statement (safe in an unbraced if/else); @msr is parenthesized
 * before use so expression arguments expand safely.
 */
#define native_rdmsr(msr, val1, val2)			\
do {							\
	u64 __val = native_read_msr((msr));		\
	(void)((val1) = (u32)__val);			\
	(void)((val2) = (u32)(__val >> 32));		\
} while (0)
/* Write the 64-bit MSR @msr from its two 32-bit halves (@low, @high). */
#define native_wrmsr(msr, low, high)			\
	native_write_msr((msr), (low), (high))
/*
 * Write the 64-bit value @val to MSR @msr, splitting it into the
 * low/high 32-bit halves that native_write_msr() expects.
 * (The low-half argument was missing; a two-argument call to a
 * three-argument writer cannot be correct.)
 */
#define native_wrmsrl(msr, val)				\
	native_write_msr((msr),				\
			 (u32)((u64)(val)),		\
			 (u32)((u64)(val) >> 32))
/*
 * Identity of a CPU's installed microcode: the CPUID(1) signature, the
 * processor flags and the currently applied microcode revision.
 * (Body reconstructed — the member lines were dropped from this copy.)
 */
struct cpu_signature {
	unsigned int sig;	/* CPUID(1).EAX: family/model/stepping */
	unsigned int pf;	/* processor flags */
	unsigned int rev;	/* microcode patch revision */
};

struct device;
/* Outcome of a microcode load/parse attempt. */
enum ucode_state {
	UCODE_ERROR,
	UCODE_OK,
	UCODE_NFOUND,	/* no suitable microcode was found */
};

/* Set when the microcode loader is disabled; defined out of line. */
extern bool dis_ucode_ldr;
30 struct microcode_ops
{
31 enum ucode_state (*request_microcode_user
) (int cpu
,
32 const void __user
*buf
, size_t size
);
34 enum ucode_state (*request_microcode_fw
) (int cpu
, struct device
*,
37 void (*microcode_fini_cpu
) (int cpu
);
40 * The generic 'microcode_core' part guarantees that
41 * the callbacks below run on a target cpu when they
43 * See also the "Synchronization" section in microcode_core.c.
45 int (*apply_microcode
) (int cpu
);
46 int (*collect_cpu_info
) (int cpu
, struct cpu_signature
*csig
);
49 struct ucode_cpu_info
{
50 struct cpu_signature cpu_sig
;
54 extern struct ucode_cpu_info ucode_cpu_info
[];
56 #ifdef CONFIG_MICROCODE_INTEL
57 extern struct microcode_ops
* __init
init_intel_microcode(void);
59 static inline struct microcode_ops
* __init
init_intel_microcode(void)
63 #endif /* CONFIG_MICROCODE_INTEL */
65 #ifdef CONFIG_MICROCODE_AMD
66 extern struct microcode_ops
* __init
init_amd_microcode(void);
67 extern void __exit
exit_amd_microcode(void);
69 static inline struct microcode_ops
* __init
init_amd_microcode(void)
73 static inline void __exit
exit_amd_microcode(void) {}
76 #ifdef CONFIG_MICROCODE_EARLY
#define MAX_UCODE_COUNT 128

/* Pack four characters into a little-endian 32-bit CPUID register value. */
#define QCHAR(a, b, c, d) ((a) + ((b) << 8) + ((c) << 16) + ((d) << 24))
#define CPUID_INTEL1 QCHAR('G', 'e', 'n', 'u')
#define CPUID_INTEL2 QCHAR('i', 'n', 'e', 'I')
#define CPUID_INTEL3 QCHAR('n', 't', 'e', 'l')
#define CPUID_AMD1 QCHAR('A', 'u', 't', 'h')
#define CPUID_AMD2 QCHAR('e', 'n', 't', 'i')
#define CPUID_AMD3 QCHAR('c', 'A', 'M', 'D')

/*
 * True when the CPUID(0) vendor string in EBX:EDX:ECX equals (a, b, c).
 * Every argument is parenthesized so expression arguments expand safely.
 */
#define CPUID_IS(a, b, c, ebx, ecx, edx) \
	(!(((ebx) ^ (a)) | ((edx) ^ (b)) | ((ecx) ^ (c))))
/*
 * In the early microcode loading phase on the BSP, boot_cpu_data is not
 * set up yet, so x86_vendor() gets the vendor id for the BSP.
 *
 * In the 32-bit AP case, accessing boot_cpu_data needs a linear address.
 * To simplify the code, we still use x86_vendor() to get the vendor id
 * for an AP.
 *
 * x86_vendor() reads the vendor information directly from CPUID.
 */
99 static inline int x86_vendor(void)
101 u32 eax
= 0x00000000;
102 u32 ebx
, ecx
= 0, edx
;
104 native_cpuid(&eax
, &ebx
, &ecx
, &edx
);
106 if (CPUID_IS(CPUID_INTEL1
, CPUID_INTEL2
, CPUID_INTEL3
, ebx
, ecx
, edx
))
107 return X86_VENDOR_INTEL
;
109 if (CPUID_IS(CPUID_AMD1
, CPUID_AMD2
, CPUID_AMD3
, ebx
, ecx
, edx
))
110 return X86_VENDOR_AMD
;
112 return X86_VENDOR_UNKNOWN
;
/*
 * Extract the CPU family from a CPUID(1).EAX signature.
 * The extended family field (bits 27:20) is only added when the base
 * family (bits 11:8) is 0xf, per the x86 CPUID encoding — the dropped
 * guard made the addition unconditional, which is wrong for families
 * below 0xf.
 */
static inline unsigned int __x86_family(unsigned int sig)
{
	unsigned int x86;

	x86 = (sig >> 8) & 0xf;

	if (x86 == 0xf)
		x86 += (sig >> 20) & 0xff;

	return x86;
}
127 static inline unsigned int x86_family(void)
129 u32 eax
= 0x00000001;
130 u32 ebx
, ecx
= 0, edx
;
132 native_cpuid(&eax
, &ebx
, &ecx
, &edx
);
134 return __x86_family(eax
);
/*
 * Extract the CPU model from a CPUID(1) signature.
 * The extended model field (bits 19:16) widens the base model only for
 * families 0x6 and 0xf, per the x86 CPUID encoding. The "return model;"
 * line was dropped from this copy — falling off the end of a non-void
 * function is undefined behavior.
 */
static inline unsigned int x86_model(unsigned int sig)
{
	unsigned int x86, model;

	x86 = __x86_family(sig);

	model = (sig >> 4) & 0xf;

	if (x86 == 0x6 || x86 == 0xf)
		model += ((sig >> 16) & 0xf) << 4;

	return model;
}
151 extern void __init
load_ucode_bsp(void);
152 extern void load_ucode_ap(void);
153 extern int __init
save_microcode_in_initrd(void);
154 void reload_early_microcode(void);
156 static inline void __init
load_ucode_bsp(void) {}
157 static inline void load_ucode_ap(void) {}
158 static inline int __init
save_microcode_in_initrd(void)
162 static inline void reload_early_microcode(void) {}
165 #endif /* _ASM_X86_MICROCODE_H */