/*	$NetBSD: cacheinfo.h,v 1.13 2011/12/04 17:00:10 chs Exp $	*/
#ifndef _X86_CACHEINFO_H_
#define _X86_CACHEINFO_H_
struct x86_cache_info {
        uint8_t         cai_index;
        uint8_t         cai_desc;
        uint8_t         cai_associativity;
        u_int           cai_totalsize;  /* #entries for TLB, bytes for cache */
        u_int           cai_linesize;   /* or page size for TLB */
#ifndef _KERNEL
        const char      *cai_string;
#endif
};
#define CAI_ITLB        0       /* Instruction TLB (4K pages) */
#define CAI_ITLB2       1       /* Instruction TLB (2/4M pages) */
#define CAI_DTLB        2       /* Data TLB (4K pages) */
#define CAI_DTLB2       3       /* Data TLB (2/4M pages) */
#define CAI_ICACHE      4       /* Instruction cache */
#define CAI_DCACHE      5       /* Data cache */
#define CAI_L2CACHE     6       /* Level 2 cache */
#define CAI_L3CACHE     7       /* Level 3 cache */
#define CAI_L1_1GBITLB  8       /* L1 1GB Page instruction TLB */
#define CAI_L1_1GBDTLB  9       /* L1 1GB Page data TLB */
#define CAI_L2_1GBITLB  10      /* L2 1GB Page instruction TLB */
#define CAI_L2_1GBDTLB  11      /* L2 1GB Page data TLB */
#define CAI_L2_ITLB     12      /* L2 Instruction TLB (4K pages) */
#define CAI_L2_ITLB2    13      /* L2 Instruction TLB (2/4M pages) */
#define CAI_L2_DTLB     14      /* L2 Data TLB (4K pages) */
#define CAI_L2_DTLB2    15      /* L2 Data TLB (2/4M pages) */
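
/*
 * Illustrative sketch (not part of this header): a consumer typically keeps
 * one struct x86_cache_info per CAI_* index and fills it during CPU probe.
 * CAI_NENTRIES below is a local assumption for the example only; this header
 * does not define it.
 */
#if 0
#include <stdio.h>

#define CAI_NENTRIES    16              /* assumed: one slot per CAI_* index */

static struct x86_cache_info ci_info[CAI_NENTRIES];

static void
print_cache_entry(int idx, const char *label)
{
        const struct x86_cache_info *cai = &ci_info[idx];

        if (cai->cai_totalsize == 0)
                return;                 /* not probed or not present */

        /* For TLBs, cai_totalsize is entries and cai_linesize is page size. */
        printf("%s: %u bytes, %u-way, %u byte lines\n", label,
            cai->cai_totalsize, (unsigned)cai->cai_associativity,
            cai->cai_linesize);
}
#endif
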
/*
 * AMD Cache Info:
 *
 *	CPUs that also report an L3 cache and 1GB-page TLBs:
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EAX -- L1 TLB 2/4MB pages
 *		EBX -- L1 TLB 4K pages
 *
 *		Function 8000.0006 L2 TLB/Cache Information
 *		EAX -- L2 TLB 2/4MB pages
 *		EBX -- L2 TLB 4K pages
 *		ECX -- L2 Unified cache
 *		EDX -- L3 Unified Cache
 *
 *		Function 8000.0019 TLB 1GB Page Information
 *		EAX -- L1 TLB 1GB pages
 *		EBX -- L2 TLB 1GB pages
 *
 *	CPUs without an L3 cache or 1GB-page TLBs:
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EAX -- L1 TLB 2/4MB pages
 *		EBX -- L1 TLB 4K pages
 *
 *		Function 8000.0006 L2 TLB/Cache Information
 *		EAX -- L2 TLB 2/4MB pages
 *		EBX -- L2 TLB 4K pages
 *		ECX -- L2 Unified cache
 *
 *	Older CPUs:
 *		Function 8000.0005 L1 TLB/Cache Information
 *
 *		Function 8000.0006 L2 Cache Information
 *		ECX -- L2 Unified cache
 */
/* L1 TLB 2/4MB pages */
#define AMD_L1_EAX_DTLB_ASSOC(x)       (((x) >> 24) & 0xff)
#define AMD_L1_EAX_DTLB_ENTRIES(x)     (((x) >> 16) & 0xff)
#define AMD_L1_EAX_ITLB_ASSOC(x)       (((x) >> 8) & 0xff)
#define AMD_L1_EAX_ITLB_ENTRIES(x)     ( (x) & 0xff)

/* L1 TLB 4K pages */
#define AMD_L1_EBX_DTLB_ASSOC(x)       (((x) >> 24) & 0xff)
#define AMD_L1_EBX_DTLB_ENTRIES(x)     (((x) >> 16) & 0xff)
#define AMD_L1_EBX_ITLB_ASSOC(x)       (((x) >> 8) & 0xff)
#define AMD_L1_EBX_ITLB_ENTRIES(x)     ( (x) & 0xff)

/* L1 Data Cache */
#define AMD_L1_ECX_DC_SIZE(x)          ((((x) >> 24) & 0xff) * 1024)
#define AMD_L1_ECX_DC_ASSOC(x)          (((x) >> 16) & 0xff)
#define AMD_L1_ECX_DC_LPT(x)            (((x) >> 8) & 0xff)
#define AMD_L1_ECX_DC_LS(x)             ( (x) & 0xff)

/* L1 Instruction Cache */
#define AMD_L1_EDX_IC_SIZE(x)          ((((x) >> 24) & 0xff) * 1024)
#define AMD_L1_EDX_IC_ASSOC(x)          (((x) >> 16) & 0xff)
#define AMD_L1_EDX_IC_LPT(x)            (((x) >> 8) & 0xff)
#define AMD_L1_EDX_IC_LS(x)             ( (x) & 0xff)
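
/*
 * Illustrative sketch of decoding CPUID function 8000.0005 with the AMD_L1_*
 * accessors above.  Assumes a GCC/Clang toolchain providing __get_cpuid() in
 * <cpuid.h>; kernel code would use its own CPUID helper instead.
 */
#if 0
#include <cpuid.h>
#include <stdio.h>

static void
amd_l1_example(void)
{
        unsigned int eax, ebx, ecx, edx;

        if (!__get_cpuid(0x80000005, &eax, &ebx, &ecx, &edx))
                return;                 /* extended leaf not supported */

        /* EBX describes the 4K-page L1 TLBs, EDX the L1 instruction cache. */
        printf("L1 DTLB (4K): %u entries, assoc code %u\n",
            AMD_L1_EBX_DTLB_ENTRIES(ebx), AMD_L1_EBX_DTLB_ASSOC(ebx));
        printf("L1 I-cache: %u bytes, assoc %u, %u byte lines\n",
            AMD_L1_EDX_IC_SIZE(edx), AMD_L1_EDX_IC_ASSOC(edx),
            AMD_L1_EDX_IC_LS(edx));
}
#endif
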
/* Note for L2 TLB -- if the upper 16 bits are 0, it is a unified TLB */

/* L2 TLB 2/4MB pages */
#define AMD_L2_EAX_DTLB_ASSOC(x)       (((x) >> 28) & 0xf)
#define AMD_L2_EAX_DTLB_ENTRIES(x)     (((x) >> 16) & 0xfff)
#define AMD_L2_EAX_IUTLB_ASSOC(x)      (((x) >> 12) & 0xf)
#define AMD_L2_EAX_IUTLB_ENTRIES(x)    ( (x) & 0xfff)

/* L2 TLB 4K pages */
#define AMD_L2_EBX_DTLB_ASSOC(x)       (((x) >> 28) & 0xf)
#define AMD_L2_EBX_DTLB_ENTRIES(x)     (((x) >> 16) & 0xfff)
#define AMD_L2_EBX_IUTLB_ASSOC(x)      (((x) >> 12) & 0xf)
#define AMD_L2_EBX_IUTLB_ENTRIES(x)    ( (x) & 0xfff)
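
/*
 * Illustrative sketch of the unified-TLB rule noted above: if the upper
 * 16 bits (the data-TLB fields) of EAX/EBX from function 8000.0006 are zero,
 * the lower half describes a single unified TLB rather than split I/D TLBs.
 */
#if 0
#include <stdint.h>

static int
amd_l2_tlb_is_unified(uint32_t reg)
{
        return (reg >> 16) == 0;
}
#endif
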
/* L2 Cache */
#define AMD_L2_ECX_C_SIZE(x)           ((((x) >> 16) & 0xffff) * 1024)
#define AMD_L2_ECX_C_ASSOC(x)           (((x) >> 12) & 0xf)
#define AMD_L2_ECX_C_LPT(x)             (((x) >> 8) & 0xf)
#define AMD_L2_ECX_C_LS(x)              ( (x) & 0xff)
/* L3 Cache */
#define AMD_L3_EDX_C_SIZE(x)           ((((x) >> 18) & 0xffff) * 1024 * 512)
#define AMD_L3_EDX_C_ASSOC(x)           (((x) >> 12) & 0xf)
#define AMD_L3_EDX_C_LPT(x)             (((x) >> 8) & 0xf)
#define AMD_L3_EDX_C_LS(x)              ( (x) & 0xff)
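
/*
 * Illustrative sketch: filling a struct x86_cache_info for the L2 cache from
 * the ECX value of function 8000.0006 (the register value is assumed to come
 * from a CPUID helper, as in the earlier sketch).  The associativity field is
 * stored as the raw 4-bit code; AMD_L2CACHE_INFO below translates it into a
 * way count.
 */
#if 0
#include <stdint.h>

static void
amd_l2_cache_decode(uint32_t ecx, struct x86_cache_info *cai)
{
        cai->cai_totalsize = AMD_L2_ECX_C_SIZE(ecx);            /* bytes */
        cai->cai_associativity = AMD_L2_ECX_C_ASSOC(ecx);       /* coded value */
        cai->cai_linesize = AMD_L2_ECX_C_LS(ecx);               /* bytes */
}
#endif
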
/* L1 TLB 1GB pages */
#define AMD_L1_1GB_EAX_DTLB_ASSOC(x)    (((x) >> 28) & 0xf)
#define AMD_L1_1GB_EAX_DTLB_ENTRIES(x)  (((x) >> 16) & 0xfff)
#define AMD_L1_1GB_EAX_IUTLB_ASSOC(x)   (((x) >> 12) & 0xf)
#define AMD_L1_1GB_EAX_IUTLB_ENTRIES(x) ( (x) & 0xfff)

/* L2 TLB 1GB pages */
#define AMD_L2_1GB_EBX_DUTLB_ASSOC(x)   (((x) >> 28) & 0xf)
#define AMD_L2_1GB_EBX_DUTLB_ENTRIES(x) (((x) >> 16) & 0xfff)
#define AMD_L2_1GB_EBX_IUTLB_ASSOC(x)   (((x) >> 12) & 0xf)
#define AMD_L2_1GB_EBX_IUTLB_ENTRIES(x) ( (x) & 0xfff)
/*
 * VIA Cache Info:
 *
 *	Nehemiah (at least):
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EBX -- L1 TLB 4K pages
 *
 *		Function 8000.0006 L2 Cache Information
 *		ECX -- L2 Unified cache
 */
/* L1 TLB 4K pages */
#define VIA_L1_EBX_DTLB_ASSOC(x)       (((x) >> 24) & 0xff)
#define VIA_L1_EBX_DTLB_ENTRIES(x)     (((x) >> 16) & 0xff)
#define VIA_L1_EBX_ITLB_ASSOC(x)       (((x) >> 8) & 0xff)
#define VIA_L1_EBX_ITLB_ENTRIES(x)     ( (x) & 0xff)

/* L1 Data Cache */
#define VIA_L1_ECX_DC_SIZE(x)          ((((x) >> 24) & 0xff) * 1024)
#define VIA_L1_ECX_DC_ASSOC(x)          (((x) >> 16) & 0xff)
#define VIA_L1_ECX_DC_LPT(x)            (((x) >> 8) & 0xff)
#define VIA_L1_ECX_DC_LS(x)             ( (x) & 0xff)

/* L1 Instruction Cache */
#define VIA_L1_EDX_IC_SIZE(x)          ((((x) >> 24) & 0xff) * 1024)
#define VIA_L1_EDX_IC_ASSOC(x)          (((x) >> 16) & 0xff)
#define VIA_L1_EDX_IC_LPT(x)            (((x) >> 8) & 0xff)
#define VIA_L1_EDX_IC_LS(x)             ( (x) & 0xff)
/* L2 Cache (pre-Nehemiah) */
#define VIA_L2_ECX_C_SIZE(x)           ((((x) >> 24) & 0xff) * 1024)
#define VIA_L2_ECX_C_ASSOC(x)           (((x) >> 16) & 0xff)
#define VIA_L2_ECX_C_LPT(x)             (((x) >> 8) & 0xff)
#define VIA_L2_ECX_C_LS(x)              ( (x) & 0xff)

/* L2 Cache (Nehemiah and newer) */
#define VIA_L2N_ECX_C_SIZE(x)          ((((x) >> 16) & 0xffff) * 1024)
#define VIA_L2N_ECX_C_ASSOC(x)          (((x) >> 12) & 0xf)
#define VIA_L2N_ECX_C_LPT(x)            (((x) >> 8) & 0xf)
#define VIA_L2N_ECX_C_LS(x)             ( (x) & 0xff)
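
/*
 * Illustrative sketch: the two VIA L2 encodings above differ by CPU model.
 * The is_nehemiah flag is assumed to come from the caller's model check;
 * Nehemiah and newer parts use the VIA_L2N_* layout.
 */
#if 0
#include <stdint.h>

static unsigned int
via_l2_size(uint32_t ecx, int is_nehemiah)
{
        return is_nehemiah ? VIA_L2N_ECX_C_SIZE(ecx) : VIA_L2_ECX_C_SIZE(ecx);
}
#endif
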
#ifdef _KERNEL
#define __CI_TBL(a,b,c,d,e,f)   { a, b, c, d, e }
#else
#define __CI_TBL(a,b,c,d,e,f)   { a, b, c, d, e, f }
#endif
/*
 * XXX Currently organized mostly by cache type, but would be
 * XXX easier to maintain if it were in descriptor type order.
 */
#define INTEL_CACHE_INFO { \
        __CI_TBL(CAI_ITLB, 0x01, 4, 32, 4 * 1024, NULL), \
        __CI_TBL(CAI_ITLB, 0xb0, 4, 128, 4 * 1024, NULL), \
        __CI_TBL(CAI_ITLB2, 0x02, 0xff, 2, 4 * 1024 * 1024, NULL), \
        __CI_TBL(CAI_DTLB, 0x03, 4, 64, 4 * 1024, NULL), \
        __CI_TBL(CAI_DTLB, 0xb3, 4, 128, 4 * 1024, NULL), \
        __CI_TBL(CAI_DTLB, 0xb4, 4, 256, 4 * 1024, NULL), \
        __CI_TBL(CAI_DTLB2, 0x04, 4, 8, 4 * 1024 * 1024, NULL), \
        __CI_TBL(CAI_DTLB2, 0x05, 4, 32, 4 * 1024 * 1024, NULL), \
        __CI_TBL(CAI_ITLB, 0x50, 0xff, 64, 4 * 1024, "4K/4M: 64 entries"), \
        __CI_TBL(CAI_ITLB, 0x51, 0xff, 64, 4 * 1024, "4K/4M: 128 entries"), \
        __CI_TBL(CAI_ITLB, 0x52, 0xff, 64, 4 * 1024, "4K/4M: 256 entries"), \
        __CI_TBL(CAI_ITLB, 0x55, 0xff, 64, 4 * 1024, "2M/4M: 7 entries"), \
        __CI_TBL(CAI_DTLB2, 0x56, 4, 16, 4 * 1024 * 1024, NULL), \
        __CI_TBL(CAI_DTLB2, 0x57, 4, 16, 4 * 1024, NULL), \
        __CI_TBL(CAI_DTLB, 0x5a, 0xff, 64, 4 * 1024, "2M/4M: 32 entries (L0)"), \
        __CI_TBL(CAI_DTLB, 0x5b, 0xff, 64, 4 * 1024, "4K/4M: 64 entries"), \
        __CI_TBL(CAI_DTLB, 0x5c, 0xff, 64, 4 * 1024, "4K/4M: 128 entries"), \
        __CI_TBL(CAI_DTLB, 0x5d, 0xff, 64, 4 * 1024, "4K/4M: 256 entries"), \
        __CI_TBL(CAI_ITLB, 0xb1, 4, 64, 0, "8 2M/4 4M entries"), \
        __CI_TBL(CAI_ITLB, 0xb2, 4, 64, 4 * 1024, NULL), \
        __CI_TBL(CAI_ICACHE, 0x06, 4, 8 * 1024, 32, NULL), \
        __CI_TBL(CAI_ICACHE, 0x08, 4, 16 * 1024, 32, NULL), \
        __CI_TBL(CAI_ICACHE, 0x09, 4, 32 * 1024, 64, NULL), \
        __CI_TBL(CAI_ICACHE, 0x30, 8, 32 * 1024, 64, NULL), \
        __CI_TBL(CAI_DCACHE, 0x0a, 2, 8 * 1024, 32, NULL), \
        __CI_TBL(CAI_DCACHE, 0x0c, 4, 16 * 1024, 32, NULL), \
        __CI_TBL(CAI_DCACHE, 0x0d, 4, 16 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x21, 8, 256 * 1024, 64, NULL), /* L2 (MLC) */ \
        __CI_TBL(CAI_L2CACHE, 0x39, 4, 128 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x3a, 6, 192 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x3b, 2, 128 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x3c, 4, 256 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x3d, 6, 384 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x3e, 4, 512 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x40, 0, 0, 0, "not present"), \
        __CI_TBL(CAI_L2CACHE, 0x41, 4, 128 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x42, 4, 256 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x43, 4, 512 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x44, 4, 1 * 1024 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x45, 4, 2 * 1024 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x48, 12, 3 * 1024 * 1024, 64, NULL), \
        /* 0x49 is L2 on Xeon MP (Family 0f, Model 06), L3 otherwise */ \
        __CI_TBL(CAI_L2CACHE, 0x49, 16, 4 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x4e, 24, 6 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_DCACHE, 0x60, 8, 16 * 1024, 64, NULL), \
        __CI_TBL(CAI_DCACHE, 0x66, 4, 8 * 1024, 64, NULL), \
        __CI_TBL(CAI_DCACHE, 0x67, 4, 16 * 1024, 64, NULL), \
        __CI_TBL(CAI_DCACHE, 0x2c, 8, 32 * 1024, 64, NULL), \
        __CI_TBL(CAI_DCACHE, 0x68, 4, 32 * 1024, 64, NULL), \
        __CI_TBL(CAI_ICACHE, 0x70, 8, 12 * 1024, 64, "12K uOp cache"), \
        __CI_TBL(CAI_ICACHE, 0x71, 8, 16 * 1024, 64, "16K uOp cache"), \
        __CI_TBL(CAI_ICACHE, 0x72, 8, 32 * 1024, 64, "32K uOp cache"), \
        __CI_TBL(CAI_ICACHE, 0x73, 8, 64 * 1024, 64, "64K uOp cache"), \
        __CI_TBL(CAI_L2CACHE, 0x78, 4, 1 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x79, 8, 128 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x7a, 8, 256 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x7b, 8, 512 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x7c, 8, 1 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x7d, 8, 2 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x7f, 2, 512 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x82, 8, 256 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x83, 8, 512 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x84, 8, 1 * 1024 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x85, 8, 2 * 1024 * 1024, 32, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x86, 4, 512 * 1024, 64, NULL), \
        __CI_TBL(CAI_L2CACHE, 0x87, 8, 1 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x22, 0xff, 512 * 1024, 64, "sectored, 4-way "), \
        __CI_TBL(CAI_L3CACHE, 0x23, 0xff, 1 * 1024 * 1024, 64, "sectored, 8-way "), \
        __CI_TBL(CAI_L3CACHE, 0x25, 0xff, 2 * 1024 * 1024, 64, "sectored, 8-way "), \
        __CI_TBL(CAI_L3CACHE, 0x29, 0xff, 4 * 1024 * 1024, 64, "sectored, 8-way "), \
        __CI_TBL(CAI_L3CACHE, 0x46, 4, 4 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x47, 8, 8 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x49, 16, 4 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x4a, 12, 6 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x4b, 16, 8 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x4c, 12, 12 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0x4d, 16, 16 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xd0, 4, 512 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xd1, 4, 1 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xd2, 4, 2 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xd6, 8, 1 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xd7, 8, 2 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xd8, 8, 4 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xdc, 12, 3 * 512 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xdd, 12, 3 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xde, 12, 6 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xe2, 16, 2 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xe3, 16, 4 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xe4, 16, 8 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xea, 24, 12 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xeb, 24, 24 * 1024 * 1024, 64, NULL), \
        __CI_TBL(CAI_L3CACHE, 0xec, 24, 24 * 1024 * 1024, 64, NULL), \
        __CI_TBL(0, 0, 0, 0, 0, NULL) \
}
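
/*
 * Illustrative sketch: looking up a CPUID function 2 descriptor byte in
 * INTEL_CACHE_INFO.  The table ends with an all-zero entry, so a scan can
 * stop when cai_desc is 0.  The table and function names here are for the
 * example only.
 */
#if 0
#include <stddef.h>
#include <stdint.h>

static const struct x86_cache_info intel_cache_info_tbl[] = INTEL_CACHE_INFO;

static const struct x86_cache_info *
intel_cache_lookup(uint8_t desc)
{
        const struct x86_cache_info *cai;

        for (cai = intel_cache_info_tbl; cai->cai_desc != 0; cai++) {
                if (cai->cai_desc == desc)
                        return cai;
        }
        return NULL;                    /* unknown descriptor */
}
#endif
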
#define AMD_L2CACHE_INFO { \
        __CI_TBL(0, 0x01, 1, 0, 0, NULL), \
        __CI_TBL(0, 0x02, 2, 0, 0, NULL), \
        __CI_TBL(0, 0x04, 4, 0, 0, NULL), \
        __CI_TBL(0, 0x06, 8, 0, 0, NULL), \
        __CI_TBL(0, 0x08, 16, 0, 0, NULL), \
        __CI_TBL(0, 0x0a, 32, 0, 0, NULL), \
        __CI_TBL(0, 0x0b, 48, 0, 0, NULL), \
        __CI_TBL(0, 0x0c, 64, 0, 0, NULL), \
        __CI_TBL(0, 0x0d, 96, 0, 0, NULL), \
        __CI_TBL(0, 0x0e, 128, 0, 0, NULL), \
        __CI_TBL(0, 0x0f, 0xff, 0, 0, NULL), \
        __CI_TBL(0, 0x00, 0, 0, 0, NULL) \
}
#define AMD_L3CACHE_INFO { \
        __CI_TBL(0, 0x01, 1, 0, 0, NULL), \
        __CI_TBL(0, 0x02, 2, 0, 0, NULL), \
        __CI_TBL(0, 0x04, 4, 0, 0, NULL), \
        __CI_TBL(0, 0x06, 8, 0, 0, NULL), \
        __CI_TBL(0, 0x08, 16, 0, 0, NULL), \
        __CI_TBL(0, 0x0a, 32, 0, 0, NULL), \
        __CI_TBL(0, 0x0b, 48, 0, 0, NULL), \
        __CI_TBL(0, 0x0c, 64, 0, 0, NULL), \
        __CI_TBL(0, 0x0d, 96, 0, 0, NULL), \
        __CI_TBL(0, 0x0e, 128, 0, 0, NULL), \
        __CI_TBL(0, 0x0f, 0xff, 0, 0, NULL), \
        __CI_TBL(0, 0x00, 0, 0, 0, NULL) \
}
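
/*
 * Illustrative sketch: translating the coded associativity returned by
 * function 8000.0006 into a way count via the table above (cai_desc holds
 * the code, cai_associativity the way count; 0xff means fully associative).
 * Table and function names are for the example only.
 */
#if 0
#include <stdint.h>

static const struct x86_cache_info amd_l2_assoc_tbl[] = AMD_L2CACHE_INFO;

static uint8_t
amd_l2_assoc_ways(uint8_t code)
{
        const struct x86_cache_info *cai;

        for (cai = amd_l2_assoc_tbl; cai->cai_desc != 0; cai++) {
                if (cai->cai_desc == code)
                        return cai->cai_associativity;
        }
        return 0;                       /* code 0x00: disabled/unknown */
}
#endif
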
#endif /* _X86_CACHEINFO_H_ */