/* arch/m68k/include/asm/m54xxacr.h */
/*
 * Bit definitions for the MCF54xx ACR and CACR registers.
 */

#ifndef m54xxacr_h
#define m54xxacr_h

/*
 * Define the Cache register flags.
 */
#define CACR_DEC	0x80000000	/* Enable data cache */
#define CACR_DWP	0x40000000	/* Data write protection */
#define CACR_DESB	0x20000000	/* Enable data store buffer */
#define CACR_DDPI	0x10000000	/* Disable invalidation by CPUSHL */
#define CACR_DHCLK	0x08000000	/* Half data cache lock mode */
#define CACR_DDCM_WT	0x00000000	/* Write through cache */
#define CACR_DDCM_CP	0x02000000	/* Copyback cache */
#define CACR_DDCM_P	0x04000000	/* No cache, precise */
#define CACR_DDCM_IMP	0x06000000	/* No cache, imprecise */
#define CACR_DCINVA	0x01000000	/* Invalidate data cache */
#define CACR_BEC	0x00080000	/* Enable branch cache */
#define CACR_BCINVA	0x00040000	/* Invalidate branch cache */
#define CACR_IEC	0x00008000	/* Enable instruction cache */
#define CACR_DNFB	0x00002000	/* Inhibited fill buffer */
#define CACR_IDPI	0x00001000	/* Disable CPUSHL */
#define CACR_IHLCK	0x00000800	/* Instruction cache half lock */
#define CACR_IDCM	0x00000400	/* Instruction cache inhibit */
#define CACR_ICINVA	0x00000100	/* Invalidate instr cache */
#define CACR_EUSP	0x00000020	/* Enable separate user a7 */

#define ACR_BASE_POS	24		/* Address Base */
#define ACR_MASK_POS	16		/* Address Mask */
#define ACR_ENABLE	0x00008000	/* Enable address */
#define ACR_USER	0x00000000	/* User mode access only */
#define ACR_SUPER	0x00002000	/* Supervisor mode only */
#define ACR_ANY		0x00004000	/* Match any access mode */
#define ACR_CM_WT	0x00000000	/* Write through mode */
#define ACR_CM_CP	0x00000020	/* Copyback mode */
#define ACR_CM_OFF_PRE	0x00000040	/* No cache, precise */
#define ACR_CM_OFF_IMP	0x00000060	/* No cache, imprecise */
#define ACR_CM		0x00000060	/* Cache mode mask */
#define ACR_SP		0x00000008	/* Supervisor protect */
#define ACR_WPROTECT	0x00000004	/* Write protect */

/* ACR address base is the top byte; mask is (size-1) top byte shifted down */
#define ACR_BA(x)	((x) & 0xff000000)
#define ACR_ADMSK(x)	((((x) - 1) & 0xff000000) >> 8)

#if defined(CONFIG_M5407)

#define ICACHE_SIZE	0x4000	/* instruction - 16k */
#define DCACHE_SIZE	0x2000	/* data - 8k */

#elif defined(CONFIG_M54xx)

#define ICACHE_SIZE	0x8000	/* instruction - 32k */
#define DCACHE_SIZE	0x8000	/* data - 32k */

#elif defined(CONFIG_M5441x)

#define ICACHE_SIZE	0x2000	/* instruction - 8k */
#define DCACHE_SIZE	0x2000	/* data - 8k */
#endif

#define CACHE_LINE_SIZE	0x0010	/* 16 bytes */
#define CACHE_WAYS	4	/* 4 ways */

#define ICACHE_SET_MASK	((ICACHE_SIZE / 64 - 1) << CACHE_WAYS)
#define DCACHE_SET_MASK	((DCACHE_SIZE / 64 - 1) << CACHE_WAYS)
#define ICACHE_MAX_ADDR	ICACHE_SET_MASK
#define DCACHE_MAX_ADDR	DCACHE_SET_MASK

/*
 * Version 4 cores have a true harvard style separate instruction
 * and data cache. Enable data and instruction caches, also enable write
 * buffers and branch accelerator.
 */
/* attention : enabling CACR_DESB requires a "nop" to flush the store buffer */
/* use '+' instead of '|' for assembler's sake */

/* Enable data cache */
/* Enable data store buffer */
/* outside ACRs : No cache, precise */
/* Enable instruction+branch caches */
#if defined(CONFIG_M5407)
/* M5407 has no separate user stack pointer, so no CACR_EUSP */
#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC)
#else
#define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP)
#endif
#define CACHE_INIT (CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)

#if defined(CONFIG_MMU)
/*
 * If running with the MMU enabled then we need to map the internal
 * register region as non-cacheable. And then we map all our RAM as
 * cacheable and supervisor access only.
 */
#define ACR0_MODE	(ACR_BA(IOMEMBASE)+ACR_ADMSK(IOMEMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_CM_OFF_PRE+ACR_SP)
#if defined(CONFIG_CACHE_COPYBACK)
#define ACR1_MODE	(ACR_BA(CONFIG_RAMBASE)+ACR_ADMSK(CONFIG_RAMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_SP+ACR_CM_CP)
#else
#define ACR1_MODE	(ACR_BA(CONFIG_RAMBASE)+ACR_ADMSK(CONFIG_RAMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_SP+ACR_CM_WT)
#endif
#define ACR2_MODE	0
#define ACR3_MODE	(ACR_BA(CONFIG_RAMBASE)+ACR_ADMSK(CONFIG_RAMSIZE)+ \
			 ACR_ENABLE+ACR_SUPER+ACR_SP)

#else

/*
 * For the non-MMU enabled case we map all of RAM as cacheable.
 */
#if defined(CONFIG_CACHE_COPYBACK)
#define DATA_CACHE_MODE	(ACR_ENABLE+ACR_ANY+ACR_CM_CP)
#else
#define DATA_CACHE_MODE	(ACR_ENABLE+ACR_ANY+ACR_CM_WT)
#endif
#define INSN_CACHE_MODE	(ACR_ENABLE+ACR_ANY)

#define CACHE_INVALIDATE  (CACHE_MODE+CACR_DCINVA+CACR_BCINVA+CACR_ICINVA)
#define CACHE_INVALIDATEI (CACHE_MODE+CACR_BCINVA+CACR_ICINVA)
#define CACHE_INVALIDATED (CACHE_MODE+CACR_DCINVA)
#define ACR0_MODE	(0x000f0000+DATA_CACHE_MODE)
#define ACR1_MODE	0
#define ACR2_MODE	(0x000f0000+INSN_CACHE_MODE)
#define ACR3_MODE	0

#if ((DATA_CACHE_MODE & ACR_CM) == ACR_CM_CP)
/* Copyback cache mode must push dirty cache lines first */
#define CACHE_PUSH
#endif

#endif /* CONFIG_MMU */
#endif /* m54xxacr_h */