treewide: remove redundant IS_ERR() before error code check
[linux/fpc-iii.git] / arch / c6x / platforms / cache.c
blobfff027b725139c53b9f1cb209bf2d05d7ea44040
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Copyright (C) 2011 Texas Instruments Incorporated
4 * Author: Mark Salter <msalter@redhat.com>
5 */
6 #include <linux/of.h>
7 #include <linux/of_address.h>
8 #include <linux/io.h>
10 #include <asm/cache.h>
11 #include <asm/soc.h>
/*
 * Internal Memory Control Registers for caches
 */
#define IMCR_CCFG	  0x0000
#define IMCR_L1PCFG	  0x0020
#define IMCR_L1PCC	  0x0024
#define IMCR_L1DCFG	  0x0040
#define IMCR_L1DCC	  0x0044
#define IMCR_L2ALLOC0	  0x2000
#define IMCR_L2ALLOC1	  0x2004
#define IMCR_L2ALLOC2	  0x2008
#define IMCR_L2ALLOC3	  0x200c
#define IMCR_L2WBAR	  0x4000
#define IMCR_L2WWC	  0x4004
#define IMCR_L2WIBAR	  0x4010
#define IMCR_L2WIWC	  0x4014
#define IMCR_L2IBAR	  0x4018
#define IMCR_L2IWC	  0x401c
#define IMCR_L1PIBAR	  0x4020
#define IMCR_L1PIWC	  0x4024
#define IMCR_L1DWIBAR	  0x4030
#define IMCR_L1DWIWC	  0x4034
#define IMCR_L1DWBAR	  0x4040
#define IMCR_L1DWWC	  0x4044
#define IMCR_L1DIBAR	  0x4048
#define IMCR_L1DIWC	  0x404c
#define IMCR_L2WB	  0x5000
#define IMCR_L2WBINV	  0x5004
#define IMCR_L2INV	  0x5008
#define IMCR_L1PINV	  0x5028
#define IMCR_L1DWB	  0x5040
#define IMCR_L1DWBINV	  0x5044
#define IMCR_L1DINV	  0x5048
#define IMCR_MAR_BASE	  0x8000
#define IMCR_MAR96_111	  0x8180
#define IMCR_MAR128_191	  0x8200
#define IMCR_MAR224_239	  0x8380
#define IMCR_L2MPFAR	  0xa000
#define IMCR_L2MPFSR	  0xa004
#define IMCR_L2MPFCR	  0xa008
#define IMCR_L2MPLK0	  0xa100
#define IMCR_L2MPLK1	  0xa104
#define IMCR_L2MPLK2	  0xa108
#define IMCR_L2MPLK3	  0xa10c
#define IMCR_L2MPLKCMD	  0xa110
#define IMCR_L2MPLKSTAT	  0xa114
#define IMCR_L2MPPA_BASE  0xa200
#define IMCR_L1PMPFAR	  0xa400
#define IMCR_L1PMPFSR	  0xa404
#define IMCR_L1PMPFCR	  0xa408
#define IMCR_L1PMPLK0	  0xa500
#define IMCR_L1PMPLK1	  0xa504
#define IMCR_L1PMPLK2	  0xa508
#define IMCR_L1PMPLK3	  0xa50c
#define IMCR_L1PMPLKCMD	  0xa510
#define IMCR_L1PMPLKSTAT  0xa514
#define IMCR_L1PMPPA_BASE 0xa600
#define IMCR_L1DMPFAR	  0xac00
#define IMCR_L1DMPFSR	  0xac04
#define IMCR_L1DMPFCR	  0xac08
#define IMCR_L1DMPLK0	  0xad00
#define IMCR_L1DMPLK1	  0xad04
#define IMCR_L1DMPLK2	  0xad08
#define IMCR_L1DMPLK3	  0xad0c
#define IMCR_L1DMPLKCMD	  0xad10
#define IMCR_L1DMPLKSTAT  0xad14
#define IMCR_L1DMPPA_BASE 0xae00
#define IMCR_L2PDWAKE0	  0xc040
#define IMCR_L2PDWAKE1	  0xc044
#define IMCR_L2PDSLEEP0	  0xc050
#define IMCR_L2PDSLEEP1	  0xc054
#define IMCR_L2PDSTAT0	  0xc060
#define IMCR_L2PDSTAT1	  0xc064

/*
 * CCFG register values and bits
 */
#define L2MODE_0K_CACHE	  0x0
#define L2MODE_32K_CACHE  0x1
#define L2MODE_64K_CACHE  0x2
#define L2MODE_128K_CACHE 0x3
#define L2MODE_256K_CACHE 0x7

#define L2PRIO_URGENT	  0x0
#define L2PRIO_HIGH	  0x1
#define L2PRIO_MEDIUM	  0x2
#define L2PRIO_LOW	  0x3

/*
 * NOTE(review): the original comments had L1P/L1D swapped. Per the TI
 * C64x+ megamodule CCFG layout, bit 8 (ID) invalidates L1D and bit 9
 * (IP) invalidates L1P -- confirm against SPRU871 before relying on it.
 */
#define CCFG_ID 0x100	/* Invalidate L1D bit */
#define CCFG_IP 0x200	/* Invalidate L1P bit */
/* IMCR register window, mapped from the device tree in c6x_cache_init() */
static void __iomem *cache_base;

/*
 * L1 & L2 caches generic functions
 */
#define imcr_get(reg) soc_readl(cache_base + (reg))
#define imcr_set(reg, value)				\
do {							\
	soc_writel((value), cache_base + (reg));	\
	soc_readl(cache_base + (reg));	/* readback flushes the posted write */ \
} while (0)
/*
 * Busy-poll the given word-count register until it drains to zero,
 * i.e. until the in-flight block cache operation has completed.
 */
static void cache_block_operation_wait(unsigned int wc_reg)
{
	for (;;) {
		if (!imcr_get(wc_reg))
			return;
		cpu_relax();
	}
}
/* Serializes programming of the shared BAR/WC block-operation registers */
static DEFINE_SPINLOCK(cache_lock);
126 * Generic function to perform a block cache operation as
127 * invalidate or writeback/invalidate
129 static void cache_block_operation(unsigned int *start,
130 unsigned int *end,
131 unsigned int bar_reg,
132 unsigned int wc_reg)
134 unsigned long flags;
135 unsigned int wcnt =
136 (L2_CACHE_ALIGN_CNT((unsigned int) end)
137 - L2_CACHE_ALIGN_LOW((unsigned int) start)) >> 2;
138 unsigned int wc = 0;
140 for (; wcnt; wcnt -= wc, start += wc) {
141 loop:
142 spin_lock_irqsave(&cache_lock, flags);
145 * If another cache operation is occurring
147 if (unlikely(imcr_get(wc_reg))) {
148 spin_unlock_irqrestore(&cache_lock, flags);
150 /* Wait for previous operation completion */
151 cache_block_operation_wait(wc_reg);
153 /* Try again */
154 goto loop;
157 imcr_set(bar_reg, L2_CACHE_ALIGN_LOW((unsigned int) start));
159 if (wcnt > 0xffff)
160 wc = 0xffff;
161 else
162 wc = wcnt;
164 /* Set word count value in the WC register */
165 imcr_set(wc_reg, wc & 0xffff);
167 spin_unlock_irqrestore(&cache_lock, flags);
169 /* Wait for completion */
170 cache_block_operation_wait(wc_reg);
174 static void cache_block_operation_nowait(unsigned int *start,
175 unsigned int *end,
176 unsigned int bar_reg,
177 unsigned int wc_reg)
179 unsigned long flags;
180 unsigned int wcnt =
181 (L2_CACHE_ALIGN_CNT((unsigned int) end)
182 - L2_CACHE_ALIGN_LOW((unsigned int) start)) >> 2;
183 unsigned int wc = 0;
185 for (; wcnt; wcnt -= wc, start += wc) {
187 spin_lock_irqsave(&cache_lock, flags);
189 imcr_set(bar_reg, L2_CACHE_ALIGN_LOW((unsigned int) start));
191 if (wcnt > 0xffff)
192 wc = 0xffff;
193 else
194 wc = wcnt;
196 /* Set word count value in the WC register */
197 imcr_set(wc_reg, wc & 0xffff);
199 spin_unlock_irqrestore(&cache_lock, flags);
201 /* Don't wait for completion on last cache operation */
202 if (wcnt > 0xffff)
203 cache_block_operation_wait(wc_reg);
208 * L1 caches management
212 * Disable L1 caches
214 void L1_cache_off(void)
216 unsigned int dummy;
218 imcr_set(IMCR_L1PCFG, 0);
219 dummy = imcr_get(IMCR_L1PCFG);
221 imcr_set(IMCR_L1DCFG, 0);
222 dummy = imcr_get(IMCR_L1DCFG);
226 * Enable L1 caches
228 void L1_cache_on(void)
230 unsigned int dummy;
232 imcr_set(IMCR_L1PCFG, 7);
233 dummy = imcr_get(IMCR_L1PCFG);
235 imcr_set(IMCR_L1DCFG, 7);
236 dummy = imcr_get(IMCR_L1DCFG);
240 * L1P global-invalidate all
242 void L1P_cache_global_invalidate(void)
244 unsigned int set = 1;
245 imcr_set(IMCR_L1PINV, set);
246 while (imcr_get(IMCR_L1PINV) & 1)
247 cpu_relax();
251 * L1D global-invalidate all
253 * Warning: this operation causes all updated data in L1D to
254 * be discarded rather than written back to the lower levels of
255 * memory
257 void L1D_cache_global_invalidate(void)
259 unsigned int set = 1;
260 imcr_set(IMCR_L1DINV, set);
261 while (imcr_get(IMCR_L1DINV) & 1)
262 cpu_relax();
265 void L1D_cache_global_writeback(void)
267 unsigned int set = 1;
268 imcr_set(IMCR_L1DWB, set);
269 while (imcr_get(IMCR_L1DWB) & 1)
270 cpu_relax();
273 void L1D_cache_global_writeback_invalidate(void)
275 unsigned int set = 1;
276 imcr_set(IMCR_L1DWBINV, set);
277 while (imcr_get(IMCR_L1DWBINV) & 1)
278 cpu_relax();
282 * L2 caches management
286 * Set L2 operation mode
288 void L2_cache_set_mode(unsigned int mode)
290 unsigned int ccfg = imcr_get(IMCR_CCFG);
292 /* Clear and set the L2MODE bits in CCFG */
293 ccfg &= ~7;
294 ccfg |= (mode & 7);
295 imcr_set(IMCR_CCFG, ccfg);
296 ccfg = imcr_get(IMCR_CCFG);
300 * L2 global-writeback and global-invalidate all
302 void L2_cache_global_writeback_invalidate(void)
304 imcr_set(IMCR_L2WBINV, 1);
305 while (imcr_get(IMCR_L2WBINV))
306 cpu_relax();
310 * L2 global-writeback all
312 void L2_cache_global_writeback(void)
314 imcr_set(IMCR_L2WB, 1);
315 while (imcr_get(IMCR_L2WB))
316 cpu_relax();
320 * Cacheability controls
322 void enable_caching(unsigned long start, unsigned long end)
324 unsigned int mar = IMCR_MAR_BASE + ((start >> 24) << 2);
325 unsigned int mar_e = IMCR_MAR_BASE + ((end >> 24) << 2);
327 for (; mar <= mar_e; mar += 4)
328 imcr_set(mar, imcr_get(mar) | 1);
331 void disable_caching(unsigned long start, unsigned long end)
333 unsigned int mar = IMCR_MAR_BASE + ((start >> 24) << 2);
334 unsigned int mar_e = IMCR_MAR_BASE + ((end >> 24) << 2);
336 for (; mar <= mar_e; mar += 4)
337 imcr_set(mar, imcr_get(mar) & ~1);
342 * L1 block operations
344 void L1P_cache_block_invalidate(unsigned int start, unsigned int end)
346 cache_block_operation((unsigned int *) start,
347 (unsigned int *) end,
348 IMCR_L1PIBAR, IMCR_L1PIWC);
350 EXPORT_SYMBOL(L1P_cache_block_invalidate);
352 void L1D_cache_block_invalidate(unsigned int start, unsigned int end)
354 cache_block_operation((unsigned int *) start,
355 (unsigned int *) end,
356 IMCR_L1DIBAR, IMCR_L1DIWC);
359 void L1D_cache_block_writeback_invalidate(unsigned int start, unsigned int end)
361 cache_block_operation((unsigned int *) start,
362 (unsigned int *) end,
363 IMCR_L1DWIBAR, IMCR_L1DWIWC);
366 void L1D_cache_block_writeback(unsigned int start, unsigned int end)
368 cache_block_operation((unsigned int *) start,
369 (unsigned int *) end,
370 IMCR_L1DWBAR, IMCR_L1DWWC);
372 EXPORT_SYMBOL(L1D_cache_block_writeback);
375 * L2 block operations
377 void L2_cache_block_invalidate(unsigned int start, unsigned int end)
379 cache_block_operation((unsigned int *) start,
380 (unsigned int *) end,
381 IMCR_L2IBAR, IMCR_L2IWC);
384 void L2_cache_block_writeback(unsigned int start, unsigned int end)
386 cache_block_operation((unsigned int *) start,
387 (unsigned int *) end,
388 IMCR_L2WBAR, IMCR_L2WWC);
391 void L2_cache_block_writeback_invalidate(unsigned int start, unsigned int end)
393 cache_block_operation((unsigned int *) start,
394 (unsigned int *) end,
395 IMCR_L2WIBAR, IMCR_L2WIWC);
398 void L2_cache_block_invalidate_nowait(unsigned int start, unsigned int end)
400 cache_block_operation_nowait((unsigned int *) start,
401 (unsigned int *) end,
402 IMCR_L2IBAR, IMCR_L2IWC);
405 void L2_cache_block_writeback_nowait(unsigned int start, unsigned int end)
407 cache_block_operation_nowait((unsigned int *) start,
408 (unsigned int *) end,
409 IMCR_L2WBAR, IMCR_L2WWC);
412 void L2_cache_block_writeback_invalidate_nowait(unsigned int start,
413 unsigned int end)
415 cache_block_operation_nowait((unsigned int *) start,
416 (unsigned int *) end,
417 IMCR_L2WIBAR, IMCR_L2WIWC);
422 * L1 and L2 caches configuration
424 void __init c6x_cache_init(void)
426 struct device_node *node;
428 node = of_find_compatible_node(NULL, NULL, "ti,c64x+cache");
429 if (!node)
430 return;
432 cache_base = of_iomap(node, 0);
434 of_node_put(node);
436 if (!cache_base)
437 return;
439 /* Set L2 caches on the the whole L2 SRAM memory */
440 L2_cache_set_mode(L2MODE_SIZE);
442 /* Enable L1 */
443 L1_cache_on();