/*
 * include/asm-sh64/tlb.h
 *
 * Copyright (C) 2003  Paul Mundt
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#ifndef __ASM_SH64_TLB_H
#define __ASM_SH64_TLB_H
/*
 * Note! These are mostly unused, we just need the xTLB_LAST_VAR_UNRESTRICTED
 * for head.S! Once this limitation is gone, we can clean the rest of this up.
 */
/* ITLB defines */
#define ITLB_FIXED			0x00000000	/* First fixed ITLB, see head.S */
#define ITLB_LAST_VAR_UNRESTRICTED	0x000003F0	/* Last ITLB */

/* DTLB defines */
#define DTLB_FIXED			0x00800000	/* First fixed DTLB, see head.S */
#define DTLB_LAST_VAR_UNRESTRICTED	0x008003F0	/* Last DTLB */
#ifndef __ASSEMBLY__
/**
 * for_each_dtlb_entry
 *
 * @tlb:	TLB entry
 *
 * Iterate over free (non-wired) DTLB entries
 */
#define for_each_dtlb_entry(tlb)		\
	for (tlb  = cpu_data->dtlb.first;	\
	     tlb <= cpu_data->dtlb.last;	\
	     tlb += cpu_data->dtlb.step)
/**
 * for_each_itlb_entry
 *
 * @tlb:	TLB entry
 *
 * Iterate over free (non-wired) ITLB entries
 */
#define for_each_itlb_entry(tlb)		\
	for (tlb  = cpu_data->itlb.first;	\
	     tlb <= cpu_data->itlb.last;	\
	     tlb += cpu_data->itlb.step)
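/*
 * Usage sketch (not part of the original header): @tlb is simply a cursor
 * holding the configuration-space address of each variable slot in turn,
 * so counting the usable DTLB slots looks roughly like:
 *
 *	unsigned long long entry;
 *	int nr_dtlb = 0;
 *
 *	for_each_dtlb_entry(entry)
 *		nr_dtlb++;
 *
 * for_each_itlb_entry() is used in the same way for the ITLB side.
 */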
/**
 * __flush_tlb_slot
 *
 * @slot: Address of TLB slot.
 *
 * Flushes TLB slot @slot.
 */
static inline void __flush_tlb_slot(unsigned long long slot)
{
	__asm__ __volatile__ ("putcfg %0, 0, r63\n" : : "r" (slot));
}
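/*
 * Example (illustrative sketch): invalidating every non-wired DTLB slot by
 * combining the iterator above with __flush_tlb_slot(). The putcfg stores
 * r63 (which reads as zero in SHmedia) into word 0 of the slot's
 * configuration space, the intent being to clear its valid bit:
 *
 *	unsigned long long d;
 *
 *	for_each_dtlb_entry(d)
 *		__flush_tlb_slot(d);
 */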
/* arch/sh64/mm/tlb.c */
extern int sh64_tlb_init(void);
extern unsigned long long sh64_next_free_dtlb_entry(void);
extern unsigned long long sh64_get_wired_dtlb_entry(void);
extern int sh64_put_wired_dtlb_entry(unsigned long long entry);

extern void sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr, unsigned long asid, unsigned long paddr);
extern void sh64_teardown_tlb_slot(unsigned long long config_addr);
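/*
 * Typical lifecycle of a wired DTLB mapping (sketch only; the real callers
 * live under arch/sh64/mm/):
 *
 *	unsigned long long slot;
 *
 *	slot = sh64_get_wired_dtlb_entry();
 *	sh64_setup_tlb_slot(slot, eaddr, asid, paddr);
 *
 *	... access the mapping at eaddr ...
 *
 *	sh64_teardown_tlb_slot(slot);
 *	sh64_put_wired_dtlb_entry(slot);
 *
 * eaddr, asid and paddr above are placeholders for the caller's virtual
 * address, ASID and physical address.
 */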
#define tlb_start_vma(tlb, vma) \
	flush_cache_range(vma, vma->vm_start, vma->vm_end)

#define tlb_end_vma(tlb, vma)	\
	flush_tlb_range(vma, vma->vm_start, vma->vm_end)

#define __tlb_remove_tlb_entry(tlb, pte, address)	do { } while (0)

/*
 * Flush whole TLBs for MM
 */
#define tlb_flush(tlb)		flush_tlb_mm((tlb)->mm)
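/*
 * Note (rough summary; see asm-generic/tlb.h for the authoritative flow):
 * the generic mmu_gather code calls tlb_start_vma() before unmapping a
 * VMA's range, __tlb_remove_tlb_entry() as individual PTEs are cleared,
 * tlb_end_vma() once the range is done, and tlb_flush() when the gathered
 * pages are finally released. On sh64 only the cache/TLB range flushes and
 * the full-MM flush above do any real work; the per-PTE hook is a no-op.
 */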
#include <asm-generic/tlb.h>

#endif /* __ASSEMBLY__ */

#endif /* __ASM_SH64_TLB_H */