// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2020 Western Digital Corporation or its affiliates.
 */
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/irq.h>

#include <asm/processor.h>
#include <asm/ptrace.h>
#include <asm/csr.h>
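
/*
 * Match/mask pairs for decoding the trapping instruction: insn is a
 * given opcode when (insn & INSN_MASK_x) == INSN_MATCH_x.
 */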
#define INSN_MATCH_LB		0x3
#define INSN_MASK_LB		0x707f
#define INSN_MATCH_LH		0x1003
#define INSN_MASK_LH		0x707f
#define INSN_MATCH_LW		0x2003
#define INSN_MASK_LW		0x707f
#define INSN_MATCH_LD		0x3003
#define INSN_MASK_LD		0x707f
#define INSN_MATCH_LBU		0x4003
#define INSN_MASK_LBU		0x707f
#define INSN_MATCH_LHU		0x5003
#define INSN_MASK_LHU		0x707f
#define INSN_MATCH_LWU		0x6003
#define INSN_MASK_LWU		0x707f
#define INSN_MATCH_SB		0x23
#define INSN_MASK_SB		0x707f
#define INSN_MATCH_SH		0x1023
#define INSN_MASK_SH		0x707f
#define INSN_MATCH_SW		0x2023
#define INSN_MASK_SW		0x707f
#define INSN_MATCH_SD		0x3023
#define INSN_MASK_SD		0x707f

#define INSN_MATCH_FLW		0x2007
#define INSN_MASK_FLW		0x707f
#define INSN_MATCH_FLD		0x3007
#define INSN_MASK_FLD		0x707f
#define INSN_MATCH_FLQ		0x4007
#define INSN_MASK_FLQ		0x707f
#define INSN_MATCH_FSW		0x2027
#define INSN_MASK_FSW		0x707f
#define INSN_MATCH_FSD		0x3027
#define INSN_MASK_FSD		0x707f
#define INSN_MATCH_FSQ		0x4027
#define INSN_MASK_FSQ		0x707f

#define INSN_MATCH_C_LD		0x6000
#define INSN_MASK_C_LD		0xe003
#define INSN_MATCH_C_SD		0xe000
#define INSN_MASK_C_SD		0xe003
#define INSN_MATCH_C_LW		0x4000
#define INSN_MASK_C_LW		0xe003
#define INSN_MATCH_C_SW		0xc000
#define INSN_MASK_C_SW		0xe003
#define INSN_MATCH_C_LDSP	0x6002
#define INSN_MASK_C_LDSP	0xe003
#define INSN_MATCH_C_SDSP	0xe002
#define INSN_MASK_C_SDSP	0xe003
#define INSN_MATCH_C_LWSP	0x4002
#define INSN_MASK_C_LWSP	0xe003
#define INSN_MATCH_C_SWSP	0xc002
#define INSN_MASK_C_SWSP	0xe003

#define INSN_MATCH_C_FLD	0x2000
#define INSN_MASK_C_FLD		0xe003
#define INSN_MATCH_C_FLW	0x6000
#define INSN_MASK_C_FLW		0xe003
#define INSN_MATCH_C_FSD	0xa000
#define INSN_MASK_C_FSD		0xe003
#define INSN_MATCH_C_FSW	0xe000
#define INSN_MASK_C_FSW		0xe003
#define INSN_MATCH_C_FLDSP	0x2002
#define INSN_MASK_C_FLDSP	0xe003
#define INSN_MATCH_C_FSDSP	0xa002
#define INSN_MASK_C_FSDSP	0xe003
#define INSN_MATCH_C_FLWSP	0x6002
#define INSN_MASK_C_FLWSP	0xe003
#define INSN_MATCH_C_FSWSP	0xe002
#define INSN_MASK_C_FSWSP	0xe003
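
/*
 * The two low bits of an instruction's first halfword encode its length:
 * 0, 1 or 2 means a 16-bit compressed instruction, 3 means 32-bit.
 */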
#define INSN_LEN(insn)		((((insn) & 0x3) < 0x3) ? 2 : 4)

#if defined(CONFIG_64BIT)
#define LOG_REGBYTES		3
#define XLEN			64
#else
#define LOG_REGBYTES		2
#define XLEN			32
#endif
#define REGBYTES		(1 << LOG_REGBYTES)
#define XLEN_MINUS_16		((XLEN) - 16)
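
/*
 * Bit positions of the register fields in the 32-bit and compressed
 * encodings, plus helpers to reassemble the scattered immediate bits of
 * the compressed load/store forms (RV_X extracts an n-bit field at bit s).
 */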
#define SH_RD			7
#define SH_RS1			15
#define SH_RS2			20
#define SH_RS2C			2

#define RV_X(x, s, n)		(((x) >> (s)) & ((1 << (n)) - 1))
#define RVC_LW_IMM(x)		((RV_X(x, 6, 1) << 2) | \
				 (RV_X(x, 10, 3) << 3) | \
				 (RV_X(x, 5, 1) << 6))
#define RVC_LD_IMM(x)		((RV_X(x, 10, 3) << 3) | \
				 (RV_X(x, 5, 2) << 6))
#define RVC_LWSP_IMM(x)		((RV_X(x, 4, 3) << 2) | \
				 (RV_X(x, 12, 1) << 5) | \
				 (RV_X(x, 2, 2) << 6))
#define RVC_LDSP_IMM(x)		((RV_X(x, 5, 2) << 3) | \
				 (RV_X(x, 12, 1) << 5) | \
				 (RV_X(x, 2, 3) << 6))
#define RVC_SWSP_IMM(x)		((RV_X(x, 9, 4) << 2) | \
				 (RV_X(x, 7, 2) << 6))
#define RVC_SDSP_IMM(x)		((RV_X(x, 10, 3) << 3) | \
				 (RV_X(x, 7, 3) << 6))
#define RVC_RS1S(insn)		(8 + RV_X(insn, SH_RD, 3))
#define RVC_RS2S(insn)		(8 + RV_X(insn, SH_RS2C, 3))
#define RVC_RS2(insn)		RV_X(insn, SH_RS2C, 5)
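
/*
 * struct pt_regs stores register xN at N * REGBYTES into the frame
 * (slot 0 holds epc, standing in for x0), so a register field pulled
 * out of an instruction converts directly into a byte offset;
 * REG_PTR() yields a pointer to the saved register value.
 */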
#define SHIFT_RIGHT(x, y)		\
	((y) < 0 ? ((x) << -(y)) : ((x) >> (y)))

#define REG_MASK			\
	((1 << (5 + LOG_REGBYTES)) - (1 << LOG_REGBYTES))

#define REG_OFFSET(insn, pos)		\
	(SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)

#define REG_PTR(insn, pos, regs)	\
	(ulong *)((ulong)(regs) + REG_OFFSET(insn, pos))
#define GET_RM(insn)		(((insn) >> 12) & 7)

#define GET_RS1(insn, regs)	(*REG_PTR(insn, SH_RS1, regs))
#define GET_RS2(insn, regs)	(*REG_PTR(insn, SH_RS2, regs))
#define GET_RS1S(insn, regs)	(*REG_PTR(RVC_RS1S(insn), 0, regs))
#define GET_RS2S(insn, regs)	(*REG_PTR(RVC_RS2S(insn), 0, regs))
#define GET_RS2C(insn, regs)	(*REG_PTR(insn, SH_RS2C, regs))
#define GET_SP(regs)		(*REG_PTR(2, 0, regs))
#define SET_RD(insn, regs, val)	(*REG_PTR(insn, SH_RD, regs) = (val))
#define IMM_I(insn)		((s32)(insn) >> 20)
#define IMM_S(insn)		(((s32)(insn) >> 25 << 5) | \
				 (s32)(((insn) >> 7) & 0x1f))
#define MASK_FUNCT3		0x7000
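
/* Precision (fmt) field of the FP instructions: 0 = single, 1 = double. */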
#define GET_PRECISION(insn) (((insn) >> 25) & 3)
#define PRECISION_S 0
#define PRECISION_D 1

#define STR(x) XSTR(x)
#define XSTR(x) #x
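
/*
 * Generate fixed-width memory accessors. The emulation below touches the
 * faulting address one naturally-aligned unit at a time, so each access
 * made through these helpers is itself aligned and cannot re-trap.
 */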
#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type, insn)		\
static inline type load_##type(const type *addr)		\
{								\
	type val;						\
	asm (#insn " %0, %1"					\
	: "=&r" (val) : "m" (*addr));				\
	return val;						\
}
#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type, insn)		\
static inline void store_##type(type *addr, type val)		\
{								\
	asm volatile (#insn " %0, %1\n"				\
	: : "r" (val), "m" (*addr));				\
}
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
#if defined(CONFIG_64BIT)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
#else
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)
static inline u64 load_u64(const u64 *addr)
{
	return load_u32((u32 *)addr)
		+ ((u64)load_u32((u32 *)addr + 1) << 32);
}

static inline void store_u64(u64 *addr, u64 val)
{
	store_u32((u32 *)addr, val);
	store_u32((u32 *)addr + 1, val >> 32);
}
#endif
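
/*
 * Fetch the instruction at mepc. The fetch itself must not assume word
 * alignment: if the address is only halfword aligned, read the
 * instruction as one or two halfwords, consulting the length bits to
 * decide whether the upper halfword is needed at all.
 */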
static inline ulong get_insn(ulong mepc)
{
	register ulong __mepc asm ("a2") = mepc;
	ulong val, rvc_mask = 3, tmp;

	asm ("and %[tmp], %[addr], 2\n"
		"bnez %[tmp], 1f\n"
#if defined(CONFIG_64BIT)
		STR(LWU) " %[insn], (%[addr])\n"
#else
		STR(LW) " %[insn], (%[addr])\n"
#endif
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"beq %[tmp], %[rvc_mask], 2f\n"
		"sll %[insn], %[insn], %[xlen_minus_16]\n"
		"srl %[insn], %[insn], %[xlen_minus_16]\n"
		"j 2f\n"
		"1:\n"
		"lhu %[insn], (%[addr])\n"
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"bne %[tmp], %[rvc_mask], 2f\n"
		"lhu %[tmp], 2(%[addr])\n"
		"sll %[tmp], %[tmp], 16\n"
		"add %[insn], %[insn], %[tmp]\n"
		"2:"
	: [insn] "=&r" (val), [tmp] "=&r" (tmp)
	: [addr] "r" (__mepc), [rvc_mask] "r" (rvc_mask),
	  [xlen_minus_16] "i" (XLEN_MINUS_16));

	return val;
}

/* Overlays byte, ulong and u64 views of the transferred data. */
union reg_data {
	u8 data_bytes[8];
	ulong data_ulong;
	u64 data_u64;
};
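
/*
 * Emulate a misaligned load: decode the trapping instruction, fetch the
 * data one byte at a time, write the extended result to rd, and advance
 * epc past the instruction. Returns -1 if the instruction cannot be
 * emulated (FP loads included, since FP state is not touched here).
 */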
int handle_misaligned_load(struct pt_regs *regs)
{
	union reg_data val;
	unsigned long epc = regs->epc;
	unsigned long insn = get_insn(epc);
	unsigned long addr = csr_read(mtval);
	int i, fp = 0, shift = 0, len = 0;

	regs->epc = 0;
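
	/*
	 * Decode the access width and, for the signed variants, the shift
	 * needed to sign-extend the result. The compressed forms remap
	 * their 3-bit register field into bits 11:7 so that the generic
	 * SET_RD() below picks the right destination.
	 */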
	if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) {
		len = 4;
#endif
	} else if ((insn & INSN_MASK_FLD) == INSN_MATCH_FLD) {
		fp = 1;
		len = 8;
	} else if ((insn & INSN_MASK_FLW) == INSN_MATCH_FLW) {
		fp = 1;
		len = 4;
	} else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) {
		len = 2;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) {
		len = 2;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
#endif
	} else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_C_FLD) == INSN_MATCH_C_FLD) {
		fp = 1;
		len = 8;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLDSP) == INSN_MATCH_C_FLDSP) {
		fp = 1;
		len = 8;
#if defined(CONFIG_32BIT)
	} else if ((insn & INSN_MASK_C_FLW) == INSN_MATCH_C_FLW) {
		fp = 1;
		len = 4;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLWSP) == INSN_MATCH_C_FLWSP) {
		fp = 1;
		len = 4;
#endif
	} else {
		regs->epc = epc;
		return -1;
	}

	val.data_u64 = 0;
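	/*
	 * Read the data a byte at a time; every access is aligned. The
	 * shift pair below sign-extends the sub-XLEN signed loads (shift
	 * is zero for LHU/LWU, which the u64 clear above already
	 * zero-extends), so the right shift must be arithmetic.
	 */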
	for (i = 0; i < len; i++)
		val.data_bytes[i] = load_u8((void *)(addr + i));

	if (fp)
		return -1;
	SET_RD(insn, regs, (long)(val.data_ulong << shift) >> shift);

	regs->epc = epc + INSN_LEN(insn);

	return 0;
}
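
/*
 * Emulate a misaligned store: decode the width and source register, then
 * write the register value out one byte at a time and advance epc.
 */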
int handle_misaligned_store(struct pt_regs *regs)
{
	union reg_data val;
	unsigned long epc = regs->epc;
	unsigned long insn = get_insn(epc);
	unsigned long addr = csr_read(mtval);
	int i, len = 0;

	regs->epc = 0;

	val.data_ulong = GET_RS2(insn, regs);
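
	/*
	 * Compressed stores keep rs2 in different fields, so the arms
	 * below re-fetch the source value with the matching accessor.
	 */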
	if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
		len = 4;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {
		len = 8;
#endif
	} else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) {
		len = 2;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) {
		len = 8;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		val.data_ulong = GET_RS2C(insn, regs);
#endif
	} else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) {
		len = 4;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		val.data_ulong = GET_RS2C(insn, regs);
	} else {
		regs->epc = epc;
		return -1;
	}
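
	/* Write out the bytes; each u8 store is naturally aligned. */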
	for (i = 0; i < len; i++)
		store_u8((void *)(addr + i), val.data_bytes[i]);

	regs->epc = epc + INSN_LEN(insn);

	return 0;
}