// SPDX-License-Identifier: GPL-2.0
/* BPF JIT compiler for RV64G
 *
 * Copyright(c) 2019 Björn Töpel <bjorn.topel@gmail.com>
 */

#include <linux/bpf.h>
#include <linux/filter.h>
#include <asm/cacheflush.h>
enum {
	RV_REG_ZERO = 0,	/* The constant value 0 */
	RV_REG_RA = 1,		/* Return address */
	RV_REG_SP = 2,		/* Stack pointer */
	RV_REG_GP = 3,		/* Global pointer */
	RV_REG_TP = 4,		/* Thread pointer */
	RV_REG_T0 = 5,		/* Temporaries */
	RV_REG_T1 = 6,
	RV_REG_T2 = 7,
	RV_REG_FP = 8,
	RV_REG_S1 = 9,		/* Saved registers */
	RV_REG_A0 = 10,		/* Function argument/return values */
	RV_REG_A1 = 11,		/* Function arguments */
	RV_REG_A2 = 12,
	RV_REG_A3 = 13,
	RV_REG_A4 = 14,
	RV_REG_A5 = 15,
	RV_REG_A6 = 16,
	RV_REG_A7 = 17,
	RV_REG_S2 = 18,		/* Saved registers */
	RV_REG_S3 = 19,
	RV_REG_S4 = 20,
	RV_REG_S5 = 21,
	RV_REG_S6 = 22,
	RV_REG_T3 = 28,		/* Temporaries */
};

#define RV_REG_TCC		RV_REG_A6
#define RV_REG_TCC_SAVED	RV_REG_S6 /* Store A6 in S6 if program does calls */
static const int regmap[] = {
	[BPF_REG_0] =	RV_REG_A5,
	[BPF_REG_1] =	RV_REG_A0,
	[BPF_REG_2] =	RV_REG_A1,
	[BPF_REG_3] =	RV_REG_A2,
	[BPF_REG_4] =	RV_REG_A3,
	[BPF_REG_5] =	RV_REG_A4,
	[BPF_REG_6] =	RV_REG_S1,
	[BPF_REG_7] =	RV_REG_S2,
	[BPF_REG_8] =	RV_REG_S3,
	[BPF_REG_9] =	RV_REG_S4,
	[BPF_REG_FP] =	RV_REG_S5,
	[BPF_REG_AX] =	RV_REG_T0,
};
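/* Editorial note (not in the original source): BPF_REG_0 is mapped to a5
 * rather than the RISC-V return register a0. The BPF_JMP | BPF_CALL case
 * below copies a0 into the mapped register after a helper call, and the
 * epilogue copies a5 back into a0 before returning.
 */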
enum {
	RV_CTX_F_SEEN_TAIL_CALL =	0,
	RV_CTX_F_SEEN_CALL =		RV_REG_RA,
	RV_CTX_F_SEEN_S1 =		RV_REG_S1,
	RV_CTX_F_SEEN_S2 =		RV_REG_S2,
	RV_CTX_F_SEEN_S3 =		RV_REG_S3,
	RV_CTX_F_SEEN_S4 =		RV_REG_S4,
	RV_CTX_F_SEEN_S5 =		RV_REG_S5,
	RV_CTX_F_SEEN_S6 =		RV_REG_S6,
};
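/* Editorial note (not in the original source): the RV_CTX_F_SEEN_* values
 * deliberately equal the corresponding register numbers, so bpf_to_rv_reg()
 * and seen_reg() can share one bitmap (ctx->flags) indexed by register to
 * track which callee-saved registers the program actually uses.
 */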
struct rv_jit_context {
	struct bpf_prog *prog;
	u32 *insns; /* RV insns */
	int ninsns;
	int epilogue_offset;
	int *offset; /* BPF to RV */
	unsigned long flags;
	int stack_size;
};

struct rv_jit_data {
	struct bpf_binary_header *header;
	u8 *image;
	struct rv_jit_context ctx;
};
static u8 bpf_to_rv_reg(int bpf_reg, struct rv_jit_context *ctx)
{
	u8 reg = regmap[bpf_reg];

	switch (reg) {
	case RV_CTX_F_SEEN_S1:
	case RV_CTX_F_SEEN_S2:
	case RV_CTX_F_SEEN_S3:
	case RV_CTX_F_SEEN_S4:
	case RV_CTX_F_SEEN_S5:
	case RV_CTX_F_SEEN_S6:
		__set_bit(reg, &ctx->flags);
	}
	return reg;
}
static bool seen_reg(int reg, struct rv_jit_context *ctx)
{
	switch (reg) {
	case RV_CTX_F_SEEN_CALL:
	case RV_CTX_F_SEEN_S1:
	case RV_CTX_F_SEEN_S2:
	case RV_CTX_F_SEEN_S3:
	case RV_CTX_F_SEEN_S4:
	case RV_CTX_F_SEEN_S5:
	case RV_CTX_F_SEEN_S6:
		return test_bit(reg, &ctx->flags);
	}
	return false;
}
static void mark_call(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
}

static bool seen_call(struct rv_jit_context *ctx)
{
	return test_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
}

static void mark_tail_call(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
}

static bool seen_tail_call(struct rv_jit_context *ctx)
{
	return test_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
}
static u8 rv_tail_call_reg(struct rv_jit_context *ctx)
{
	mark_tail_call(ctx);

	if (seen_call(ctx)) {
		__set_bit(RV_CTX_F_SEEN_S6, &ctx->flags);
		return RV_REG_S6;
	}
	return RV_REG_A6;
}
static void emit(const u32 insn, struct rv_jit_context *ctx)
{
	if (ctx->insns)
		ctx->insns[ctx->ninsns] = insn;

	ctx->ninsns++;
}
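/* Editorial note (not in the original source): emit() supports the JIT's
 * two-pass scheme. On the first pass ctx->insns is NULL, so only ninsns is
 * advanced to size the image; on the second pass the same calls write the
 * actual instruction words (see build_body() and bpf_int_jit_compile()).
 */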
static u32 rv_r_insn(u8 funct7, u8 rs2, u8 rs1, u8 funct3, u8 rd, u8 opcode)
{
	return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
		(rd << 7) | opcode;
}
static u32 rv_i_insn(u16 imm11_0, u8 rs1, u8 funct3, u8 rd, u8 opcode)
{
	return (imm11_0 << 20) | (rs1 << 15) | (funct3 << 12) | (rd << 7) |
		opcode;
}
static u32 rv_s_insn(u16 imm11_0, u8 rs2, u8 rs1, u8 funct3, u8 opcode)
{
	u8 imm11_5 = imm11_0 >> 5, imm4_0 = imm11_0 & 0x1f;

	return (imm11_5 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
		(imm4_0 << 7) | opcode;
}
static u32 rv_sb_insn(u16 imm12_1, u8 rs2, u8 rs1, u8 funct3, u8 opcode)
{
	u8 imm12 = ((imm12_1 & 0x800) >> 5) | ((imm12_1 & 0x3f0) >> 4);
	u8 imm4_1 = ((imm12_1 & 0xf) << 1) | ((imm12_1 & 0x400) >> 10);

	return (imm12 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
		(imm4_1 << 7) | opcode;
}
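/* Editorial note (not in the original source): the SB-type branch immediate
 * is scrambled across the word as imm[12|10:5] in bits 31:25 and
 * imm[4:1|11] in bits 11:7. The imm12_1 argument is the byte offset shifted
 * right by one (bit 0 is implicitly zero), which is why the branch emitters
 * below pass rvoff >> 1; e.g. rv_beq(rs1, rs2, 2) encodes "beq rs1, rs2, +4".
 */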
static u32 rv_u_insn(u32 imm31_12, u8 rd, u8 opcode)
{
	return (imm31_12 << 12) | (rd << 7) | opcode;
}
static u32 rv_uj_insn(u32 imm20_1, u8 rd, u8 opcode)
{
	u32 imm;

	imm = (imm20_1 & 0x80000) | ((imm20_1 & 0x3ff) << 9) |
	      ((imm20_1 & 0x400) >> 2) | ((imm20_1 & 0x7f800) >> 11);

	return (imm << 12) | (rd << 7) | opcode;
}
static u32 rv_amo_insn(u8 funct5, u8 aq, u8 rl, u8 rs2, u8 rs1,
		       u8 funct3, u8 rd, u8 opcode)
{
	u8 funct7 = (funct5 << 2) | (aq << 1) | rl;

	return rv_r_insn(funct7, rs2, rs1, funct3, rd, opcode);
}
static u32 rv_addiw(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 0, rd, 0x1b);
}

static u32 rv_addi(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 0, rd, 0x13);
}

static u32 rv_addw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 0, rd, 0x3b);
}

static u32 rv_add(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 0, rd, 0x33);
}

static u32 rv_subw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0x20, rs2, rs1, 0, rd, 0x3b);
}

static u32 rv_sub(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0x20, rs2, rs1, 0, rd, 0x33);
}

static u32 rv_and(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 7, rd, 0x33);
}

static u32 rv_or(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 6, rd, 0x33);
}

static u32 rv_xor(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 4, rd, 0x33);
}

static u32 rv_mulw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(1, rs2, rs1, 0, rd, 0x3b);
}

static u32 rv_mul(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(1, rs2, rs1, 0, rd, 0x33);
}

static u32 rv_divuw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(1, rs2, rs1, 5, rd, 0x3b);
}

static u32 rv_divu(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(1, rs2, rs1, 5, rd, 0x33);
}

static u32 rv_remuw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(1, rs2, rs1, 7, rd, 0x3b);
}

static u32 rv_remu(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(1, rs2, rs1, 7, rd, 0x33);
}

static u32 rv_sllw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 1, rd, 0x3b);
}

static u32 rv_sll(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 1, rd, 0x33);
}

static u32 rv_srlw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 5, rd, 0x3b);
}

static u32 rv_srl(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0, rs2, rs1, 5, rd, 0x33);
}

static u32 rv_sraw(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0x20, rs2, rs1, 5, rd, 0x3b);
}

static u32 rv_sra(u8 rd, u8 rs1, u8 rs2)
{
	return rv_r_insn(0x20, rs2, rs1, 5, rd, 0x33);
}
static u32 rv_lui(u8 rd, u32 imm31_12)
{
	return rv_u_insn(imm31_12, rd, 0x37);
}

static u32 rv_slli(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 1, rd, 0x13);
}

static u32 rv_andi(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 7, rd, 0x13);
}

static u32 rv_ori(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 6, rd, 0x13);
}

static u32 rv_xori(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 4, rd, 0x13);
}

static u32 rv_slliw(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 1, rd, 0x1b);
}

static u32 rv_srliw(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 5, rd, 0x1b);
}

static u32 rv_srli(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 5, rd, 0x13);
}

static u32 rv_sraiw(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(0x400 | imm11_0, rs1, 5, rd, 0x1b);
}

static u32 rv_srai(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(0x400 | imm11_0, rs1, 5, rd, 0x13);
}

static u32 rv_jal(u8 rd, u32 imm20_1)
{
	return rv_uj_insn(imm20_1, rd, 0x6f);
}

static u32 rv_jalr(u8 rd, u8 rs1, u16 imm11_0)
{
	return rv_i_insn(imm11_0, rs1, 0, rd, 0x67);
}
static u32 rv_beq(u8 rs1, u8 rs2, u16 imm12_1)
{
	return rv_sb_insn(imm12_1, rs2, rs1, 0, 0x63);
}

static u32 rv_bltu(u8 rs1, u8 rs2, u16 imm12_1)
{
	return rv_sb_insn(imm12_1, rs2, rs1, 6, 0x63);
}

static u32 rv_bgeu(u8 rs1, u8 rs2, u16 imm12_1)
{
	return rv_sb_insn(imm12_1, rs2, rs1, 7, 0x63);
}

static u32 rv_bne(u8 rs1, u8 rs2, u16 imm12_1)
{
	return rv_sb_insn(imm12_1, rs2, rs1, 1, 0x63);
}

static u32 rv_blt(u8 rs1, u8 rs2, u16 imm12_1)
{
	return rv_sb_insn(imm12_1, rs2, rs1, 4, 0x63);
}

static u32 rv_bge(u8 rs1, u8 rs2, u16 imm12_1)
{
	return rv_sb_insn(imm12_1, rs2, rs1, 5, 0x63);
}
static u32 rv_sb(u8 rs1, u16 imm11_0, u8 rs2)
{
	return rv_s_insn(imm11_0, rs2, rs1, 0, 0x23);
}

static u32 rv_sh(u8 rs1, u16 imm11_0, u8 rs2)
{
	return rv_s_insn(imm11_0, rs2, rs1, 1, 0x23);
}

static u32 rv_sw(u8 rs1, u16 imm11_0, u8 rs2)
{
	return rv_s_insn(imm11_0, rs2, rs1, 2, 0x23);
}

static u32 rv_sd(u8 rs1, u16 imm11_0, u8 rs2)
{
	return rv_s_insn(imm11_0, rs2, rs1, 3, 0x23);
}

static u32 rv_lbu(u8 rd, u16 imm11_0, u8 rs1)
{
	return rv_i_insn(imm11_0, rs1, 4, rd, 0x03);
}

static u32 rv_lhu(u8 rd, u16 imm11_0, u8 rs1)
{
	return rv_i_insn(imm11_0, rs1, 5, rd, 0x03);
}

static u32 rv_lwu(u8 rd, u16 imm11_0, u8 rs1)
{
	return rv_i_insn(imm11_0, rs1, 6, rd, 0x03);
}

static u32 rv_ld(u8 rd, u16 imm11_0, u8 rs1)
{
	return rv_i_insn(imm11_0, rs1, 3, rd, 0x03);
}

static u32 rv_amoadd_w(u8 rd, u8 rs2, u8 rs1, u8 aq, u8 rl)
{
	return rv_amo_insn(0, aq, rl, rs2, rs1, 2, rd, 0x2f);
}

static u32 rv_amoadd_d(u8 rd, u8 rs2, u8 rs1, u8 aq, u8 rl)
{
	return rv_amo_insn(0, aq, rl, rs2, rs1, 3, rd, 0x2f);
}
static bool is_12b_int(s64 val)
{
	return -(1 << 11) <= val && val < (1 << 11);
}

static bool is_13b_int(s64 val)
{
	return -(1 << 12) <= val && val < (1 << 12);
}

static bool is_21b_int(s64 val)
{
	return -(1L << 20) <= val && val < (1L << 20);
}

static bool is_32b_int(s64 val)
{
	return -(1L << 31) <= val && val < (1L << 31);
}
static int is_12b_check(int off, int insn)
{
	if (!is_12b_int(off)) {
		pr_err("bpf-jit: insn=%d offset=%d not supported yet!\n",
		       insn, off);
		return -1;
	}
	return 0;
}

static int is_13b_check(int off, int insn)
{
	if (!is_13b_int(off)) {
		pr_err("bpf-jit: insn=%d offset=%d not supported yet!\n",
		       insn, off);
		return -1;
	}
	return 0;
}

static int is_21b_check(int off, int insn)
{
	if (!is_21b_int(off)) {
		pr_err("bpf-jit: insn=%d offset=%d not supported yet!\n",
		       insn, off);
		return -1;
	}
	return 0;
}
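/* Editorial note (not in the original source): the checked widths match the
 * instruction formats used below: 12 bits for I/S-type immediates (loads,
 * stores, addi), 13 bits for SB-type branch offsets (+-4 KiB), and 21 bits
 * for UJ-type jal offsets (+-1 MiB). An out-of-range offset makes the JIT
 * bail out ("not supported yet") instead of emitting a longer sequence.
 */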
static void emit_imm(u8 rd, s64 val, struct rv_jit_context *ctx)
{
	/* Note that the immediate from the add is sign-extended,
	 * which means that we need to compensate this by adding 2^12,
	 * when the 12th bit is set. A simpler way of doing this, and
	 * getting rid of the check, is to just add 2**11 before the
	 * shift. The "Loading a 32-Bit constant" example from the
	 * "Computer Organization and Design, RISC-V edition" book by
	 * Patterson/Hennessy highlights this fact.
	 *
	 * This also means that we need to process LSB to MSB.
	 */
	s64 upper = (val + (1 << 11)) >> 12, lower = val & 0xfff;
	int shift;

	if (is_32b_int(val)) {
		if (upper)
			emit(rv_lui(rd, upper), ctx);

		if (!upper) {
			emit(rv_addi(rd, RV_REG_ZERO, lower), ctx);
			return;
		}

		emit(rv_addiw(rd, rd, lower), ctx);
		return;
	}

	shift = __ffs(upper);
	upper >>= shift;
	shift += 12;

	emit_imm(rd, upper, ctx);

	emit(rv_slli(rd, rd, shift), ctx);
	if (lower)
		emit(rv_addi(rd, rd, lower), ctx);
}
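/* Editorial worked example (not in the original source): for val = 0xfff
 * the 12th bit is set, so lower = 0xfff sign-extends to -1 in the add.
 * Pre-adding 2^11 gives upper = (0xfff + 0x800) >> 12 = 1, so we emit
 * "lui rd, 1; addiw rd, rd, -1", i.e. 0x1000 - 1 = 0xfff as intended.
 */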
static int rv_offset(int bpf_to, int bpf_from, struct rv_jit_context *ctx)
{
	int from = ctx->offset[bpf_from] - 1, to = ctx->offset[bpf_to];

	return (to - from) << 2;
}
static int epilogue_offset(struct rv_jit_context *ctx)
{
	int to = ctx->epilogue_offset, from = ctx->ninsns;

	return (to - from) << 2;
}
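/* Editorial note (not in the original source): ctx->offset[i] records the
 * RV instruction count after BPF insn i has been emitted (see build_body()),
 * so "to - from" is a distance in 4-byte RV instructions and << 2 converts
 * it to bytes. The branch/jump emitters then pass rvoff >> 1, since SB/UJ
 * immediates encode the offset in units of 2 bytes.
 */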
static void __build_epilogue(u8 reg, struct rv_jit_context *ctx)
{
	int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8;

	if (seen_reg(RV_REG_RA, ctx)) {
		emit(rv_ld(RV_REG_RA, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	emit(rv_ld(RV_REG_FP, store_offset, RV_REG_SP), ctx);
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit(rv_ld(RV_REG_S1, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S2, ctx)) {
		emit(rv_ld(RV_REG_S2, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S3, ctx)) {
		emit(rv_ld(RV_REG_S3, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S4, ctx)) {
		emit(rv_ld(RV_REG_S4, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S5, ctx)) {
		emit(rv_ld(RV_REG_S5, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S6, ctx)) {
		emit(rv_ld(RV_REG_S6, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}

	emit(rv_addi(RV_REG_SP, RV_REG_SP, stack_adjust), ctx);
	/* Set return value. */
	emit(rv_addi(RV_REG_A0, RV_REG_A5, 0), ctx);
	emit(rv_jalr(RV_REG_ZERO, reg, 0), ctx);
}
static void emit_zext_32(u8 reg, struct rv_jit_context *ctx)
{
	emit(rv_slli(reg, reg, 32), ctx);
	emit(rv_srli(reg, reg, 32), ctx);
}
static int emit_bpf_tail_call(int insn, struct rv_jit_context *ctx)
{
	int tc_ninsn, off, start_insn = ctx->ninsns;
	u8 tcc = rv_tail_call_reg(ctx);

	/* a0: &ctx
	 * a1: &array
	 * a2: index
	 *
	 * if (index >= array->map.max_entries)
	 *	goto out;
	 */
	tc_ninsn = insn ? ctx->offset[insn] - ctx->offset[insn - 1] :
		   ctx->offset[0];
	emit_zext_32(RV_REG_A2, ctx);

	off = offsetof(struct bpf_array, map.max_entries);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_lwu(RV_REG_T1, off, RV_REG_A1), ctx);
	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
	if (is_13b_check(off, insn))
		return -1;
	emit(rv_bgeu(RV_REG_A2, RV_REG_T1, off >> 1), ctx);

	/* if (TCC-- < 0)
	 *	goto out;
	 */
	emit(rv_addi(RV_REG_T1, tcc, -1), ctx);
	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
	if (is_13b_check(off, insn))
		return -1;
	emit(rv_blt(RV_REG_T1, RV_REG_ZERO, off >> 1), ctx);

	/* prog = array->ptrs[index];
	 * if (!prog)
	 *	goto out;
	 */
	emit(rv_slli(RV_REG_T2, RV_REG_A2, 3), ctx);
	emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_A1), ctx);
	off = offsetof(struct bpf_array, ptrs);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_ld(RV_REG_T2, off, RV_REG_T2), ctx);
	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
	if (is_13b_check(off, insn))
		return -1;
	emit(rv_beq(RV_REG_T2, RV_REG_ZERO, off >> 1), ctx);

	/* goto *(prog->bpf_func + 4); */
	off = offsetof(struct bpf_prog, bpf_func);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_ld(RV_REG_T3, off, RV_REG_T2), ctx);
	emit(rv_addi(RV_REG_T3, RV_REG_T3, 4), ctx);
	emit(rv_addi(RV_REG_TCC, RV_REG_T1, 0), ctx);
	__build_epilogue(RV_REG_T3, ctx);
	return 0;
}
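/* Editorial note (not in the original source): each bailout branch uses
 * off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2, i.e. the distance
 * remaining to the end of this tail-call sequence, so all three "goto out"
 * branches fall through to the BPF instruction after the tail call. The
 * "+ 4" on bpf_func skips the target's first instruction, which is the TCC
 * setup that build_prologue() always emits first.
 */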
static void init_regs(u8 *rd, u8 *rs, const struct bpf_insn *insn,
		      struct rv_jit_context *ctx)
{
	u8 code = insn->code;

	switch (code) {
	case BPF_JMP | BPF_JA:
	case BPF_JMP | BPF_CALL:
	case BPF_JMP | BPF_EXIT:
	case BPF_JMP | BPF_TAIL_CALL:
		break;
	default:
		*rd = bpf_to_rv_reg(insn->dst_reg, ctx);
	}

	if (code & (BPF_ALU | BPF_X) || code & (BPF_ALU64 | BPF_X) ||
	    code & (BPF_JMP | BPF_X) || code & (BPF_JMP32 | BPF_X) ||
	    code & BPF_LDX || code & BPF_STX)
		*rs = bpf_to_rv_reg(insn->src_reg, ctx);
}
static int rv_offset_check(int *rvoff, s16 off, int insn,
			   struct rv_jit_context *ctx)
{
	*rvoff = rv_offset(insn + off, insn, ctx);
	return is_13b_check(*rvoff, insn);
}
static void emit_zext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
{
	emit(rv_addi(RV_REG_T2, *rd, 0), ctx);
	emit_zext_32(RV_REG_T2, ctx);
	emit(rv_addi(RV_REG_T1, *rs, 0), ctx);
	emit_zext_32(RV_REG_T1, ctx);
	*rd = RV_REG_T2;
	*rs = RV_REG_T1;
}
static void emit_sext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
{
	emit(rv_addiw(RV_REG_T2, *rd, 0), ctx);
	emit(rv_addiw(RV_REG_T1, *rs, 0), ctx);
	*rd = RV_REG_T2;
	*rs = RV_REG_T1;
}
static void emit_zext_32_rd_t1(u8 *rd, struct rv_jit_context *ctx)
{
	emit(rv_addi(RV_REG_T2, *rd, 0), ctx);
	emit_zext_32(RV_REG_T2, ctx);
	emit_zext_32(RV_REG_T1, ctx);
	*rd = RV_REG_T2;
}
static void emit_sext_32_rd(u8 *rd, struct rv_jit_context *ctx)
{
	emit(rv_addiw(RV_REG_T2, *rd, 0), ctx);
	*rd = RV_REG_T2;
}
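/* Editorial note (not in the original source): these helpers implement
 * BPF_JMP32 comparisons by copying dst/src into the temporaries t1/t2 and
 * zero- or sign-extending the copies, so that a 32-bit comparison does not
 * clobber the full 64-bit values in the mapped registers; the callers then
 * redirect rd/rs to the temporaries.
 */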
static int emit_insn(const struct bpf_insn *insn, struct rv_jit_context *ctx,
		     bool extra_pass)
{
	bool is64 = BPF_CLASS(insn->code) == BPF_ALU64 ||
		    BPF_CLASS(insn->code) == BPF_JMP;
	int rvoff, i = insn - ctx->prog->insnsi;
	u8 rd = -1, rs = -1, code = insn->code;
	s16 off = insn->off;
	s32 imm = insn->imm;

	init_regs(&rd, &rs, insn, ctx);

	switch (code) {
	/* dst = src */
	case BPF_ALU | BPF_MOV | BPF_X:
	case BPF_ALU64 | BPF_MOV | BPF_X:
		emit(is64 ? rv_addi(rd, rs, 0) : rv_addiw(rd, rs, 0), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;

	/* dst = dst OP src */
	case BPF_ALU | BPF_ADD | BPF_X:
	case BPF_ALU64 | BPF_ADD | BPF_X:
		emit(is64 ? rv_add(rd, rd, rs) : rv_addw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_SUB | BPF_X:
	case BPF_ALU64 | BPF_SUB | BPF_X:
		emit(is64 ? rv_sub(rd, rd, rs) : rv_subw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_AND | BPF_X:
	case BPF_ALU64 | BPF_AND | BPF_X:
		emit(rv_and(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_OR | BPF_X:
	case BPF_ALU64 | BPF_OR | BPF_X:
		emit(rv_or(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_XOR | BPF_X:
	case BPF_ALU64 | BPF_XOR | BPF_X:
		emit(rv_xor(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MUL | BPF_X:
	case BPF_ALU64 | BPF_MUL | BPF_X:
		emit(is64 ? rv_mul(rd, rd, rs) : rv_mulw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_DIV | BPF_X:
	case BPF_ALU64 | BPF_DIV | BPF_X:
		emit(is64 ? rv_divu(rd, rd, rs) : rv_divuw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MOD | BPF_X:
	case BPF_ALU64 | BPF_MOD | BPF_X:
		emit(is64 ? rv_remu(rd, rd, rs) : rv_remuw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_LSH | BPF_X:
	case BPF_ALU64 | BPF_LSH | BPF_X:
		emit(is64 ? rv_sll(rd, rd, rs) : rv_sllw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_RSH | BPF_X:
	case BPF_ALU64 | BPF_RSH | BPF_X:
		emit(is64 ? rv_srl(rd, rd, rs) : rv_srlw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_ARSH | BPF_X:
	case BPF_ALU64 | BPF_ARSH | BPF_X:
		emit(is64 ? rv_sra(rd, rd, rs) : rv_sraw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;

	/* dst = -dst */
	case BPF_ALU | BPF_NEG:
	case BPF_ALU64 | BPF_NEG:
		emit(is64 ? rv_sub(rd, RV_REG_ZERO, rd) :
		     rv_subw(rd, RV_REG_ZERO, rd), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	/* dst = BSWAP##imm(dst) */
	case BPF_ALU | BPF_END | BPF_FROM_LE:
	{
		int shift = 64 - imm;

		emit(rv_slli(rd, rd, shift), ctx);
		emit(rv_srli(rd, rd, shift), ctx);
		break;
	}
	case BPF_ALU | BPF_END | BPF_FROM_BE:
		emit(rv_addi(RV_REG_T2, RV_REG_ZERO, 0), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);
		if (imm == 16)
			goto out_be;

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);
		if (imm == 32)
			goto out_be;

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
out_be:
		emit(rv_addi(rd, RV_REG_T2, 0), ctx);

		break;
	/* dst = imm */
	case BPF_ALU | BPF_MOV | BPF_K:
	case BPF_ALU64 | BPF_MOV | BPF_K:
		emit_imm(rd, imm, ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;

	/* dst = dst OP imm */
	case BPF_ALU | BPF_ADD | BPF_K:
	case BPF_ALU64 | BPF_ADD | BPF_K:
		if (is_12b_int(imm)) {
			emit(is64 ? rv_addi(rd, rd, imm) :
			     rv_addiw(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(is64 ? rv_add(rd, rd, RV_REG_T1) :
			     rv_addw(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_SUB | BPF_K:
	case BPF_ALU64 | BPF_SUB | BPF_K:
		if (is_12b_int(-imm)) {
			emit(is64 ? rv_addi(rd, rd, -imm) :
			     rv_addiw(rd, rd, -imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(is64 ? rv_sub(rd, rd, RV_REG_T1) :
			     rv_subw(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_AND | BPF_K:
	case BPF_ALU64 | BPF_AND | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_andi(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(rv_and(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_OR | BPF_K:
	case BPF_ALU64 | BPF_OR | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_ori(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(rv_or(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_XOR | BPF_K:
	case BPF_ALU64 | BPF_XOR | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_xori(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(rv_xor(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MUL | BPF_K:
	case BPF_ALU64 | BPF_MUL | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_mul(rd, rd, RV_REG_T1) :
		     rv_mulw(rd, rd, RV_REG_T1), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_DIV | BPF_K:
	case BPF_ALU64 | BPF_DIV | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_divu(rd, rd, RV_REG_T1) :
		     rv_divuw(rd, rd, RV_REG_T1), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MOD | BPF_K:
	case BPF_ALU64 | BPF_MOD | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_remu(rd, rd, RV_REG_T1) :
		     rv_remuw(rd, rd, RV_REG_T1), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_LSH | BPF_K:
	case BPF_ALU64 | BPF_LSH | BPF_K:
		emit(is64 ? rv_slli(rd, rd, imm) : rv_slliw(rd, rd, imm), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_RSH | BPF_K:
	case BPF_ALU64 | BPF_RSH | BPF_K:
		emit(is64 ? rv_srli(rd, rd, imm) : rv_srliw(rd, rd, imm), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_ARSH | BPF_K:
	case BPF_ALU64 | BPF_ARSH | BPF_K:
		emit(is64 ? rv_srai(rd, rd, imm) : rv_sraiw(rd, rd, imm), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	/* JUMP off */
	case BPF_JMP | BPF_JA:
		rvoff = rv_offset(i + off, i, ctx);
		if (!is_21b_int(rvoff)) {
			pr_err("bpf-jit: insn=%d offset=%d not supported yet!\n",
			       i, rvoff);
			return -1;
		}

		emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx);
		break;

	/* IF (dst COND src) JUMP off */
	case BPF_JMP | BPF_JEQ | BPF_X:
	case BPF_JMP32 | BPF_JEQ | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_beq(rd, rs, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JGT | BPF_X:
	case BPF_JMP32 | BPF_JGT | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bltu(rs, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JLT | BPF_X:
	case BPF_JMP32 | BPF_JLT | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bltu(rd, rs, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JGE | BPF_X:
	case BPF_JMP32 | BPF_JGE | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bgeu(rd, rs, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JLE | BPF_X:
	case BPF_JMP32 | BPF_JLE | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bgeu(rs, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JNE | BPF_X:
	case BPF_JMP32 | BPF_JNE | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bne(rd, rs, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSGT | BPF_X:
	case BPF_JMP32 | BPF_JSGT | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_sext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_blt(rs, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSLT | BPF_X:
	case BPF_JMP32 | BPF_JSLT | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_sext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_blt(rd, rs, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSGE | BPF_X:
	case BPF_JMP32 | BPF_JSGE | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_sext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bge(rd, rs, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSLE | BPF_X:
	case BPF_JMP32 | BPF_JSLE | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_sext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_bge(rs, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSET | BPF_X:
	case BPF_JMP32 | BPF_JSET | BPF_X:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		if (!is64)
			emit_zext_32_rd_rs(&rd, &rs, ctx);
		emit(rv_and(RV_REG_T1, rd, rs), ctx);
		emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx);
		break;
	/* IF (dst COND imm) JUMP off */
	case BPF_JMP | BPF_JEQ | BPF_K:
	case BPF_JMP32 | BPF_JEQ | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_beq(rd, RV_REG_T1, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JGT | BPF_K:
	case BPF_JMP32 | BPF_JGT | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_bltu(RV_REG_T1, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JLT | BPF_K:
	case BPF_JMP32 | BPF_JLT | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_bltu(rd, RV_REG_T1, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JGE | BPF_K:
	case BPF_JMP32 | BPF_JGE | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_bgeu(rd, RV_REG_T1, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JLE | BPF_K:
	case BPF_JMP32 | BPF_JLE | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_bgeu(RV_REG_T1, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JNE | BPF_K:
	case BPF_JMP32 | BPF_JNE | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_bne(rd, RV_REG_T1, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSGT | BPF_K:
	case BPF_JMP32 | BPF_JSGT | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_sext_32_rd(&rd, ctx);
		emit(rv_blt(RV_REG_T1, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSLT | BPF_K:
	case BPF_JMP32 | BPF_JSLT | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_sext_32_rd(&rd, ctx);
		emit(rv_blt(rd, RV_REG_T1, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSGE | BPF_K:
	case BPF_JMP32 | BPF_JSGE | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_sext_32_rd(&rd, ctx);
		emit(rv_bge(rd, RV_REG_T1, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSLE | BPF_K:
	case BPF_JMP32 | BPF_JSLE | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_sext_32_rd(&rd, ctx);
		emit(rv_bge(RV_REG_T1, rd, rvoff >> 1), ctx);
		break;
	case BPF_JMP | BPF_JSET | BPF_K:
	case BPF_JMP32 | BPF_JSET | BPF_K:
		if (rv_offset_check(&rvoff, off, i, ctx))
			return -1;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64)
			emit_zext_32_rd_t1(&rd, ctx);
		emit(rv_and(RV_REG_T1, rd, RV_REG_T1), ctx);
		emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx);
		break;
	/* function call */
	case BPF_JMP | BPF_CALL:
	{
		bool fixed;
		int ret;
		u64 addr;

		mark_call(ctx);
		ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, &addr,
					    &fixed);
		if (ret < 0)
			return ret;
		if (fixed) {
			emit_imm(RV_REG_T1, addr, ctx);
		} else {
			i = ctx->ninsns;
			emit_imm(RV_REG_T1, addr, ctx);
			for (i = ctx->ninsns - i; i < 8; i++) {
				/* nop */
				emit(rv_addi(RV_REG_ZERO, RV_REG_ZERO, 0),
				     ctx);
			}
		}
		emit(rv_jalr(RV_REG_RA, RV_REG_T1, 0), ctx);
		rd = bpf_to_rv_reg(BPF_REG_0, ctx);
		emit(rv_addi(rd, RV_REG_A0, 0), ctx);
		break;
	}
	/* tail call */
	case BPF_JMP | BPF_TAIL_CALL:
		if (emit_bpf_tail_call(i, ctx))
			return -1;
		break;

	/* function return */
	case BPF_JMP | BPF_EXIT:
		if (i == ctx->prog->len - 1)
			break;

		rvoff = epilogue_offset(ctx);
		if (is_21b_check(rvoff, i))
			return -1;
		emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx);
		break;

	/* dst = imm64 */
	case BPF_LD | BPF_IMM | BPF_DW:
	{
		struct bpf_insn insn1 = insn[1];
		u64 imm64;

		imm64 = (u64)insn1.imm << 32 | (u32)imm;
		emit_imm(rd, imm64, ctx);
		return 1;
	}
	/* LDX: dst = *(size *)(src + off) */
	case BPF_LDX | BPF_MEM | BPF_B:
		if (is_12b_int(off)) {
			emit(rv_lbu(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_lbu(rd, 0, RV_REG_T1), ctx);
		break;
	case BPF_LDX | BPF_MEM | BPF_H:
		if (is_12b_int(off)) {
			emit(rv_lhu(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_lhu(rd, 0, RV_REG_T1), ctx);
		break;
	case BPF_LDX | BPF_MEM | BPF_W:
		if (is_12b_int(off)) {
			emit(rv_lwu(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_lwu(rd, 0, RV_REG_T1), ctx);
		break;
	case BPF_LDX | BPF_MEM | BPF_DW:
		if (is_12b_int(off)) {
			emit(rv_ld(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_ld(rd, 0, RV_REG_T1), ctx);
		break;

	/* ST: *(size *)(dst + off) = imm */
	case BPF_ST | BPF_MEM | BPF_B:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sb(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sb(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_H:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sh(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sh(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_W:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sw(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sw(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_DW:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sd(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sd(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	/* STX: *(size *)(dst + off) = src */
	case BPF_STX | BPF_MEM | BPF_B:
		if (is_12b_int(off)) {
			emit(rv_sb(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sb(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_H:
		if (is_12b_int(off)) {
			emit(rv_sh(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sh(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_W:
		if (is_12b_int(off)) {
			emit(rv_sw(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sw(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_DW:
		if (is_12b_int(off)) {
			emit(rv_sd(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sd(RV_REG_T1, 0, rs), ctx);
		break;
	/* STX XADD: lock *(u32 *)(dst + off) += src */
	case BPF_STX | BPF_XADD | BPF_W:
	/* STX XADD: lock *(u64 *)(dst + off) += src */
	case BPF_STX | BPF_XADD | BPF_DW:
		if (off) {
			if (is_12b_int(off)) {
				emit(rv_addi(RV_REG_T1, rd, off), ctx);
			} else {
				emit_imm(RV_REG_T1, off, ctx);
				emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
			}

			rd = RV_REG_T1;
		}

		emit(BPF_SIZE(code) == BPF_W ?
		     rv_amoadd_w(RV_REG_ZERO, rs, rd, 0, 0) :
		     rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0), ctx);
		break;

	default:
		pr_err("bpf-jit: unknown opcode %02x\n", code);
		return -EINVAL;
	}

	return 0;
}
static void build_prologue(struct rv_jit_context *ctx)
{
	int stack_adjust = 0, store_offset, bpf_stack_adjust;

	if (seen_reg(RV_REG_RA, ctx))
		stack_adjust += 8;
	stack_adjust += 8; /* RV_REG_FP */
	if (seen_reg(RV_REG_S1, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S2, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S3, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S4, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S5, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S6, ctx))
		stack_adjust += 8;

	stack_adjust = round_up(stack_adjust, 16);
	bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16);
	stack_adjust += bpf_stack_adjust;

	store_offset = stack_adjust - 8;

	/* First instruction is always setting the tail-call-counter
	 * (TCC) register. This instruction is skipped for tail calls.
	 */
	emit(rv_addi(RV_REG_TCC, RV_REG_ZERO, MAX_TAIL_CALL_CNT), ctx);

	emit(rv_addi(RV_REG_SP, RV_REG_SP, -stack_adjust), ctx);

	if (seen_reg(RV_REG_RA, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_RA), ctx);
		store_offset -= 8;
	}
	emit(rv_sd(RV_REG_SP, store_offset, RV_REG_FP), ctx);
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S1), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S2, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S2), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S3, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S3), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S4, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S4), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S5, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S5), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S6, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S6), ctx);
		store_offset -= 8;
	}

	emit(rv_addi(RV_REG_FP, RV_REG_SP, stack_adjust), ctx);

	if (bpf_stack_adjust)
		emit(rv_addi(RV_REG_S5, RV_REG_SP, bpf_stack_adjust), ctx);

	/* Program contains calls and tail calls, so RV_REG_TCC needs
	 * to be saved across calls.
	 */
	if (seen_tail_call(ctx) && seen_call(ctx))
		emit(rv_addi(RV_REG_TCC_SAVED, RV_REG_TCC, 0), ctx);

	ctx->stack_size = stack_adjust;
}
static void build_epilogue(struct rv_jit_context *ctx)
{
	__build_epilogue(RV_REG_RA, ctx);
}
static int build_body(struct rv_jit_context *ctx, bool extra_pass)
{
	const struct bpf_prog *prog = ctx->prog;
	int i;

	for (i = 0; i < prog->len; i++) {
		const struct bpf_insn *insn = &prog->insnsi[i];
		int ret;

		ret = emit_insn(insn, ctx, extra_pass);
		if (ret > 0) {
			i++;
			if (ctx->insns == NULL)
				ctx->offset[i] = ctx->ninsns;
			continue;
		}
		if (ctx->insns == NULL)
			ctx->offset[i] = ctx->ninsns;
		if (ret)
			return ret;
	}

	return 0;
}
static void bpf_fill_ill_insns(void *area, unsigned int size)
{
	memset(area, 0, size);
}
static void bpf_flush_icache(void *start, void *end)
{
	flush_icache_range((unsigned long)start, (unsigned long)end);
}
struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog)
{
	bool tmp_blinded = false, extra_pass = false;
	struct bpf_prog *tmp, *orig_prog = prog;
	struct rv_jit_data *jit_data;
	struct rv_jit_context *ctx;
	unsigned int image_size;

	if (!prog->jit_requested)
		return orig_prog;

	tmp = bpf_jit_blind_constants(prog);
	if (IS_ERR(tmp))
		return orig_prog;
	if (tmp != prog) {
		tmp_blinded = true;
		prog = tmp;
	}

	jit_data = prog->aux->jit_data;
	if (!jit_data) {
		jit_data = kzalloc(sizeof(*jit_data), GFP_KERNEL);
		if (!jit_data) {
			prog = orig_prog;
			goto out;
		}
		prog->aux->jit_data = jit_data;
	}

	ctx = &jit_data->ctx;

	if (ctx->offset) {
		extra_pass = true;
		image_size = sizeof(u32) * ctx->ninsns;
		goto skip_init_ctx;
	}

	ctx->prog = prog;
	ctx->offset = kcalloc(prog->len, sizeof(int), GFP_KERNEL);
	if (!ctx->offset) {
		prog = orig_prog;
		goto out_offset;
	}

	/* First pass generates the ctx->offset, but does not emit an image. */
	if (build_body(ctx, extra_pass)) {
		prog = orig_prog;
		goto out_offset;
	}
	build_prologue(ctx);
	ctx->epilogue_offset = ctx->ninsns;
	build_epilogue(ctx);

	/* Allocate image, now that we know the size. */
	image_size = sizeof(u32) * ctx->ninsns;
	jit_data->header = bpf_jit_binary_alloc(image_size, &jit_data->image,
						sizeof(u32),
						bpf_fill_ill_insns);
	if (!jit_data->header) {
		prog = orig_prog;
		goto out_offset;
	}

	/* Second, real pass, that actually emits the image. */
	ctx->insns = (u32 *)jit_data->image;
skip_init_ctx:
	ctx->ninsns = 0;

	build_prologue(ctx);
	if (build_body(ctx, extra_pass)) {
		bpf_jit_binary_free(jit_data->header);
		prog = orig_prog;
		goto out_offset;
	}
	build_epilogue(ctx);

	if (bpf_jit_enable > 1)
		bpf_jit_dump(prog->len, image_size, 2, ctx->insns);

	prog->bpf_func = (void *)ctx->insns;
	prog->jited = 1;
	prog->jited_len = image_size;

	bpf_flush_icache(jit_data->header, ctx->insns + ctx->ninsns);

	if (!prog->is_func || extra_pass) {
out_offset:
		kfree(ctx->offset);
		kfree(jit_data);
		prog->aux->jit_data = NULL;
	}
out:
	if (tmp_blinded)
		bpf_jit_prog_release_other(prog, prog == orig_prog ?
					   tmp : orig_prog);
	return prog;
}