// SPDX-License-Identifier: GPL-2.0
/*
 * Just-In-Time compiler for eBPF filters on IA32 (32bit x86)
 *
 * Author: Wang YanQing (udknight@gmail.com)
 * The code is based on code and ideas from:
 * Eric Dumazet (eric.dumazet@gmail.com)
 * Shubham Bansal <illusionist.neo@gmail.com>
 */
#include <linux/netdevice.h>
#include <linux/filter.h>
#include <linux/if_vlan.h>
#include <asm/cacheflush.h>
#include <asm/set_memory.h>
#include <asm/nospec-branch.h>
#include <linux/bpf.h>
/*
 * eBPF prog stack layout:
 *
 *                         high
 * original ESP =>        +-----+
 *                        |     | callee saved registers
 *                        +-----+
 *                        | ... | eBPF JIT scratch space
 * BPF_FP,IA32_EBP =>     +-----+
 *                        | ... | eBPF prog stack
 *                        +-----+
 *                        |RSVD | JIT scratchpad
 * current ESP =>         +-----+
 *                        |     |
 *                        | ... | Function call stack
 *                        |     |
 *                        +-----+
 *                          low
 *
 * The callee saved registers:
 *
 *                                high
 * original ESP =>        +------------------+ \
 *                        |        ebp       | |
 * current EBP =>         +------------------+ } callee saved registers
 *                        |    ebx,esi,edi   | |
 *                        +------------------+ /
 *                                low
 */
static u8 *emit_code(u8 *ptr, u32 bytes, unsigned int len)
{
	if (len == 1)
		*ptr = bytes;
	else if (len == 2)
		*(u16 *)ptr = bytes;
	else {
		*(u32 *)ptr = bytes;
		barrier();
	}
	return ptr + len;
}
#define EMIT(bytes, len) \
	do { prog = emit_code(prog, bytes, len); cnt += len; } while (0)

#define EMIT1(b1)		EMIT(b1, 1)
#define EMIT2(b1, b2)		EMIT((b1) + ((b2) << 8), 2)
#define EMIT3(b1, b2, b3)	EMIT((b1) + ((b2) << 8) + ((b3) << 16), 3)
#define EMIT4(b1, b2, b3, b4)	\
	EMIT((b1) + ((b2) << 8) + ((b3) << 16) + ((b4) << 24), 4)

#define EMIT1_off32(b1, off) \
	do { EMIT1(b1); EMIT(off, 4); } while (0)
#define EMIT2_off32(b1, b2, off) \
	do { EMIT2(b1, b2); EMIT(off, 4); } while (0)
#define EMIT3_off32(b1, b2, b3, off) \
	do { EMIT3(b1, b2, b3); EMIT(off, 4); } while (0)
#define EMIT4_off32(b1, b2, b3, b4, off) \
	do { EMIT4(b1, b2, b3, b4); EMIT(off, 4); } while (0)
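/*
 * Byte operands are packed little-endian into 'bytes', so they land in the
 * instruction stream in argument order. Example: EMIT2(0x89, 0xD8) writes
 * the two bytes 0x89 0xD8, i.e. 'mov eax,ebx'.
 */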
#define jmp_label(label, jmp_insn_len) (label - cnt - jmp_insn_len)
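/*
 * jmp_label() computes the s8/s32 displacement from the end of a jump
 * instruction of length 'jmp_insn_len' (the jump ends at offset
 * cnt + jmp_insn_len) back to the byte offset 'label' recorded on an
 * earlier pass. E.g. label == 10 and a 2-byte jump with cnt == 12 gives
 * a displacement of -4. On the first pass the label is still -1; the
 * multi-pass JIT loop below converges this to the real offset.
 */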
static bool is_imm8(int value)
{
	return value <= 127 && value >= -128;
}
static bool is_simm32(s64 value)
{
	return value == (s64) (s32) value;
}
#define STACK_OFFSET(k)	(k)
#define TCALL_CNT	(MAX_BPF_JIT_REG + 0)	/* Tail Call Count */
#define IA32_EAX	(0x0)
#define IA32_EBX	(0x3)
#define IA32_ECX	(0x1)
#define IA32_EDX	(0x2)
#define IA32_ESI	(0x6)
#define IA32_EDI	(0x7)
#define IA32_EBP	(0x5)
#define IA32_ESP	(0x4)
/*
 * List of x86 cond jumps opcodes (. + s8)
 * Add 0x10 (and an extra 0x0f) to generate far jumps (. + s32)
 */
#define IA32_JB  0x72
#define IA32_JAE 0x73
#define IA32_JE  0x74
#define IA32_JNE 0x75
#define IA32_JBE 0x76
#define IA32_JA  0x77
#define IA32_JL  0x7C
#define IA32_JGE 0x7D
#define IA32_JLE 0x7E
#define IA32_JG  0x7F
/*
 * Map eBPF registers to IA32 32bit registers or stack scratch space.
 *
 * 1. All the registers, R0-R10, are mapped to scratch space on stack.
 * 2. We need two 64 bit temp registers to do complex operations on eBPF
 *    registers.
 * 3. For performance reasons, BPF_REG_AX, used for blinding constants,
 *    is mapped to the real hardware register pair IA32_ESI and IA32_EDI.
 *
 * As the eBPF registers are all 64 bit registers and IA32 has only 32 bit
 * registers, we have to map each eBPF register to two IA32 32 bit regs
 * or scratch memory space and build the eBPF 64 bit register from those.
 *
 * We use IA32_EAX, IA32_EDX, IA32_ECX, IA32_EBX as temporary registers.
 */
static const u8 bpf2ia32[][2] = {
	/* Return value from in-kernel function, and exit value from eBPF */
	[BPF_REG_0] = {STACK_OFFSET(0), STACK_OFFSET(4)},

	/* The arguments from eBPF program to in-kernel function */
	/* Stored on stack scratch space */
	[BPF_REG_1] = {STACK_OFFSET(8), STACK_OFFSET(12)},
	[BPF_REG_2] = {STACK_OFFSET(16), STACK_OFFSET(20)},
	[BPF_REG_3] = {STACK_OFFSET(24), STACK_OFFSET(28)},
	[BPF_REG_4] = {STACK_OFFSET(32), STACK_OFFSET(36)},
	[BPF_REG_5] = {STACK_OFFSET(40), STACK_OFFSET(44)},

	/* Callee saved registers that in-kernel function will preserve */
	/* Stored on stack scratch space */
	[BPF_REG_6] = {STACK_OFFSET(48), STACK_OFFSET(52)},
	[BPF_REG_7] = {STACK_OFFSET(56), STACK_OFFSET(60)},
	[BPF_REG_8] = {STACK_OFFSET(64), STACK_OFFSET(68)},
	[BPF_REG_9] = {STACK_OFFSET(72), STACK_OFFSET(76)},

	/* Read only Frame Pointer to access Stack */
	[BPF_REG_FP] = {STACK_OFFSET(80), STACK_OFFSET(84)},

	/* Temporary register for blinding constants. */
	[BPF_REG_AX] = {IA32_ESI, IA32_EDI},

	/* Tail call count. Stored on stack scratch space. */
	[TCALL_CNT] = {STACK_OFFSET(88), STACK_OFFSET(92)},
};
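/*
 * Example: BPF_REG_1 lives in the two dwords at [ebp+8] (low 32 bits) and
 * [ebp+12] (high 32 bits), while BPF_REG_AX is the only eBPF register kept
 * in real hardware registers (low dword in ESI, high dword in EDI).
 */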
#define dst_lo	dst[0]
#define dst_hi	dst[1]
#define src_lo	src[0]
#define src_hi	src[1]
#define STACK_ALIGNMENT	8
/*
 * Stack space for BPF_REG_1, BPF_REG_2, BPF_REG_3, BPF_REG_4,
 * BPF_REG_5, BPF_REG_6, BPF_REG_7, BPF_REG_8, BPF_REG_9,
 * BPF_REG_FP, BPF_REG_AX and Tail call counts.
 */
#define SCRATCH_SIZE 96

/* Total stack size used in JITed code */
#define _STACK_SIZE	(stack_depth + SCRATCH_SIZE)

#define STACK_SIZE ALIGN(_STACK_SIZE, STACK_ALIGNMENT)

/* Get the offset of eBPF REGISTERs stored on scratch space. */
#define STACK_VAR(off) (off)
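/*
 * Example: with stack_depth == 0 the JIT still reserves SCRATCH_SIZE (96)
 * bytes, so STACK_SIZE == ALIGN(96, 8) == 96; a program using 40 bytes of
 * BPF stack gets ALIGN(40 + 96, 8) == 136 bytes.
 */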
/* Encode 'dst_reg' register into IA32 opcode 'byte' */
static u8 add_1reg(u8 byte, u32 dst_reg)
{
	return byte + dst_reg;
}
/* Encode 'dst_reg' and 'src_reg' registers into IA32 opcode 'byte' */
static u8 add_2reg(u8 byte, u32 dst_reg, u32 src_reg)
{
	return byte + dst_reg + (src_reg << 3);
}
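/*
 * Example: the x86 ModRM byte is mod:reg:rm (2:3:3 bits); dst_reg fills the
 * r/m field and src_reg the reg field. add_2reg(0xC0, IA32_EDX, IA32_ECX)
 * = 0xC0 + 2 + (1 << 3) = 0xCA, so EMIT2(0x89, 0xCA) encodes 'mov edx,ecx'
 * (0x89 is MOV r/m32,r32).
 */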
static void jit_fill_hole(void *area, unsigned int size)
{
	/* Fill whole space with int3 instructions */
	memset(area, 0xcc, size);
}
static inline void emit_ia32_mov_i(const u8 dst, const u32 val, bool dstk,
				   u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;

	if (dstk) {
		if (val == 0) {
			/* xor eax,eax */
			EMIT2(0x33, add_2reg(0xC0, IA32_EAX, IA32_EAX));
			/* mov dword ptr [ebp+off],eax */
			EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX),
			      STACK_VAR(dst));
		} else {
			EMIT3_off32(0xC7, add_1reg(0x40, IA32_EBP),
				    STACK_VAR(dst), val);
		}
	} else {
		if (val == 0)
			EMIT2(0x33, add_2reg(0xC0, dst, dst));
		else
			EMIT2_off32(0xC7, add_1reg(0xC0, dst),
				    val);
	}
	*pprog = prog;
}
/* dst = src (4 bytes) */
static inline void emit_ia32_mov_r(const u8 dst, const u8 src, bool dstk,
				   bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 sreg = sstk ? IA32_EAX : src;

	if (sstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(src));
	if (dstk)
		/* mov dword ptr [ebp+off],sreg */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, sreg), STACK_VAR(dst));
	else
		/* mov dst,sreg */
		EMIT2(0x89, add_2reg(0xC0, dst, sreg));

	*pprog = prog;
}
/* dst = src (8 bytes) */
static inline void emit_ia32_mov_r64(const bool is64, const u8 dst[],
				     const u8 src[], bool dstk,
				     bool sstk, u8 **pprog)
{
	emit_ia32_mov_r(dst_lo, src_lo, dstk, sstk, pprog);
	if (is64)
		/* complete 8 byte move */
		emit_ia32_mov_r(dst_hi, src_hi, dstk, sstk, pprog);
	else
		/* zero out high 4 bytes */
		emit_ia32_mov_i(dst_hi, 0, dstk, pprog);
}
/* Sign extended move */
static inline void emit_ia32_mov_i64(const bool is64, const u8 dst[],
				     const u32 val, bool dstk, u8 **pprog)
{
	u32 hi = 0;

	if (is64 && (val & (1<<31)))
		hi = (u32)~0;
	emit_ia32_mov_i(dst_lo, val, dstk, pprog);
	emit_ia32_mov_i(dst_hi, hi, dstk, pprog);
}
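/*
 * Example: emit_ia32_mov_i64(true, dst, 0xfffffffe, ...) stores 0xfffffffe
 * in dst_lo and 0xffffffff in dst_hi, i.e. the 64-bit value -2; with
 * is64 == false the high dword is zeroed instead.
 */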
/*
 * ALU operation (32 bit)
 * dst = dst * src
 */
static inline void emit_ia32_mul_r(const u8 dst, const u8 src, bool dstk,
				   bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 sreg = sstk ? IA32_ECX : src;

	if (sstk)
		/* mov ecx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX), STACK_VAR(src));

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(dst));
	else
		/* mov eax,dst */
		EMIT2(0x8B, add_2reg(0xC0, dst, IA32_EAX));

	/* mul sreg */
	EMIT2(0xF7, add_1reg(0xE0, sreg));

	if (dstk)
		/* mov dword ptr [ebp+off],eax */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst));
	else
		/* mov dst,eax */
		EMIT2(0x89, add_2reg(0xC0, dst, IA32_EAX));

	*pprog = prog;
}
static inline void emit_ia32_to_le_r64(const u8 dst[], s32 val,
				       bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk && val != 64) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}
	switch (val) {
	case 16:
		/*
		 * Emit 'movzwl eax,ax' to zero extend 16-bit
		 * into 64 bit
		 */
		EMIT2(0x0F, 0xB7);
		EMIT1(add_2reg(0xC0, dreg_lo, dreg_lo));
		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
		break;
	case 32:
		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
		break;
	case 64:
		/* nop */
		break;
	}

	if (dstk && val != 64) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}

	*pprog = prog;
}
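/*
 * Note: IA32 is little-endian, so BPF_FROM_LE only needs to truncate: a
 * 16/32-bit result zero-extends into the 64-bit register pair and a 64-bit
 * one is left untouched.
 */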
static inline void emit_ia32_to_be_r64(const u8 dst[], s32 val,
				       bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}
	switch (val) {
	case 16:
		/* Emit 'ror %ax, 8' to swap lower 2 bytes */
		EMIT1(0x66);
		EMIT3(0xC1, add_1reg(0xC8, dreg_lo), 8);

		/* zero extend 16-bit into 64 bit */
		EMIT2(0x0F, 0xB7);
		EMIT1(add_2reg(0xC0, dreg_lo, dreg_lo));

		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
		break;
	case 32:
		/* Emit 'bswap eax' to swap lower 4 bytes */
		EMIT1(0x0F);
		EMIT1(add_1reg(0xC8, dreg_lo));

		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
		break;
	case 64:
		/* Emit 'bswap eax' to swap lower 4 bytes */
		EMIT1(0x0F);
		EMIT1(add_1reg(0xC8, dreg_lo));

		/* Emit 'bswap edx' to swap lower 4 bytes */
		EMIT1(0x0F);
		EMIT1(add_1reg(0xC8, dreg_hi));

		/* mov ecx,dreg_hi */
		EMIT2(0x89, add_2reg(0xC0, IA32_ECX, dreg_hi));
		/* mov dreg_hi,dreg_lo */
		EMIT2(0x89, add_2reg(0xC0, dreg_hi, dreg_lo));
		/* mov dreg_lo,ecx */
		EMIT2(0x89, add_2reg(0xC0, dreg_lo, IA32_ECX));
		break;
	}

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}

	*pprog = prog;
}
/*
 * ALU operation (32 bit)
 * dst = dst (div|mod) src
 */
static inline void emit_ia32_div_mod_r(const u8 op, const u8 dst, const u8 src,
				       bool dstk, bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;

	if (sstk)
		/* mov ecx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX),
		      STACK_VAR(src));
	else if (src != IA32_ECX)
		/* mov ecx,src */
		EMIT2(0x8B, add_2reg(0xC0, src, IA32_ECX));

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst));
	else
		/* mov eax,dst */
		EMIT2(0x8B, add_2reg(0xC0, dst, IA32_EAX));

	/* xor edx,edx */
	EMIT2(0x31, add_2reg(0xC0, IA32_EDX, IA32_EDX));
	/* div ecx */
	EMIT2(0xF7, add_1reg(0xF0, IA32_ECX));

	if (op == BPF_MOD) {
		if (dstk)
			EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EDX),
			      STACK_VAR(dst));
		else
			EMIT2(0x89, add_2reg(0xC0, dst, IA32_EDX));
	} else {
		if (dstk)
			EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX),
			      STACK_VAR(dst));
		else
			EMIT2(0x89, add_2reg(0xC0, dst, IA32_EAX));
	}
	*pprog = prog;
}
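/*
 * 'div ecx' divides the 64-bit value edx:eax by ecx; edx is zeroed first,
 * so this is a plain 32-bit unsigned divide. The quotient lands in eax
 * (BPF_DIV) and the remainder in edx (BPF_MOD).
 */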
/*
 * ALU operation (32 bit)
 * dst = dst (shift) src
 */
static inline void emit_ia32_shift_r(const u8 op, const u8 dst, const u8 src,
				     bool dstk, bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg = dstk ? IA32_EAX : dst;
	u8 b2;

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(dst));

	if (sstk)
		/* mov ecx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX), STACK_VAR(src));
	else if (src != IA32_ECX)
		/* mov ecx,src */
		EMIT2(0x8B, add_2reg(0xC0, src, IA32_ECX));

	switch (op) {
	case BPF_LSH:
		b2 = 0xE0; break;
	case BPF_RSH:
		b2 = 0xE8; break;
	case BPF_ARSH:
		b2 = 0xF8; break;
	default:
		return;
	}
	EMIT2(0xD3, add_1reg(b2, dreg));

	if (dstk)
		/* mov dword ptr [ebp+off],dreg */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg), STACK_VAR(dst));
	*pprog = prog;
}
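/*
 * 0xD3 /r is the shift-by-cl opcode group: /4 (ModRM base 0xE0) is shl,
 * /5 (0xE8) is shr and /7 (0xF8) is sar, which is why b2 selects among
 * those three bases.
 */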
/*
 * ALU operation (32 bit)
 * dst = dst (op) src
 */
static inline void emit_ia32_alu_r(const bool is64, const bool hi, const u8 op,
				   const u8 dst, const u8 src, bool dstk,
				   bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 sreg = sstk ? IA32_EAX : src;
	u8 dreg = dstk ? IA32_EDX : dst;

	if (sstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(src));

	if (dstk)
		/* mov edx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX), STACK_VAR(dst));

	switch (BPF_OP(op)) {
	/* dst = dst + src */
	case BPF_ADD:
		if (hi && is64)
			EMIT2(0x11, add_2reg(0xC0, dreg, sreg));
		else
			EMIT2(0x01, add_2reg(0xC0, dreg, sreg));
		break;
	/* dst = dst - src */
	case BPF_SUB:
		if (hi && is64)
			EMIT2(0x19, add_2reg(0xC0, dreg, sreg));
		else
			EMIT2(0x29, add_2reg(0xC0, dreg, sreg));
		break;
	/* dst = dst | src */
	case BPF_OR:
		EMIT2(0x09, add_2reg(0xC0, dreg, sreg));
		break;
	/* dst = dst & src */
	case BPF_AND:
		EMIT2(0x21, add_2reg(0xC0, dreg, sreg));
		break;
	/* dst = dst ^ src */
	case BPF_XOR:
		EMIT2(0x31, add_2reg(0xC0, dreg, sreg));
		break;
	}

	if (dstk)
		/* mov dword ptr [ebp+off],dreg */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg),
		      STACK_VAR(dst));
	*pprog = prog;
}
/* ALU operation (64 bit) */
static inline void emit_ia32_alu_r64(const bool is64, const u8 op,
				     const u8 dst[], const u8 src[],
				     bool dstk, bool sstk,
				     u8 **pprog)
{
	u8 *prog = *pprog;

	emit_ia32_alu_r(is64, false, op, dst_lo, src_lo, dstk, sstk, &prog);
	if (is64)
		emit_ia32_alu_r(is64, true, op, dst_hi, src_hi, dstk, sstk,
				&prog);
	else
		emit_ia32_mov_i(dst_hi, 0, dstk, &prog);
	*pprog = prog;
}
/*
 * ALU operation (32 bit)
 * dst = dst (op) val
 */
static inline void emit_ia32_alu_i(const bool is64, const bool hi, const u8 op,
				   const u8 dst, const s32 val, bool dstk,
				   u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg = dstk ? IA32_EAX : dst;
	u8 sreg = IA32_EDX;

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(dst));

	if (!is_imm8(val))
		/* mov edx,imm32 */
		EMIT2_off32(0xC7, add_1reg(0xC0, IA32_EDX), val);

	switch (op) {
	/* dst = dst + val */
	case BPF_ADD:
		if (hi && is64) {
			if (is_imm8(val))
				EMIT3(0x83, add_1reg(0xD0, dreg), val);
			else
				EMIT2(0x11, add_2reg(0xC0, dreg, sreg));
		} else {
			if (is_imm8(val))
				EMIT3(0x83, add_1reg(0xC0, dreg), val);
			else
				EMIT2(0x01, add_2reg(0xC0, dreg, sreg));
		}
		break;
	/* dst = dst - val */
	case BPF_SUB:
		if (hi && is64) {
			if (is_imm8(val))
				EMIT3(0x83, add_1reg(0xD8, dreg), val);
			else
				EMIT2(0x19, add_2reg(0xC0, dreg, sreg));
		} else {
			if (is_imm8(val))
				EMIT3(0x83, add_1reg(0xE8, dreg), val);
			else
				EMIT2(0x29, add_2reg(0xC0, dreg, sreg));
		}
		break;
	/* dst = dst | val */
	case BPF_OR:
		if (is_imm8(val))
			EMIT3(0x83, add_1reg(0xC8, dreg), val);
		else
			EMIT2(0x09, add_2reg(0xC0, dreg, sreg));
		break;
	/* dst = dst & val */
	case BPF_AND:
		if (is_imm8(val))
			EMIT3(0x83, add_1reg(0xE0, dreg), val);
		else
			EMIT2(0x21, add_2reg(0xC0, dreg, sreg));
		break;
	/* dst = dst ^ val */
	case BPF_XOR:
		if (is_imm8(val))
			EMIT3(0x83, add_1reg(0xF0, dreg), val);
		else
			EMIT2(0x31, add_2reg(0xC0, dreg, sreg));
		break;
	case BPF_NEG:
		EMIT2(0xF7, add_1reg(0xD8, dreg));
		break;
	}

	if (dstk)
		/* mov dword ptr [ebp+off],dreg */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg),
		      STACK_VAR(dst));
	*pprog = prog;
}
/* ALU operation (64 bit) */
static inline void emit_ia32_alu_i64(const bool is64, const u8 op,
				     const u8 dst[], const u32 val,
				     bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	u32 hi = 0;

	if (is64 && (val & (1<<31)))
		hi = (u32)~0;

	emit_ia32_alu_i(is64, false, op, dst_lo, val, dstk, &prog);
	if (is64)
		emit_ia32_alu_i(is64, true, op, dst_hi, hi, dstk, &prog);
	else
		emit_ia32_mov_i(dst_hi, 0, dstk, &prog);

	*pprog = prog;
}
/* dst = ~dst (64 bit) */
static inline void emit_ia32_neg64(const u8 dst[], bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}

	/* xor ecx,ecx */
	EMIT2(0x31, add_2reg(0xC0, IA32_ECX, IA32_ECX));
	/* sub dreg_lo,ecx */
	EMIT2(0x2B, add_2reg(0xC0, dreg_lo, IA32_ECX));
	/* mov dreg_lo,ecx */
	EMIT2(0x89, add_2reg(0xC0, dreg_lo, IA32_ECX));

	/* xor ecx,ecx */
	EMIT2(0x31, add_2reg(0xC0, IA32_ECX, IA32_ECX));
	/* sbb dreg_hi,ecx */
	EMIT2(0x19, add_2reg(0xC0, dreg_hi, IA32_ECX));
	/* mov dreg_hi,ecx */
	EMIT2(0x89, add_2reg(0xC0, dreg_hi, IA32_ECX));

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	*pprog = prog;
}
/* dst = dst << src */
static inline void emit_ia32_lsh_r64(const u8 dst[], const u8 src[],
				     bool dstk, bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	static int jmp_label1 = -1;
	static int jmp_label2 = -1;
	static int jmp_label3 = -1;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}

	if (sstk)
		/* mov ecx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX),
		      STACK_VAR(src_lo));
	else
		/* mov ecx,src_lo */
		EMIT2(0x8B, add_2reg(0xC0, src_lo, IA32_ECX));

	/* cmp ecx,32 */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), 32);
	/* Jumps when >= 32 */
	if (is_imm8(jmp_label(jmp_label1, 2)))
		EMIT2(IA32_JAE, jmp_label(jmp_label1, 2));
	else
		EMIT2_off32(0x0F, IA32_JAE + 0x10, jmp_label(jmp_label1, 6));

	/* < 32 */
	/* shl dreg_hi,cl */
	EMIT2(0xD3, add_1reg(0xE0, dreg_hi));
	/* mov ebx,dreg_lo */
	EMIT2(0x8B, add_2reg(0xC0, dreg_lo, IA32_EBX));
	/* shl dreg_lo,cl */
	EMIT2(0xD3, add_1reg(0xE0, dreg_lo));

	/* IA32_ECX = -IA32_ECX + 32 */
	/* neg ecx */
	EMIT2(0xF7, add_1reg(0xD8, IA32_ECX));
	/* add ecx,32 */
	EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 32);

	/* shr ebx,cl */
	EMIT2(0xD3, add_1reg(0xE8, IA32_EBX));
	/* or dreg_hi,ebx */
	EMIT2(0x09, add_2reg(0xC0, dreg_hi, IA32_EBX));

	/* goto out; */
	if (is_imm8(jmp_label(jmp_label3, 2)))
		EMIT2(0xEB, jmp_label(jmp_label3, 2));
	else
		EMIT1_off32(0xE9, jmp_label(jmp_label3, 5));

	/* >= 32 */
	if (jmp_label1 == -1)
		jmp_label1 = cnt;

	/* cmp ecx,64 */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), 64);
	/* Jumps when >= 64 */
	if (is_imm8(jmp_label(jmp_label2, 2)))
		EMIT2(IA32_JAE, jmp_label(jmp_label2, 2));
	else
		EMIT2_off32(0x0F, IA32_JAE + 0x10, jmp_label(jmp_label2, 6));

	/* >= 32 && < 64 */
	/* sub ecx,32 */
	EMIT3(0x83, add_1reg(0xE8, IA32_ECX), 32);
	/* shl dreg_lo,cl */
	EMIT2(0xD3, add_1reg(0xE0, dreg_lo));
	/* mov dreg_hi,dreg_lo */
	EMIT2(0x89, add_2reg(0xC0, dreg_hi, dreg_lo));

	/* xor dreg_lo,dreg_lo */
	EMIT2(0x33, add_2reg(0xC0, dreg_lo, dreg_lo));

	/* goto out; */
	if (is_imm8(jmp_label(jmp_label3, 2)))
		EMIT2(0xEB, jmp_label(jmp_label3, 2));
	else
		EMIT1_off32(0xE9, jmp_label(jmp_label3, 5));

	/* >= 64 */
	if (jmp_label2 == -1)
		jmp_label2 = cnt;
	/* xor dreg_lo,dreg_lo */
	EMIT2(0x33, add_2reg(0xC0, dreg_lo, dreg_lo));
	/* xor dreg_hi,dreg_hi */
	EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));

	if (jmp_label3 == -1)
		jmp_label3 = cnt;

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	/* out: */
	*pprog = prog;
}
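/*
 * Worked example for the '< 32' path with a shift count of 8:
 * dreg_hi = (dreg_hi << 8) | (saved dreg_lo >> 24) and dreg_lo <<= 8,
 * i.e. the copy of dreg_lo shifted right by (32 - cl) supplies the bits
 * that cross the 32-bit boundary.
 */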
/* dst = dst >> src (signed) */
static inline void emit_ia32_arsh_r64(const u8 dst[], const u8 src[],
				      bool dstk, bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	static int jmp_label1 = -1;
	static int jmp_label2 = -1;
	static int jmp_label3 = -1;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}

	if (sstk)
		/* mov ecx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX),
		      STACK_VAR(src_lo));
	else
		/* mov ecx,src_lo */
		EMIT2(0x8B, add_2reg(0xC0, src_lo, IA32_ECX));

	/* cmp ecx,32 */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), 32);
	/* Jumps when >= 32 */
	if (is_imm8(jmp_label(jmp_label1, 2)))
		EMIT2(IA32_JAE, jmp_label(jmp_label1, 2));
	else
		EMIT2_off32(0x0F, IA32_JAE + 0x10, jmp_label(jmp_label1, 6));

	/* < 32 */
	/* lshr dreg_lo,cl */
	EMIT2(0xD3, add_1reg(0xE8, dreg_lo));
	/* mov ebx,dreg_hi */
	EMIT2(0x8B, add_2reg(0xC0, dreg_hi, IA32_EBX));
	/* ashr dreg_hi,cl */
	EMIT2(0xD3, add_1reg(0xF8, dreg_hi));

	/* IA32_ECX = -IA32_ECX + 32 */
	/* neg ecx */
	EMIT2(0xF7, add_1reg(0xD8, IA32_ECX));
	/* add ecx,32 */
	EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 32);

	/* shl ebx,cl */
	EMIT2(0xD3, add_1reg(0xE0, IA32_EBX));
	/* or dreg_lo,ebx */
	EMIT2(0x09, add_2reg(0xC0, dreg_lo, IA32_EBX));

	/* goto out; */
	if (is_imm8(jmp_label(jmp_label3, 2)))
		EMIT2(0xEB, jmp_label(jmp_label3, 2));
	else
		EMIT1_off32(0xE9, jmp_label(jmp_label3, 5));

	/* >= 32 */
	if (jmp_label1 == -1)
		jmp_label1 = cnt;

	/* cmp ecx,64 */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), 64);
	/* Jumps when >= 64 */
	if (is_imm8(jmp_label(jmp_label2, 2)))
		EMIT2(IA32_JAE, jmp_label(jmp_label2, 2));
	else
		EMIT2_off32(0x0F, IA32_JAE + 0x10, jmp_label(jmp_label2, 6));

	/* >= 32 && < 64 */
	/* sub ecx,32 */
	EMIT3(0x83, add_1reg(0xE8, IA32_ECX), 32);
	/* ashr dreg_hi,cl */
	EMIT2(0xD3, add_1reg(0xF8, dreg_hi));
	/* mov dreg_lo,dreg_hi */
	EMIT2(0x89, add_2reg(0xC0, dreg_lo, dreg_hi));

	/* ashr dreg_hi,imm8 */
	EMIT3(0xC1, add_1reg(0xF8, dreg_hi), 31);

	/* goto out; */
	if (is_imm8(jmp_label(jmp_label3, 2)))
		EMIT2(0xEB, jmp_label(jmp_label3, 2));
	else
		EMIT1_off32(0xE9, jmp_label(jmp_label3, 5));

	/* >= 64 */
	if (jmp_label2 == -1)
		jmp_label2 = cnt;
	/* ashr dreg_hi,imm8 */
	EMIT3(0xC1, add_1reg(0xF8, dreg_hi), 31);
	/* mov dreg_lo,dreg_hi */
	EMIT2(0x89, add_2reg(0xC0, dreg_lo, dreg_hi));

	if (jmp_label3 == -1)
		jmp_label3 = cnt;

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	/* out: */
	*pprog = prog;
}
/* dst = dst >> src */
static inline void emit_ia32_rsh_r64(const u8 dst[], const u8 src[], bool dstk,
				     bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	static int jmp_label1 = -1;
	static int jmp_label2 = -1;
	static int jmp_label3 = -1;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}

	if (sstk)
		/* mov ecx,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX),
		      STACK_VAR(src_lo));
	else
		/* mov ecx,src_lo */
		EMIT2(0x8B, add_2reg(0xC0, src_lo, IA32_ECX));

	/* cmp ecx,32 */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), 32);
	/* Jumps when >= 32 */
	if (is_imm8(jmp_label(jmp_label1, 2)))
		EMIT2(IA32_JAE, jmp_label(jmp_label1, 2));
	else
		EMIT2_off32(0x0F, IA32_JAE + 0x10, jmp_label(jmp_label1, 6));

	/* < 32 */
	/* lshr dreg_lo,cl */
	EMIT2(0xD3, add_1reg(0xE8, dreg_lo));
	/* mov ebx,dreg_hi */
	EMIT2(0x8B, add_2reg(0xC0, dreg_hi, IA32_EBX));
	/* shr dreg_hi,cl */
	EMIT2(0xD3, add_1reg(0xE8, dreg_hi));

	/* IA32_ECX = -IA32_ECX + 32 */
	/* neg ecx */
	EMIT2(0xF7, add_1reg(0xD8, IA32_ECX));
	/* add ecx,32 */
	EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 32);

	/* shl ebx,cl */
	EMIT2(0xD3, add_1reg(0xE0, IA32_EBX));
	/* or dreg_lo,ebx */
	EMIT2(0x09, add_2reg(0xC0, dreg_lo, IA32_EBX));

	/* goto out; */
	if (is_imm8(jmp_label(jmp_label3, 2)))
		EMIT2(0xEB, jmp_label(jmp_label3, 2));
	else
		EMIT1_off32(0xE9, jmp_label(jmp_label3, 5));

	/* >= 32 */
	if (jmp_label1 == -1)
		jmp_label1 = cnt;

	/* cmp ecx,64 */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), 64);
	/* Jumps when >= 64 */
	if (is_imm8(jmp_label(jmp_label2, 2)))
		EMIT2(IA32_JAE, jmp_label(jmp_label2, 2));
	else
		EMIT2_off32(0x0F, IA32_JAE + 0x10, jmp_label(jmp_label2, 6));

	/* >= 32 && < 64 */
	/* sub ecx,32 */
	EMIT3(0x83, add_1reg(0xE8, IA32_ECX), 32);
	/* shr dreg_hi,cl */
	EMIT2(0xD3, add_1reg(0xE8, dreg_hi));
	/* mov dreg_lo,dreg_hi */
	EMIT2(0x89, add_2reg(0xC0, dreg_lo, dreg_hi));
	/* xor dreg_hi,dreg_hi */
	EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));

	/* goto out; */
	if (is_imm8(jmp_label(jmp_label3, 2)))
		EMIT2(0xEB, jmp_label(jmp_label3, 2));
	else
		EMIT1_off32(0xE9, jmp_label(jmp_label3, 5));

	/* >= 64 */
	if (jmp_label2 == -1)
		jmp_label2 = cnt;
	/* xor dreg_lo,dreg_lo */
	EMIT2(0x33, add_2reg(0xC0, dreg_lo, dreg_lo));
	/* xor dreg_hi,dreg_hi */
	EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));

	if (jmp_label3 == -1)
		jmp_label3 = cnt;

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	/* out: */
	*pprog = prog;
}
/* dst = dst << val */
static inline void emit_ia32_lsh_i64(const u8 dst[], const u32 val,
				     bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}
	/* Do LSH operation */
	if (val < 32) {
		/* shl dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xE0, dreg_hi), val);
		/* mov ebx,dreg_lo */
		EMIT2(0x8B, add_2reg(0xC0, dreg_lo, IA32_EBX));
		/* shl dreg_lo,imm8 */
		EMIT3(0xC1, add_1reg(0xE0, dreg_lo), val);

		/* IA32_ECX = 32 - val */
		/* mov cl,val */
		EMIT2(0xB1, val);
		/* movzx ecx,cl */
		EMIT3(0x0F, 0xB6, add_2reg(0xC0, IA32_ECX, IA32_ECX));
		/* neg ecx */
		EMIT2(0xF7, add_1reg(0xD8, IA32_ECX));
		/* add ecx,32 */
		EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 32);

		/* shr ebx,cl */
		EMIT2(0xD3, add_1reg(0xE8, IA32_EBX));
		/* or dreg_hi,ebx */
		EMIT2(0x09, add_2reg(0xC0, dreg_hi, IA32_EBX));
	} else if (val >= 32 && val < 64) {
		u32 value = val - 32;

		/* shl dreg_lo,imm8 */
		EMIT3(0xC1, add_1reg(0xE0, dreg_lo), value);
		/* mov dreg_hi,dreg_lo */
		EMIT2(0x89, add_2reg(0xC0, dreg_hi, dreg_lo));
		/* xor dreg_lo,dreg_lo */
		EMIT2(0x33, add_2reg(0xC0, dreg_lo, dreg_lo));
	} else {
		/* xor dreg_lo,dreg_lo */
		EMIT2(0x33, add_2reg(0xC0, dreg_lo, dreg_lo));
		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
	}

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	*pprog = prog;
}
/* dst = dst >> val */
static inline void emit_ia32_rsh_i64(const u8 dst[], const u32 val,
				     bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}

	/* Do RSH operation */
	if (val < 32) {
		/* shr dreg_lo,imm8 */
		EMIT3(0xC1, add_1reg(0xE8, dreg_lo), val);
		/* mov ebx,dreg_hi */
		EMIT2(0x8B, add_2reg(0xC0, dreg_hi, IA32_EBX));
		/* shr dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xE8, dreg_hi), val);

		/* IA32_ECX = 32 - val */
		/* mov cl,val */
		EMIT2(0xB1, val);
		/* movzx ecx,cl */
		EMIT3(0x0F, 0xB6, add_2reg(0xC0, IA32_ECX, IA32_ECX));
		/* neg ecx */
		EMIT2(0xF7, add_1reg(0xD8, IA32_ECX));
		/* add ecx,32 */
		EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 32);

		/* shl ebx,cl */
		EMIT2(0xD3, add_1reg(0xE0, IA32_EBX));
		/* or dreg_lo,ebx */
		EMIT2(0x09, add_2reg(0xC0, dreg_lo, IA32_EBX));
	} else if (val >= 32 && val < 64) {
		u32 value = val - 32;

		/* shr dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xE8, dreg_hi), value);
		/* mov dreg_lo,dreg_hi */
		EMIT2(0x89, add_2reg(0xC0, dreg_lo, dreg_hi));
		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
	} else {
		/* xor dreg_lo,dreg_lo */
		EMIT2(0x33, add_2reg(0xC0, dreg_lo, dreg_lo));
		/* xor dreg_hi,dreg_hi */
		EMIT2(0x33, add_2reg(0xC0, dreg_hi, dreg_hi));
	}

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	*pprog = prog;
}
/* dst = dst >> val (signed) */
static inline void emit_ia32_arsh_i64(const u8 dst[], const u32 val,
				      bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
	u8 dreg_hi = dstk ? IA32_EDX : dst_hi;

	if (dstk) {
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
		      STACK_VAR(dst_hi));
	}
	/* Do RSH operation */
	if (val < 32) {
		/* shr dreg_lo,imm8 */
		EMIT3(0xC1, add_1reg(0xE8, dreg_lo), val);
		/* mov ebx,dreg_hi */
		EMIT2(0x8B, add_2reg(0xC0, dreg_hi, IA32_EBX));
		/* ashr dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xF8, dreg_hi), val);

		/* IA32_ECX = 32 - val */
		/* mov cl,val */
		EMIT2(0xB1, val);
		/* movzx ecx,cl */
		EMIT3(0x0F, 0xB6, add_2reg(0xC0, IA32_ECX, IA32_ECX));
		/* neg ecx */
		EMIT2(0xF7, add_1reg(0xD8, IA32_ECX));
		/* add ecx,32 */
		EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 32);

		/* shl ebx,cl */
		EMIT2(0xD3, add_1reg(0xE0, IA32_EBX));
		/* or dreg_lo,ebx */
		EMIT2(0x09, add_2reg(0xC0, dreg_lo, IA32_EBX));
	} else if (val >= 32 && val < 64) {
		u32 value = val - 32;

		/* ashr dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xF8, dreg_hi), value);
		/* mov dreg_lo,dreg_hi */
		EMIT2(0x89, add_2reg(0xC0, dreg_lo, dreg_hi));

		/* ashr dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xF8, dreg_hi), 31);
	} else {
		/* ashr dreg_hi,imm8 */
		EMIT3(0xC1, add_1reg(0xF8, dreg_hi), 31);
		/* mov dreg_lo,dreg_hi */
		EMIT2(0x89, add_2reg(0xC0, dreg_lo, dreg_hi));
	}

	if (dstk) {
		/* mov dword ptr [ebp+off],dreg_lo */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_lo),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],dreg_hi */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg_hi),
		      STACK_VAR(dst_hi));
	}
	*pprog = prog;
}
/* dst = dst * src */
static inline void emit_ia32_mul_r64(const u8 dst[], const u8 src[], bool dstk,
				     bool sstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_hi));
	else
		/* mov eax,dst_hi */
		EMIT2(0x8B, add_2reg(0xC0, dst_hi, IA32_EAX));

	if (sstk)
		/* mul dword ptr [ebp+off] */
		EMIT3(0xF7, add_1reg(0x60, IA32_EBP), STACK_VAR(src_lo));
	else
		/* mul src_lo */
		EMIT2(0xF7, add_1reg(0xE0, src_lo));

	/* mov ecx,eax */
	EMIT2(0x89, add_2reg(0xC0, IA32_ECX, IA32_EAX));

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
	else
		/* mov eax,dst_lo */
		EMIT2(0x8B, add_2reg(0xC0, dst_lo, IA32_EAX));

	if (sstk)
		/* mul dword ptr [ebp+off] */
		EMIT3(0xF7, add_1reg(0x60, IA32_EBP), STACK_VAR(src_hi));
	else
		/* mul src_hi */
		EMIT2(0xF7, add_1reg(0xE0, src_hi));

	/* add ecx,eax */
	EMIT2(0x01, add_2reg(0xC0, IA32_ECX, IA32_EAX));

	if (dstk)
		/* mov eax,dword ptr [ebp+off] */
		EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
	else
		/* mov eax,dst_lo */
		EMIT2(0x8B, add_2reg(0xC0, dst_lo, IA32_EAX));

	if (sstk)
		/* mul dword ptr [ebp+off] */
		EMIT3(0xF7, add_1reg(0x60, IA32_EBP), STACK_VAR(src_lo));
	else
		/* mul src_lo */
		EMIT2(0xF7, add_1reg(0xE0, src_lo));

	/* add ecx,edx */
	EMIT2(0x01, add_2reg(0xC0, IA32_ECX, IA32_EDX));

	if (dstk) {
		/* mov dword ptr [ebp+off],eax */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],ecx */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_ECX),
		      STACK_VAR(dst_hi));
	} else {
		/* mov dst_lo,eax */
		EMIT2(0x89, add_2reg(0xC0, dst_lo, IA32_EAX));
		/* mov dst_hi,ecx */
		EMIT2(0x89, add_2reg(0xC0, dst_hi, IA32_ECX));
	}

	*pprog = prog;
}
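/*
 * This implements schoolbook multiplication modulo 2^64:
 * (hi_d*2^32 + lo_d) * (hi_s*2^32 + lo_s) ==
 * (hi_d*lo_s + lo_d*hi_s)*2^32 + lo_d*lo_s, where the hi_d*hi_s term
 * overflows out of 64 bits; ecx accumulates the high dword while the final
 * 'mul' leaves the low product in edx:eax.
 */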
/* dst = dst * val */
static inline void emit_ia32_mul_i64(const u8 dst[], const u32 val,
				     bool dstk, u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	u32 hi;

	hi = val & (1<<31) ? (u32)~0 : 0;
	/* movl eax,imm32 */
	EMIT2_off32(0xC7, add_1reg(0xC0, IA32_EAX), val);
	if (dstk)
		/* mul dword ptr [ebp+off] */
		EMIT3(0xF7, add_1reg(0x60, IA32_EBP), STACK_VAR(dst_hi));
	else
		/* mul dst_hi */
		EMIT2(0xF7, add_1reg(0xE0, dst_hi));

	/* mov ecx,eax */
	EMIT2(0x89, add_2reg(0xC0, IA32_ECX, IA32_EAX));

	/* movl eax,imm32 */
	EMIT2_off32(0xC7, add_1reg(0xC0, IA32_EAX), hi);
	if (dstk)
		/* mul dword ptr [ebp+off] */
		EMIT3(0xF7, add_1reg(0x60, IA32_EBP), STACK_VAR(dst_lo));
	else
		/* mul dst_lo */
		EMIT2(0xF7, add_1reg(0xE0, dst_lo));
	/* add ecx,eax */
	EMIT2(0x01, add_2reg(0xC0, IA32_ECX, IA32_EAX));

	/* movl eax,imm32 */
	EMIT2_off32(0xC7, add_1reg(0xC0, IA32_EAX), val);
	if (dstk)
		/* mul dword ptr [ebp+off] */
		EMIT3(0xF7, add_1reg(0x60, IA32_EBP), STACK_VAR(dst_lo));
	else
		/* mul dst_lo */
		EMIT2(0xF7, add_1reg(0xE0, dst_lo));

	/* add ecx,edx */
	EMIT2(0x01, add_2reg(0xC0, IA32_ECX, IA32_EDX));

	if (dstk) {
		/* mov dword ptr [ebp+off],eax */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX),
		      STACK_VAR(dst_lo));
		/* mov dword ptr [ebp+off],ecx */
		EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_ECX),
		      STACK_VAR(dst_hi));
	} else {
		/* mov dst_lo,eax */
		EMIT2(0x89, add_2reg(0xC0, dst_lo, IA32_EAX));
		/* mov dst_hi,ecx */
		EMIT2(0x89, add_2reg(0xC0, dst_hi, IA32_ECX));
	}

	*pprog = prog;
}
static int bpf_size_to_x86_bytes(int bpf_size)
{
	if (bpf_size == BPF_W)
		return 4;
	else if (bpf_size == BPF_H)
		return 2;
	else if (bpf_size == BPF_B)
		return 1;
	else if (bpf_size == BPF_DW)
		return 4; /* imm32 */
	else
		return 0;
}
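/*
 * Example: a BPF_ST | BPF_MEM | BPF_H store emits a 2-byte immediate, while
 * BPF_DW still returns 4 because the 64-bit immediate is written as two
 * separate 32-bit stores.
 */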
struct jit_context {
	int cleanup_addr; /* Epilogue code offset */
};
/* Maximum number of bytes emitted while JITing one eBPF insn */
#define BPF_MAX_INSN_SIZE	128
#define BPF_INSN_SAFETY		64
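/*
 * Each insn is first JITed into the on-stack 'temp' buffer in do_jit();
 * the BPF_INSN_SAFETY slack lets an emitter overrun the 128-byte bound
 * slightly and still be caught by the ilen check before anything is
 * copied into the final image.
 */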
#define PROLOGUE_SIZE 35
/*
 * Emit prologue code for BPF program and check its size.
 * bpf_tail_call helper will skip it while jumping into another program.
 */
static void emit_prologue(u8 **pprog, u32 stack_depth)
{
	u8 *prog = *pprog;
	int cnt = 0;
	const u8 *r1 = bpf2ia32[BPF_REG_1];
	const u8 fplo = bpf2ia32[BPF_REG_FP][0];
	const u8 fphi = bpf2ia32[BPF_REG_FP][1];
	const u8 *tcc = bpf2ia32[TCALL_CNT];

	/* push ebp */
	EMIT1(0x55);
	/* mov ebp,esp */
	EMIT2(0x89, 0xE5);
	/* push edi */
	EMIT1(0x57);
	/* push esi */
	EMIT1(0x56);
	/* push ebx */
	EMIT1(0x53);

	/* sub esp,STACK_SIZE */
	EMIT2_off32(0x81, 0xEC, STACK_SIZE);
	/* sub ebp,SCRATCH_SIZE+12*/
	EMIT3(0x83, add_1reg(0xE8, IA32_EBP), SCRATCH_SIZE + 12);
	/* xor ebx,ebx */
	EMIT2(0x31, add_2reg(0xC0, IA32_EBX, IA32_EBX));

	/* Set up BPF prog stack base register */
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EBP), STACK_VAR(fplo));
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EBX), STACK_VAR(fphi));

	/* Move BPF_CTX (EAX) to BPF_REG_R1 */
	/* mov dword ptr [ebp+off],eax */
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(r1[0]));
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EBX), STACK_VAR(r1[1]));

	/* Initialize Tail Count */
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EBX), STACK_VAR(tcc[0]));
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EBX), STACK_VAR(tcc[1]));

	BUILD_BUG_ON(cnt != PROLOGUE_SIZE);
	*pprog = prog;
}
/* Emit epilogue code for BPF program */
static void emit_epilogue(u8 **pprog, u32 stack_depth)
{
	u8 *prog = *pprog;
	const u8 *r0 = bpf2ia32[BPF_REG_0];
	int cnt = 0;

	/* mov eax,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(r0[0]));
	/* mov edx,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX), STACK_VAR(r0[1]));

	/* add ebp,SCRATCH_SIZE+12*/
	EMIT3(0x83, add_1reg(0xC0, IA32_EBP), SCRATCH_SIZE + 12);

	/* mov ebx,dword ptr [ebp-12] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EBX), -12);
	/* mov esi,dword ptr [ebp-8] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ESI), -8);
	/* mov edi,dword ptr [ebp-4] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDI), -4);

	EMIT1(0xC9); /* leave */
	EMIT1(0xC3); /* ret */
	*pprog = prog;
}
/*
 * Generate the following code:
 * ... bpf_tail_call(void *ctx, struct bpf_array *array, u64 index) ...
 *   if (index >= array->map.max_entries)
 *     goto out;
 *   if (++tail_call_cnt > MAX_TAIL_CALL_CNT)
 *     goto out;
 *   prog = array->ptrs[index];
 *   if (prog == NULL)
 *     goto out;
 *   goto *(prog->bpf_func + prologue_size);
 * out:
 */
static void emit_bpf_tail_call(u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;
	const u8 *r1 = bpf2ia32[BPF_REG_1];
	const u8 *r2 = bpf2ia32[BPF_REG_2];
	const u8 *r3 = bpf2ia32[BPF_REG_3];
	const u8 *tcc = bpf2ia32[TCALL_CNT];
	u32 lo, hi;
	static int jmp_label1 = -1;

	/*
	 * if (index >= array->map.max_entries)
	 *     goto out;
	 */
	/* mov eax,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(r2[0]));
	/* mov edx,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX), STACK_VAR(r3[0]));

	/* cmp dword ptr [eax+off],edx */
	EMIT3(0x39, add_2reg(0x40, IA32_EAX, IA32_EDX),
	      offsetof(struct bpf_array, map.max_entries));
	/* jbe out */
	EMIT2(IA32_JBE, jmp_label(jmp_label1, 2));

	/*
	 * if (tail_call_cnt > MAX_TAIL_CALL_CNT)
	 *     goto out;
	 */
	lo = (u32)MAX_TAIL_CALL_CNT;
	hi = (u32)((u64)MAX_TAIL_CALL_CNT >> 32);
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX), STACK_VAR(tcc[0]));
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EBX), STACK_VAR(tcc[1]));

	/* cmp ebx,hi */
	EMIT3(0x83, add_1reg(0xF8, IA32_EBX), hi);
	EMIT2(IA32_JNE, 3);
	/* cmp ecx,lo */
	EMIT3(0x83, add_1reg(0xF8, IA32_ECX), lo);

	/* jae out */
	EMIT2(IA32_JAE, jmp_label(jmp_label1, 2));

	/* add ecx,1 */
	EMIT3(0x83, add_1reg(0xC0, IA32_ECX), 0x01);
	/* adc ebx,0 */
	EMIT3(0x83, add_1reg(0xD0, IA32_EBX), 0x00);

	/* mov dword ptr [ebp+off],ecx */
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_ECX), STACK_VAR(tcc[0]));
	/* mov dword ptr [ebp+off],ebx */
	EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EBX), STACK_VAR(tcc[1]));

	/* prog = array->ptrs[index]; */
	/* mov edx, [eax + edx * 4 + offsetof(...)] */
	EMIT3_off32(0x8B, 0x94, 0x90, offsetof(struct bpf_array, ptrs));

	/*
	 * if (prog == NULL)
	 *     goto out;
	 */
	/* test edx,edx */
	EMIT2(0x85, add_2reg(0xC0, IA32_EDX, IA32_EDX));
	/* je out */
	EMIT2(IA32_JE, jmp_label(jmp_label1, 2));

	/* goto *(prog->bpf_func + prologue_size); */
	/* mov edx, dword ptr [edx + 32] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EDX, IA32_EDX),
	      offsetof(struct bpf_prog, bpf_func));
	/* add edx,prologue_size */
	EMIT3(0x83, add_1reg(0xC0, IA32_EDX), PROLOGUE_SIZE);

	/* mov eax,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(r1[0]));

	/*
	 * Now we're ready to jump into next BPF program:
	 * eax == ctx (1st arg)
	 * edx == prog->bpf_func + prologue_size
	 */
	RETPOLINE_EDX_BPF_JIT();

	if (jmp_label1 == -1)
		jmp_label1 = cnt;

	/* out: */
	*pprog = prog;
}
/* Push the scratch stack register on top of the stack. */
static inline void emit_push_r64(const u8 src[], u8 **pprog)
{
	u8 *prog = *pprog;
	int cnt = 0;

	/* mov ecx,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX), STACK_VAR(src_hi));
	/* push ecx */
	EMIT1(0x51);

	/* mov ecx,dword ptr [ebp+off] */
	EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX), STACK_VAR(src_lo));
	/* push ecx */
	EMIT1(0x51);

	*pprog = prog;
}
static int do_jit(struct bpf_prog *bpf_prog, int *addrs, u8 *image,
		  int oldproglen, struct jit_context *ctx)
{
	struct bpf_insn *insn = bpf_prog->insnsi;
	int insn_cnt = bpf_prog->len;
	bool seen_exit = false;
	u8 temp[BPF_MAX_INSN_SIZE + BPF_INSN_SAFETY];
	int i, cnt = 0;
	int proglen = 0;
	u8 *prog = temp;

	emit_prologue(&prog, bpf_prog->aux->stack_depth);

	for (i = 0; i < insn_cnt; i++, insn++) {
		const s32 imm32 = insn->imm;
		const bool is64 = BPF_CLASS(insn->code) == BPF_ALU64;
		const bool dstk = insn->dst_reg == BPF_REG_AX ? false : true;
		const bool sstk = insn->src_reg == BPF_REG_AX ? false : true;
		const u8 code = insn->code;
		const u8 *dst = bpf2ia32[insn->dst_reg];
		const u8 *src = bpf2ia32[insn->src_reg];
		const u8 *r0 = bpf2ia32[BPF_REG_0];
		s64 jmp_offset;
		u8 jmp_cond;
		int ilen;
		u8 *func;

		switch (code) {
		/* ALU operations */
		/* dst = src */
		case BPF_ALU | BPF_MOV | BPF_K:
		case BPF_ALU | BPF_MOV | BPF_X:
		case BPF_ALU64 | BPF_MOV | BPF_K:
		case BPF_ALU64 | BPF_MOV | BPF_X:
			switch (BPF_SRC(code)) {
			case BPF_X:
				emit_ia32_mov_r64(is64, dst, src, dstk,
						  sstk, &prog);
				break;
			case BPF_K:
				/* Sign-extend immediate value to dst reg */
				emit_ia32_mov_i64(is64, dst, imm32,
						  dstk, &prog);
				break;
			}
			break;
		/* dst = dst + src/imm */
		/* dst = dst - src/imm */
		/* dst = dst | src/imm */
		/* dst = dst & src/imm */
		/* dst = dst ^ src/imm */
		/* dst = dst * src/imm */
		/* dst = dst << src */
		/* dst = dst >> src */
		case BPF_ALU | BPF_ADD | BPF_K:
		case BPF_ALU | BPF_ADD | BPF_X:
		case BPF_ALU | BPF_SUB | BPF_K:
		case BPF_ALU | BPF_SUB | BPF_X:
		case BPF_ALU | BPF_OR | BPF_K:
		case BPF_ALU | BPF_OR | BPF_X:
		case BPF_ALU | BPF_AND | BPF_K:
		case BPF_ALU | BPF_AND | BPF_X:
		case BPF_ALU | BPF_XOR | BPF_K:
		case BPF_ALU | BPF_XOR | BPF_X:
		case BPF_ALU64 | BPF_ADD | BPF_K:
		case BPF_ALU64 | BPF_ADD | BPF_X:
		case BPF_ALU64 | BPF_SUB | BPF_K:
		case BPF_ALU64 | BPF_SUB | BPF_X:
		case BPF_ALU64 | BPF_OR | BPF_K:
		case BPF_ALU64 | BPF_OR | BPF_X:
		case BPF_ALU64 | BPF_AND | BPF_K:
		case BPF_ALU64 | BPF_AND | BPF_X:
		case BPF_ALU64 | BPF_XOR | BPF_K:
		case BPF_ALU64 | BPF_XOR | BPF_X:
			switch (BPF_SRC(code)) {
			case BPF_X:
				emit_ia32_alu_r64(is64, BPF_OP(code), dst,
						  src, dstk, sstk, &prog);
				break;
			case BPF_K:
				emit_ia32_alu_i64(is64, BPF_OP(code), dst,
						  imm32, dstk, &prog);
				break;
			}
			break;
		case BPF_ALU | BPF_MUL | BPF_K:
		case BPF_ALU | BPF_MUL | BPF_X:
			switch (BPF_SRC(code)) {
			case BPF_X:
				emit_ia32_mul_r(dst_lo, src_lo, dstk,
						sstk, &prog);
				break;
			case BPF_K:
				/* mov ecx,imm32 */
				EMIT2_off32(0xC7, add_1reg(0xC0, IA32_ECX),
					    imm32);
				emit_ia32_mul_r(dst_lo, IA32_ECX, dstk,
						false, &prog);
				break;
			}
			emit_ia32_mov_i(dst_hi, 0, dstk, &prog);
			break;
		case BPF_ALU | BPF_LSH | BPF_X:
		case BPF_ALU | BPF_RSH | BPF_X:
		case BPF_ALU | BPF_ARSH | BPF_K:
		case BPF_ALU | BPF_ARSH | BPF_X:
			switch (BPF_SRC(code)) {
			case BPF_X:
				emit_ia32_shift_r(BPF_OP(code), dst_lo, src_lo,
						  dstk, sstk, &prog);
				break;
			case BPF_K:
				/* mov ecx,imm32 */
				EMIT2_off32(0xC7, add_1reg(0xC0, IA32_ECX),
					    imm32);
				emit_ia32_shift_r(BPF_OP(code), dst_lo,
						  IA32_ECX, dstk, false,
						  &prog);
				break;
			}
			emit_ia32_mov_i(dst_hi, 0, dstk, &prog);
			break;
		/* dst = dst / src(imm) */
		/* dst = dst % src(imm) */
		case BPF_ALU | BPF_DIV | BPF_K:
		case BPF_ALU | BPF_DIV | BPF_X:
		case BPF_ALU | BPF_MOD | BPF_K:
		case BPF_ALU | BPF_MOD | BPF_X:
			switch (BPF_SRC(code)) {
			case BPF_X:
				emit_ia32_div_mod_r(BPF_OP(code), dst_lo,
						    src_lo, dstk, sstk, &prog);
				break;
			case BPF_K:
				/* mov ecx,imm32 */
				EMIT2_off32(0xC7, add_1reg(0xC0, IA32_ECX),
					    imm32);
				emit_ia32_div_mod_r(BPF_OP(code), dst_lo,
						    IA32_ECX, dstk, false,
						    &prog);
				break;
			}
			emit_ia32_mov_i(dst_hi, 0, dstk, &prog);
			break;
		case BPF_ALU64 | BPF_DIV | BPF_K:
		case BPF_ALU64 | BPF_DIV | BPF_X:
		case BPF_ALU64 | BPF_MOD | BPF_K:
		case BPF_ALU64 | BPF_MOD | BPF_X:
			goto notyet;
		/* dst = dst >> imm */
		/* dst = dst << imm */
		case BPF_ALU | BPF_RSH | BPF_K:
		case BPF_ALU | BPF_LSH | BPF_K:
			if (unlikely(imm32 > 31))
				return -EINVAL;
			/* mov ecx,imm32 */
			EMIT2_off32(0xC7, add_1reg(0xC0, IA32_ECX), imm32);
			emit_ia32_shift_r(BPF_OP(code), dst_lo, IA32_ECX, dstk,
					  false, &prog);
			emit_ia32_mov_i(dst_hi, 0, dstk, &prog);
			break;
		/* dst = dst << imm */
		case BPF_ALU64 | BPF_LSH | BPF_K:
			if (unlikely(imm32 > 63))
				return -EINVAL;
			emit_ia32_lsh_i64(dst, imm32, dstk, &prog);
			break;
		/* dst = dst >> imm */
		case BPF_ALU64 | BPF_RSH | BPF_K:
			if (unlikely(imm32 > 63))
				return -EINVAL;
			emit_ia32_rsh_i64(dst, imm32, dstk, &prog);
			break;
		/* dst = dst << src */
		case BPF_ALU64 | BPF_LSH | BPF_X:
			emit_ia32_lsh_r64(dst, src, dstk, sstk, &prog);
			break;
		/* dst = dst >> src */
		case BPF_ALU64 | BPF_RSH | BPF_X:
			emit_ia32_rsh_r64(dst, src, dstk, sstk, &prog);
			break;
		/* dst = dst >> src (signed) */
		case BPF_ALU64 | BPF_ARSH | BPF_X:
			emit_ia32_arsh_r64(dst, src, dstk, sstk, &prog);
			break;
		/* dst = dst >> imm (signed) */
		case BPF_ALU64 | BPF_ARSH | BPF_K:
			if (unlikely(imm32 > 63))
				return -EINVAL;
			emit_ia32_arsh_i64(dst, imm32, dstk, &prog);
			break;
		/* dst = ~dst */
		case BPF_ALU | BPF_NEG:
			emit_ia32_alu_i(is64, false, BPF_OP(code),
					dst_lo, 0, dstk, &prog);
			emit_ia32_mov_i(dst_hi, 0, dstk, &prog);
			break;
		/* dst = ~dst (64 bit) */
		case BPF_ALU64 | BPF_NEG:
			emit_ia32_neg64(dst, dstk, &prog);
			break;
		/* dst = dst * src/imm */
		case BPF_ALU64 | BPF_MUL | BPF_X:
		case BPF_ALU64 | BPF_MUL | BPF_K:
			switch (BPF_SRC(code)) {
			case BPF_X:
				emit_ia32_mul_r64(dst, src, dstk, sstk, &prog);
				break;
			case BPF_K:
				emit_ia32_mul_i64(dst, imm32, dstk, &prog);
				break;
			}
			break;
		/* dst = htole(dst) */
		case BPF_ALU | BPF_END | BPF_FROM_LE:
			emit_ia32_to_le_r64(dst, imm32, dstk, &prog);
			break;
		/* dst = htobe(dst) */
		case BPF_ALU | BPF_END | BPF_FROM_BE:
			emit_ia32_to_be_r64(dst, imm32, dstk, &prog);
			break;
		/* dst = imm64 */
		case BPF_LD | BPF_IMM | BPF_DW: {
			u32 hi, lo = imm32;

			hi = insn[1].imm;
			emit_ia32_mov_i(dst_lo, lo, dstk, &prog);
			emit_ia32_mov_i(dst_hi, hi, dstk, &prog);

			/* Skip the second half of the insn pair */
			insn++;
			i++;
			break;
		}
		/* ST: *(u8*)(dst_reg + off) = imm */
		case BPF_ST | BPF_MEM | BPF_H:
		case BPF_ST | BPF_MEM | BPF_B:
		case BPF_ST | BPF_MEM | BPF_W:
		case BPF_ST | BPF_MEM | BPF_DW:
			if (dstk)
				/* mov eax,dword ptr [ebp+off] */
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(dst_lo));
			else
				/* mov eax,dst_lo */
				EMIT2(0x8B, add_2reg(0xC0, dst_lo, IA32_EAX));

			switch (BPF_SIZE(code)) {
			case BPF_B:
				EMIT(0xC6, 1); break;
			case BPF_H:
				EMIT2(0x66, 0xC7); break;
			case BPF_W:
			case BPF_DW:
				EMIT(0xC7, 1); break;
			}

			if (is_imm8(insn->off))
				EMIT2(add_1reg(0x40, IA32_EAX), insn->off);
			else
				EMIT1_off32(add_1reg(0x80, IA32_EAX),
					    insn->off);
			EMIT(imm32, bpf_size_to_x86_bytes(BPF_SIZE(code)));

			if (BPF_SIZE(code) == BPF_DW) {
				u32 hi;

				hi = imm32 & (1<<31) ? (u32)~0 : 0;
				EMIT2_off32(0xC7, add_1reg(0x80, IA32_EAX),
					    insn->off + 4);
				EMIT(hi, 4);
			}
			break;
		/* STX: *(u8*)(dst_reg + off) = src_reg */
		case BPF_STX | BPF_MEM | BPF_B:
		case BPF_STX | BPF_MEM | BPF_H:
		case BPF_STX | BPF_MEM | BPF_W:
		case BPF_STX | BPF_MEM | BPF_DW:
			if (dstk)
				/* mov eax,dword ptr [ebp+off] */
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(dst_lo));
			else
				/* mov eax,dst_lo */
				EMIT2(0x8B, add_2reg(0xC0, dst_lo, IA32_EAX));

			if (sstk)
				/* mov edx,dword ptr [ebp+off] */
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
				      STACK_VAR(src_lo));
			else
				/* mov edx,src_lo */
				EMIT2(0x8B, add_2reg(0xC0, src_lo, IA32_EDX));

			switch (BPF_SIZE(code)) {
			case BPF_B:
				EMIT(0x88, 1); break;
			case BPF_H:
				EMIT2(0x66, 0x89); break;
			case BPF_W:
			case BPF_DW:
				EMIT(0x89, 1); break;
			}

			if (is_imm8(insn->off))
				EMIT2(add_2reg(0x40, IA32_EAX, IA32_EDX),
				      insn->off);
			else
				EMIT1_off32(add_2reg(0x80, IA32_EAX, IA32_EDX),
					    insn->off);

			if (BPF_SIZE(code) == BPF_DW) {
				if (sstk)
					/* mov edx,dword ptr [ebp+off] */
					EMIT3(0x8B, add_2reg(0x40, IA32_EBP,
							     IA32_EDX),
					      STACK_VAR(src_hi));
				else
					/* mov edx,src_hi */
					EMIT2(0x8B, add_2reg(0xC0, src_hi,
							     IA32_EDX));
				EMIT1(0x89);
				if (is_imm8(insn->off + 4)) {
					EMIT2(add_2reg(0x40, IA32_EAX,
						       IA32_EDX),
					      insn->off + 4);
				} else {
					EMIT1(add_2reg(0x80, IA32_EAX,
						       IA32_EDX));
					EMIT(insn->off + 4, 4);
				}
			}
			break;
		/* LDX: dst_reg = *(u8*)(src_reg + off) */
		case BPF_LDX | BPF_MEM | BPF_B:
		case BPF_LDX | BPF_MEM | BPF_H:
		case BPF_LDX | BPF_MEM | BPF_W:
		case BPF_LDX | BPF_MEM | BPF_DW:
			if (sstk)
				/* mov eax,dword ptr [ebp+off] */
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(src_lo));
			else
				/* mov eax,src_lo */
				EMIT2(0x8B, add_2reg(0xC0, src_lo, IA32_EAX));

			switch (BPF_SIZE(code)) {
			case BPF_B:
				EMIT2(0x0F, 0xB6); break;
			case BPF_H:
				EMIT2(0x0F, 0xB7); break;
			case BPF_W:
			case BPF_DW:
				EMIT(0x8B, 1); break;
			}

			if (is_imm8(insn->off))
				EMIT2(add_2reg(0x40, IA32_EAX, IA32_EDX),
				      insn->off);
			else
				EMIT1_off32(add_2reg(0x80, IA32_EAX, IA32_EDX),
					    insn->off);

			if (dstk)
				/* mov dword ptr [ebp+off],edx */
				EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EDX),
				      STACK_VAR(dst_lo));
			else
				/* mov dst_lo,edx */
				EMIT2(0x89, add_2reg(0xC0, dst_lo, IA32_EDX));
			switch (BPF_SIZE(code)) {
			case BPF_B:
			case BPF_H:
			case BPF_W:
				if (dstk) {
					EMIT3(0xC7, add_1reg(0x40, IA32_EBP),
					      STACK_VAR(dst_hi));
					EMIT(0x0, 4);
				} else {
					/* mov dst_hi,0 */
					EMIT2_off32(0xC7,
						    add_1reg(0xC0, dst_hi), 0);
				}
				break;
			case BPF_DW:
				EMIT2_off32(0x8B,
					    add_2reg(0x80, IA32_EAX, IA32_EDX),
					    insn->off + 4);
				if (dstk)
					EMIT3(0x89,
					      add_2reg(0x40, IA32_EBP,
						       IA32_EDX),
					      STACK_VAR(dst_hi));
				else
					EMIT2(0x89,
					      add_2reg(0xC0, dst_hi,
						       IA32_EDX));
				break;
			default:
				break;
			}
			break;
		/* call */
		case BPF_JMP | BPF_CALL:
		{
			const u8 *r1 = bpf2ia32[BPF_REG_1];
			const u8 *r2 = bpf2ia32[BPF_REG_2];
			const u8 *r3 = bpf2ia32[BPF_REG_3];
			const u8 *r4 = bpf2ia32[BPF_REG_4];
			const u8 *r5 = bpf2ia32[BPF_REG_5];

			if (insn->src_reg == BPF_PSEUDO_CALL)
				goto notyet;

			func = (u8 *) __bpf_call_base + imm32;
			jmp_offset = func - (image + addrs[i]);

			if (!imm32 || !is_simm32(jmp_offset)) {
				pr_err("unsupported BPF func %d addr %p image %p\n",
				       imm32, func, image);
				return -EINVAL;
			}

			/* mov eax,dword ptr [ebp+off] */
			EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
			      STACK_VAR(r1[0]));
			/* mov edx,dword ptr [ebp+off] */
			EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
			      STACK_VAR(r1[1]));

			emit_push_r64(r5, &prog);
			emit_push_r64(r4, &prog);
			emit_push_r64(r3, &prog);
			emit_push_r64(r2, &prog);

			EMIT1_off32(0xE8, jmp_offset + 9);

			/* mov dword ptr [ebp+off],eax */
			EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EAX),
			      STACK_VAR(r0[0]));
			/* mov dword ptr [ebp+off],edx */
			EMIT3(0x89, add_2reg(0x40, IA32_EBP, IA32_EDX),
			      STACK_VAR(r0[1]));

			/* add esp,32 */
			EMIT3(0x83, add_1reg(0xC0, IA32_ESP), 32);
			break;
		}
		case BPF_JMP | BPF_TAIL_CALL:
			emit_bpf_tail_call(&prog);
			break;
		/* cond jump */
		case BPF_JMP | BPF_JEQ | BPF_X:
		case BPF_JMP | BPF_JNE | BPF_X:
		case BPF_JMP | BPF_JGT | BPF_X:
		case BPF_JMP | BPF_JLT | BPF_X:
		case BPF_JMP | BPF_JGE | BPF_X:
		case BPF_JMP | BPF_JLE | BPF_X:
		case BPF_JMP | BPF_JSGT | BPF_X:
		case BPF_JMP | BPF_JSLE | BPF_X:
		case BPF_JMP | BPF_JSLT | BPF_X:
		case BPF_JMP | BPF_JSGE | BPF_X: {
			u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
			u8 dreg_hi = dstk ? IA32_EDX : dst_hi;
			u8 sreg_lo = sstk ? IA32_ECX : src_lo;
			u8 sreg_hi = sstk ? IA32_EBX : src_hi;

			if (dstk) {
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(dst_lo));
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
				      STACK_VAR(dst_hi));
			}

			if (sstk) {
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX),
				      STACK_VAR(src_lo));
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EBX),
				      STACK_VAR(src_hi));
			}

			/* cmp dreg_hi,sreg_hi */
			EMIT2(0x39, add_2reg(0xC0, dreg_hi, sreg_hi));
			EMIT2(IA32_JNE, 2);
			/* cmp dreg_lo,sreg_lo */
			EMIT2(0x39, add_2reg(0xC0, dreg_lo, sreg_lo));
			goto emit_cond_jmp;
		}
		case BPF_JMP | BPF_JSET | BPF_X: {
			u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
			u8 dreg_hi = dstk ? IA32_EDX : dst_hi;
			u8 sreg_lo = sstk ? IA32_ECX : src_lo;
			u8 sreg_hi = sstk ? IA32_EBX : src_hi;

			if (dstk) {
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(dst_lo));
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
				      STACK_VAR(dst_hi));
			}

			if (sstk) {
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_ECX),
				      STACK_VAR(src_lo));
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EBX),
				      STACK_VAR(src_hi));
			}
			/* and dreg_lo,sreg_lo */
			EMIT2(0x23, add_2reg(0xC0, sreg_lo, dreg_lo));
			/* and dreg_hi,sreg_hi */
			EMIT2(0x23, add_2reg(0xC0, sreg_hi, dreg_hi));
			/* or dreg_lo,dreg_hi */
			EMIT2(0x09, add_2reg(0xC0, dreg_lo, dreg_hi));
			goto emit_cond_jmp;
		}
		case BPF_JMP | BPF_JSET | BPF_K: {
			u32 hi;
			u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
			u8 dreg_hi = dstk ? IA32_EDX : dst_hi;
			u8 sreg_lo = IA32_ECX;
			u8 sreg_hi = IA32_EBX;

			if (dstk) {
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(dst_lo));
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
				      STACK_VAR(dst_hi));
			}
			hi = imm32 & (1<<31) ? (u32)~0 : 0;

			/* mov ecx,imm32 */
			EMIT2_off32(0xC7, add_1reg(0xC0, IA32_ECX), imm32);
			/* mov ebx,imm32 */
			EMIT2_off32(0xC7, add_1reg(0xC0, IA32_EBX), hi);

			/* and dreg_lo,sreg_lo */
			EMIT2(0x23, add_2reg(0xC0, sreg_lo, dreg_lo));
			/* and dreg_hi,sreg_hi */
			EMIT2(0x23, add_2reg(0xC0, sreg_hi, dreg_hi));
			/* or dreg_lo,dreg_hi */
			EMIT2(0x09, add_2reg(0xC0, dreg_lo, dreg_hi));
			goto emit_cond_jmp;
		}
		case BPF_JMP | BPF_JEQ | BPF_K:
		case BPF_JMP | BPF_JNE | BPF_K:
		case BPF_JMP | BPF_JGT | BPF_K:
		case BPF_JMP | BPF_JLT | BPF_K:
		case BPF_JMP | BPF_JGE | BPF_K:
		case BPF_JMP | BPF_JLE | BPF_K:
		case BPF_JMP | BPF_JSGT | BPF_K:
		case BPF_JMP | BPF_JSLE | BPF_K:
		case BPF_JMP | BPF_JSLT | BPF_K:
		case BPF_JMP | BPF_JSGE | BPF_K: {
			u32 hi;
			u8 dreg_lo = dstk ? IA32_EAX : dst_lo;
			u8 dreg_hi = dstk ? IA32_EDX : dst_hi;
			u8 sreg_lo = IA32_ECX;
			u8 sreg_hi = IA32_EBX;

			if (dstk) {
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX),
				      STACK_VAR(dst_lo));
				EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX),
				      STACK_VAR(dst_hi));
			}

			hi = imm32 & (1<<31) ? (u32)~0 : 0;
			/* mov ecx,imm32 */
			EMIT2_off32(0xC7, add_1reg(0xC0, IA32_ECX), imm32);
			/* mov ebx,imm32 */
			EMIT2_off32(0xC7, add_1reg(0xC0, IA32_EBX), hi);

			/* cmp dreg_hi,sreg_hi */
			EMIT2(0x39, add_2reg(0xC0, dreg_hi, sreg_hi));
			EMIT2(IA32_JNE, 2);
			/* cmp dreg_lo,sreg_lo */
			EMIT2(0x39, add_2reg(0xC0, dreg_lo, sreg_lo));
		}
emit_cond_jmp:		/* Convert BPF opcode to x86 */
			switch (BPF_OP(code)) {
			case BPF_JEQ:
				jmp_cond = IA32_JE;
				break;
			case BPF_JSET:
			case BPF_JNE:
				jmp_cond = IA32_JNE;
				break;
			case BPF_JGT:
				/* GT is unsigned '>', JA in x86 */
				jmp_cond = IA32_JA;
				break;
			case BPF_JLT:
				/* LT is unsigned '<', JB in x86 */
				jmp_cond = IA32_JB;
				break;
			case BPF_JGE:
				/* GE is unsigned '>=', JAE in x86 */
				jmp_cond = IA32_JAE;
				break;
			case BPF_JLE:
				/* LE is unsigned '<=', JBE in x86 */
				jmp_cond = IA32_JBE;
				break;
			case BPF_JSGT:
				/* Signed '>', GT in x86 */
				jmp_cond = IA32_JG;
				break;
			case BPF_JSLT:
				/* Signed '<', LT in x86 */
				jmp_cond = IA32_JL;
				break;
			case BPF_JSGE:
				/* Signed '>=', GE in x86 */
				jmp_cond = IA32_JGE;
				break;
			case BPF_JSLE:
				/* Signed '<=', LE in x86 */
				jmp_cond = IA32_JLE;
				break;
			default: /* to silence GCC warning */
				return -EFAULT;
			}
			jmp_offset = addrs[i + insn->off] - addrs[i];
			if (is_imm8(jmp_offset)) {
				EMIT2(jmp_cond, jmp_offset);
			} else if (is_simm32(jmp_offset)) {
				EMIT2_off32(0x0F, jmp_cond + 0x10, jmp_offset);
			} else {
				pr_err("cond_jmp gen bug %llx\n", jmp_offset);
				return -EFAULT;
			}
			break;
		case BPF_JMP | BPF_JA:
			if (insn->off == -1)
				/* -1 jmp instructions will always jump
				 * backwards two bytes. Explicitly handling
				 * this case avoids wasting too many passes
				 * when there are long sequences of replaced
				 * dead code.
				 */
				jmp_offset = -2;
			else
				jmp_offset = addrs[i + insn->off] - addrs[i];

			if (!jmp_offset)
				/* Optimize out nop jumps */
				break;
emit_jmp:
			if (is_imm8(jmp_offset)) {
				EMIT2(0xEB, jmp_offset);
			} else if (is_simm32(jmp_offset)) {
				EMIT1_off32(0xE9, jmp_offset);
			} else {
				pr_err("jmp gen bug %llx\n", jmp_offset);
				return -EFAULT;
			}
			break;
		/* STX XADD: lock *(u32 *)(dst + off) += src */
		case BPF_STX | BPF_XADD | BPF_W:
		/* STX XADD: lock *(u64 *)(dst + off) += src */
		case BPF_STX | BPF_XADD | BPF_DW:
			goto notyet;
		case BPF_JMP | BPF_EXIT:
			if (seen_exit) {
				jmp_offset = ctx->cleanup_addr - addrs[i];
				goto emit_jmp;
			}
			seen_exit = true;
			/* Update cleanup_addr */
			ctx->cleanup_addr = proglen;
			emit_epilogue(&prog, bpf_prog->aux->stack_depth);
			break;
notyet:
			pr_info_once("*** NOT YET: opcode %02x ***\n", code);
			return -EFAULT;
		default:
			/*
			 * This error will be seen if new instruction was added
			 * to interpreter, but not to JIT or if there is junk in
			 * bpf_prog
			 */
			pr_err("bpf_jit: unknown opcode %02x\n", code);
			return -EINVAL;
		}
		ilen = prog - temp;
		if (ilen > BPF_MAX_INSN_SIZE) {
			pr_err("bpf_jit: fatal insn size error\n");
			return -EFAULT;
		}

		if (image) {
			if (unlikely(proglen + ilen > oldproglen)) {
				pr_err("bpf_jit: fatal error\n");
				return -EFAULT;
			}
			memcpy(image + proglen, temp, ilen);
		}
		proglen += ilen;
		addrs[i] = proglen;
		prog = temp;
	}
	return proglen;
}
struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog)
{
	struct bpf_binary_header *header = NULL;
	struct bpf_prog *tmp, *orig_prog = prog;
	int proglen, oldproglen = 0;
	struct jit_context ctx = {};
	bool tmp_blinded = false;
	u8 *image = NULL;
	int *addrs;
	int pass;
	int i;

	if (!prog->jit_requested)
		return orig_prog;

	tmp = bpf_jit_blind_constants(prog);
	/*
	 * If blinding was requested and we failed during blinding,
	 * we must fall back to the interpreter.
	 */
	if (IS_ERR(tmp))
		return orig_prog;
	if (tmp != prog) {
		tmp_blinded = true;
		prog = tmp;
	}

	addrs = kmalloc_array(prog->len, sizeof(*addrs), GFP_KERNEL);
	if (!addrs) {
		prog = orig_prog;
		goto out;
	}

	/*
	 * Before first pass, make a rough estimation of addrs[]
	 * each BPF instruction is translated to less than 64 bytes
	 */
	for (proglen = 0, i = 0; i < prog->len; i++) {
		proglen += 64;
		addrs[i] = proglen;
	}
	ctx.cleanup_addr = proglen;

	/*
	 * JITed image shrinks with every pass and the loop iterates
	 * until the image stops shrinking. Very large BPF programs
	 * may converge on the last pass. In such case do one more
	 * pass to emit the final image.
	 */
	for (pass = 0; pass < 20 || image; pass++) {
		proglen = do_jit(prog, addrs, image, oldproglen, &ctx);
		if (proglen <= 0) {
out_image:
			image = NULL;
			if (header)
				bpf_jit_binary_free(header);
			prog = orig_prog;
			goto out_addrs;
		}
		if (image) {
			if (proglen != oldproglen) {
				pr_err("bpf_jit: proglen=%d != oldproglen=%d\n",
				       proglen, oldproglen);
				goto out_image;
			}
			break;
		}
		if (proglen == oldproglen) {
			header = bpf_jit_binary_alloc(proglen, &image,
						      1, jit_fill_hole);
			if (!header) {
				prog = orig_prog;
				goto out_addrs;
			}
		}
		oldproglen = proglen;
		cond_resched();
	}

	if (bpf_jit_enable > 1)
		bpf_jit_dump(prog->len, proglen, pass + 1, image);

	if (image) {
		bpf_jit_binary_lock_ro(header);
		prog->bpf_func = (void *)image;
		prog->jited = 1;
		prog->jited_len = proglen;
	} else {
		prog = orig_prog;
	}

out_addrs:
	kfree(addrs);
out:
	if (tmp_blinded)
		bpf_jit_prog_release_other(prog, prog == orig_prog ?
					   tmp : orig_prog);
	return prog;
}