#include <linux/bitops.h>
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/filter.h>
#include <linux/netdevice.h>
#include <linux/string.h>
#include <linux/slab.h>
#include <asm/cacheflush.h>
#include <asm/hwcap.h>
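/*
 * Register conventions used by the generated code (per the definitions
 * below): ARM r0 doubles as a scratch register, while r7 and r8 cache
 * skb->data and the skb headlen so packet loads can be emitted inline.
 */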
#define r_scratch	ARM_R0
#define r_skb_data	ARM_R7
#define r_skb_hl	ARM_R8

#define SCRATCH_SP_OFFSET	0
#define SCRATCH_OFF(k)		(SCRATCH_SP_OFFSET + (k))
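/*
 * The SEEN_* bits record which resources the filter actually uses: the
 * individual BPF scratch memory words M[k], the X register, calls to
 * helper functions, the skb pointer and the packet data. The prologue
 * and epilogue only save and set up what was seen.
 */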
#define SEEN_MEM		((1 << BPF_MEMWORDS) - 1)
#define SEEN_MEM_WORD(k)	(1 << (k))
#define SEEN_X			(1 << BPF_MEMWORDS)
#define SEEN_CALL		(1 << (BPF_MEMWORDS + 1))
#define SEEN_SKB		(1 << (BPF_MEMWORDS + 2))
#define SEEN_DATA		(1 << (BPF_MEMWORDS + 3))
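/*
 * Set when X is read before the filter has written it, so the prologue
 * must clear r_X (see update_on_xread() below).
 */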
#define FLAG_NEED_X_RESET	(1 << 0)
#if __LINUX_ARM_ARCH__ < 7
	return (u64)err << 32 | ret;
static u64 jit_get_skb_w(struct sk_buff *skb, unsigned offset)
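/*
 * jit_get_skb_w() and its byte/halfword siblings are the out-of-line slow
 * path for packet loads: they fetch through skb_copy_bits() and return the
 * error code in the upper 32 bits and the loaded value in the lower 32
 * (see the return statement above).
 */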
		ctx->target[ctx->idx] = inst | (cond << 28);
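	/*
	 * The store above only happens during the second pass; on the first
	 * pass ctx->target is NULL and only ctx->idx advances, which is how
	 * the image size and the per-instruction offsets are computed.
	 */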
	if ((ctx->skf->len > 1) ||
#ifdef CONFIG_FRAME_POINTER
static inline int mem_words_used(struct jit_ctx *ctx)
static inline bool is_load_to_a(u16 inst)
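/*
 * is_load_to_a() reports whether an instruction overwrites A, so the
 * prologue can skip the initial clearing of r_A (see build_prologue()).
 */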
static void build_prologue(struct jit_ctx *ctx)
	u16 first_inst = ctx->skf->insns[0].code;
#ifdef CONFIG_FRAME_POINTER
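	/*
	 * Do not leak kernel data: unless the first instruction returns a
	 * constant or overwrites A anyway, r_A is cleared here.
	 */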
	if ((first_inst != BPF_S_RET_K) && !(is_load_to_a(first_inst)))
static void build_epilogue(struct jit_ctx *ctx)
	reg_set &= ~(1 << ARM_LR);
#ifdef CONFIG_FRAME_POINTER
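	/* The first instruction of the prologue was "mov ip, sp". */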
	reg_set &= ~(1 << ARM_IP);
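/*
 * imm8m() below tries every even rotation to check whether a constant can
 * be encoded as an ARM data-processing immediate (an 8-bit value rotated
 * right by 2 * rot); it returns the ready-to-use encoding, or a negative
 * value if none fits.
 */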
	for (rot = 0; rot < 16; rot++)
		if ((x & ~ror32(0xff, 2 * rot)) == 0)
			return rol32(x, 2 * rot) | (rot << 8);
#if __LINUX_ARM_ARCH__ < 7
	while ((i < ctx->imm_count) && ctx->imms[i]) {
		if (ctx->imms[i] == k)
	if (ctx->imms[i] == 0)
	imm = offset - (8 + ctx->idx * 4);
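	/*
	 * Pre-ARMv7 cores lack MOVW/MOVT, so constants that do not encode as
	 * an immediate are placed in a literal pool after the epilogue and
	 * loaded PC-relative; the "8 + ctx->idx * 4" term accounts for the
	 * ARM PC reading two instructions ahead.
	 */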
static inline void emit_mov_i_no8m(int rd, u32 val, struct jit_ctx *ctx)
#if __LINUX_ARM_ARCH__ < 7
static inline void emit_mov_i(int rd, u32 val, struct jit_ctx *ctx)
	int imm12 = imm8m(val);
		emit_mov_i_no8m(rd, val, ctx);
#if __LINUX_ARM_ARCH__ < 6
static void emit_load_be16(u8 cond, u8 r_res, u8 r_addr, struct jit_ctx *ctx)
static inline void emit_swap16(u8 r_dst, u8 r_src, struct jit_ctx *ctx)
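/*
 * The ARMv6+ variants follow: on little-endian kernels a plain (half)word
 * load is followed by REV/REV16 to produce the big-endian value that BPF
 * expects; on big-endian kernels the byteswap is compiled out.
 */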
static void emit_load_be32(u8 cond, u8 r_res, u8 r_addr, struct jit_ctx *ctx)
	_emit(cond, ARM_LDR_I(r_res, r_addr, 0), ctx);
#ifdef __LITTLE_ENDIAN
	_emit(cond, ARM_REV(r_res, r_res), ctx);
static void emit_load_be16(u8 cond, u8 r_res, u8 r_addr, struct jit_ctx *ctx)
	_emit(cond, ARM_LDRH_I(r_res, r_addr, 0), ctx);
#ifdef __LITTLE_ENDIAN
	_emit(cond, ARM_REV16(r_res, r_res), ctx);
static inline void emit_swap16(u8 r_dst __maybe_unused, u8 r_src __maybe_unused,
			       struct jit_ctx *ctx __maybe_unused)
#ifdef __LITTLE_ENDIAN
static inline u32 b_imm(unsigned tgt, struct jit_ctx *ctx)
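/*
 * b_imm() turns a BPF instruction index into a PC-relative branch offset,
 * using the per-instruction offsets recorded during the first pass plus
 * the prologue size, minus the usual 8-byte PC read-ahead.
 */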
#define OP_IMM3(op, r1, r2, imm_val, ctx)				\
	do {								\
		imm12 = imm8m(imm_val);					\
		if (imm12 < 0) {					\
			emit_mov_i_no8m(r_scratch, imm_val, ctx);	\
			emit(op ## _R((r1), (r2), r_scratch), ctx);	\
		} else {						\
			emit(op ## _I((r1), (r2), imm12), ctx);		\
		}							\
	} while (0)
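/*
 * OP_IMM3 emits the immediate form of an ALU op when the constant encodes
 * as imm8m(); otherwise it materializes the constant in r_scratch and
 * falls back to the register form.
 */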
static inline void emit_err_ret(u8 cond, struct jit_ctx *ctx)
		_emit(cond, ARM_B(b_imm(ctx->skf->len, ctx)), ctx);
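	/*
	 * Branching to instruction index skf->len, i.e. just past the last
	 * BPF instruction, lands in the epilogue, so a failed packet load
	 * bails out instead of executing the rest of the filter.
	 */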
static inline void emit_blx_r(u8 tgt_reg, struct jit_ctx *ctx)
#if __LINUX_ARM_ARCH__ < 5
static inline void emit_udiv(u8 rd, u8 rm, u8 rn, struct jit_ctx *ctx)
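/*
 * emit_udiv() can use the hardware UDIV instruction when the ARMv7 core
 * advertises it through the hwcaps; otherwise the division is done via a
 * helper call, with the operands shuffled into r0/r1 first.
 */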
#if __LINUX_ARM_ARCH__ == 7
static inline void update_on_xread(struct jit_ctx *ctx)
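/*
 * Called before emitting any instruction that reads X: marks SEEN_X and,
 * if X has not been written yet, FLAG_NEED_X_RESET so the prologue zeroes
 * r_X.
 */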
static int build_body(struct jit_ctx *ctx)
	void *load_func[] = {jit_get_skb_b, jit_get_skb_h, jit_get_skb_w};
	unsigned i, load_order, off, condt;
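	/*
	 * build_body() walks the BPF program once per pass; load_func[] maps
	 * load_order (0, 1, 2 for byte, halfword, word) to the matching
	 * slow-path helper declared above.
	 */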
	for (i = 0; i < prog->len; i++) {
		switch (inst->code) {
			emit_mov_i(r_A, k, ctx);
			emit_mov_i(r_off, k, ctx);
			if (load_order > 0) {
				emit(ARM_SUB_I(r_scratch, r_skb_hl,
					       1 << load_order), ctx);
			else if (load_order == 1)
			else if (load_order == 2)
			_emit(condt, ARM_B(b_imm(i + 1, ctx)), ctx);
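			/*
			 * The conditional branch above skips the out-of-line
			 * slow path emitted next: the helper address is
			 * loaded into r3 and called when the requested data
			 * is not in the linear skb area.
			 */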
			emit_mov_i(ARM_R3, (u32)load_func[load_order], ctx);
			emit_mov_i(r_X, k, ctx);
			emit_mov_i(r_off, k, ctx);
			emit_mov_i(ARM_R3, (u32)jit_get_skb_b, ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
			update_on_xread(ctx);
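			/*
			 * The update_on_xread() calls above come from the
			 * separate switch cases that read the X register, so
			 * the prologue knows whether r_X must be cleared.
			 */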
			_emit(condt, ARM_B(b_imm(i + inst->jt + 1, ctx)), ctx);
			_emit(condt ^ 1, ARM_B(b_imm(i + inst->jf + 1, ctx)), ctx);
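			/*
			 * Conditional jumps: one branch is emitted under
			 * condt for the jt target and one under the inverted
			 * condition (condt ^ 1) for the jf target, both
			 * resolved through b_imm() against the first-pass
			 * offsets.
			 */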
			update_on_xread(ctx);
			update_on_xread(ctx);
			emit_mov_i(ARM_R0, k, ctx);
			if (i != ctx->skf->len - 1)
			update_on_xread(ctx);
			update_on_xread(ctx);
			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff,
						  queue_mapping) != 2);
			BUILD_BUG_ON(offsetof(struct sk_buff,
					      queue_mapping) > 0xff);
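			/*
			 * Ancillary loads read skb fields directly; the
			 * BUILD_BUG_ONs pin the expected size of
			 * queue_mapping and make sure its offset still fits
			 * the load instruction's immediate field.
			 */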
	memset(&ctx, 0, sizeof(ctx));
	build_prologue(&ctx);
#if __LINUX_ARM_ARCH__ < 7
	build_epilogue(&ctx);
	build_epilogue(&ctx);
	alloc_size = 4 * ctx.idx;
	build_prologue(&ctx);
	build_epilogue(&ctx);
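	/*
	 * The compile path runs prologue/body/epilogue generation twice: a
	 * first dry run with ctx.target == NULL to size the image
	 * (alloc_size = 4 * ctx.idx above), then a second run that emits the
	 * real instructions into the allocated buffer.
	 */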
#if __LINUX_ARM_ARCH__ < 7
		print_hex_dump(KERN_INFO, "BPF JIT code: ",