/* Byte size of the long-jump patch site emitted at a BPF prog's entry */
#define LOONGARCH_BPF_FENTRY_NBYTES	(LOONGARCH_LONG_JUMP_NINSNS * 4)

#define REG_TCC		LOONGARCH_GPR_A6
#define REG_ARENA	LOONGARCH_GPR_S6	/* For storing arena_vm_start */

/*
 * Stack offset of the saved tail-call-count pointer for a given stack size.
 * NOTE(review): this must be a function-like macro — no space is allowed
 * between the macro name and the parameter list, otherwise the preprocessor
 * treats it as an object-like macro expanding to "(stack) (round_up(...))"
 * and every use site breaks.
 */
#define BPF_TAIL_CALL_CNT_PTR_STACK_OFF(stack)	(round_up(stack, 16) - 80)
2223static const int regmap [] = {
@@ -136,6 +137,9 @@ static void build_prologue(struct jit_ctx *ctx)
136137 /* To store tcc and tcc_ptr */
137138 stack_adjust += sizeof (long ) * 2 ;
138139
140+ if (ctx -> arena_vm_start )
141+ stack_adjust += 8 ;
142+
139143 stack_adjust = round_up (stack_adjust , 16 );
140144 stack_adjust += bpf_stack_adjust ;
141145
@@ -178,6 +182,11 @@ static void build_prologue(struct jit_ctx *ctx)
178182 store_offset -= sizeof (long );
179183 emit_insn (ctx , std , LOONGARCH_GPR_S5 , LOONGARCH_GPR_SP , store_offset );
180184
185+ if (ctx -> arena_vm_start ) {
186+ store_offset -= sizeof (long );
187+ emit_insn (ctx , std , REG_ARENA , LOONGARCH_GPR_SP , store_offset );
188+ }
189+
181190 prepare_bpf_tail_call_cnt (ctx , & store_offset );
182191
183192 emit_insn (ctx , addid , LOONGARCH_GPR_FP , LOONGARCH_GPR_SP , stack_adjust );
@@ -186,6 +195,9 @@ static void build_prologue(struct jit_ctx *ctx)
186195 emit_insn (ctx , addid , regmap [BPF_REG_FP ], LOONGARCH_GPR_SP , bpf_stack_adjust );
187196
188197 ctx -> stack_size = stack_adjust ;
198+
199+ if (ctx -> arena_vm_start )
200+ move_imm (ctx , REG_ARENA , ctx -> arena_vm_start , false);
189201}
190202
191203static void __build_epilogue (struct jit_ctx * ctx , bool is_tail_call )
@@ -217,6 +229,11 @@ static void __build_epilogue(struct jit_ctx *ctx, bool is_tail_call)
217229 load_offset -= sizeof (long );
218230 emit_insn (ctx , ldd , LOONGARCH_GPR_S5 , LOONGARCH_GPR_SP , load_offset );
219231
232+ if (ctx -> arena_vm_start ) {
233+ load_offset -= sizeof (long );
234+ emit_insn (ctx , ldd , REG_ARENA , LOONGARCH_GPR_SP , load_offset );
235+ }
236+
220237 /*
221238 * When push into the stack, follow the order of tcc then tcc_ptr.
222239 * When pop from the stack, first pop tcc_ptr then followed by tcc.
@@ -442,14 +459,16 @@ static bool is_signed_bpf_cond(u8 cond)
442459
/*
 * Encoding of ex->fixup: bits 31-27 hold the destination register number,
 * bits 26-0 hold the offset back to the fixup branch target.
 */
#define BPF_FIXUP_REG_MASK	GENMASK(31, 27)
#define BPF_FIXUP_OFFSET_MASK	GENMASK(26, 0)
/*
 * Register value 0 encoded in the fixup reg field means "do not clear any
 * register" — used by store (ST/STX PROBE_MEM32) exception handlers, which
 * have no destination register to zero on a fault.
 */
#define REG_DONT_CLEAR_MARKER	0
445463
446464bool ex_handler_bpf (const struct exception_table_entry * ex ,
447465 struct pt_regs * regs )
448466{
449467 int dst_reg = FIELD_GET (BPF_FIXUP_REG_MASK , ex -> fixup );
450468 off_t offset = FIELD_GET (BPF_FIXUP_OFFSET_MASK , ex -> fixup );
451469
452- regs -> regs [dst_reg ] = 0 ;
470+ if (dst_reg != REG_DONT_CLEAR_MARKER )
471+ regs -> regs [dst_reg ] = 0 ;
453472 regs -> csr_era = (unsigned long )& ex -> fixup - offset ;
454473
455474 return true;
@@ -468,7 +487,8 @@ static int add_exception_handler(const struct bpf_insn *insn,
468487 return 0 ;
469488
470489 if (BPF_MODE (insn -> code ) != BPF_PROBE_MEM &&
471- BPF_MODE (insn -> code ) != BPF_PROBE_MEMSX )
490+ BPF_MODE (insn -> code ) != BPF_PROBE_MEMSX &&
491+ BPF_MODE (insn -> code ) != BPF_PROBE_MEM32 )
472492 return 0 ;
473493
474494 if (WARN_ON_ONCE (ctx -> num_exentries >= ctx -> prog -> aux -> num_exentries ))
@@ -528,8 +548,9 @@ static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, bool ext
528548 const u8 cond = BPF_OP (code );
529549 const u8 t1 = LOONGARCH_GPR_T1 ;
530550 const u8 t2 = LOONGARCH_GPR_T2 ;
531- const u8 src = regmap [insn -> src_reg ];
532- const u8 dst = regmap [insn -> dst_reg ];
551+ const u8 t3 = LOONGARCH_GPR_T3 ;
552+ u8 src = regmap [insn -> src_reg ];
553+ u8 dst = regmap [insn -> dst_reg ];
533554 const s16 off = insn -> off ;
534555 const s32 imm = insn -> imm ;
535556 const bool is32 = BPF_CLASS (insn -> code ) == BPF_ALU || BPF_CLASS (insn -> code ) == BPF_JMP32 ;
@@ -1035,8 +1056,19 @@ static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, bool ext
10351056 case BPF_LDX | BPF_PROBE_MEMSX | BPF_B :
10361057 case BPF_LDX | BPF_PROBE_MEMSX | BPF_H :
10371058 case BPF_LDX | BPF_PROBE_MEMSX | BPF_W :
1038- sign_extend = BPF_MODE (insn -> code ) == BPF_MEMSX ||
1039- BPF_MODE (insn -> code ) == BPF_PROBE_MEMSX ;
1059+ /* LDX | PROBE_MEM32: dst = *(unsigned size *)(src + REG_ARENA + off) */
1060+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_B :
1061+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_H :
1062+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_W :
1063+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_DW :
1064+ sign_extend = BPF_MODE (code ) == BPF_MEMSX ||
1065+ BPF_MODE (code ) == BPF_PROBE_MEMSX ;
1066+
1067+ if (BPF_MODE (code ) == BPF_PROBE_MEM32 ) {
1068+ emit_insn (ctx , addd , t2 , src , REG_ARENA );
1069+ src = t2 ;
1070+ }
1071+
10401072 switch (BPF_SIZE (code )) {
10411073 case BPF_B :
10421074 if (is_signed_imm12 (off )) {
@@ -1096,6 +1128,16 @@ static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, bool ext
10961128 case BPF_ST | BPF_MEM | BPF_H :
10971129 case BPF_ST | BPF_MEM | BPF_W :
10981130 case BPF_ST | BPF_MEM | BPF_DW :
1131+ /* ST | PROBE_MEM32: *(size *)(dst + REG_ARENA + off) = imm */
1132+ case BPF_ST | BPF_PROBE_MEM32 | BPF_B :
1133+ case BPF_ST | BPF_PROBE_MEM32 | BPF_H :
1134+ case BPF_ST | BPF_PROBE_MEM32 | BPF_W :
1135+ case BPF_ST | BPF_PROBE_MEM32 | BPF_DW :
1136+ if (BPF_MODE (code ) == BPF_PROBE_MEM32 ) {
1137+ emit_insn (ctx , addd , t3 , dst , REG_ARENA );
1138+ dst = t3 ;
1139+ }
1140+
10991141 switch (BPF_SIZE (code )) {
11001142 case BPF_B :
11011143 move_imm (ctx , t1 , imm , is32 );
@@ -1138,13 +1180,27 @@ static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, bool ext
11381180 }
11391181 break ;
11401182 }
1183+
1184+ ret = add_exception_handler (insn , ctx , REG_DONT_CLEAR_MARKER );
1185+ if (ret )
1186+ return ret ;
11411187 break ;
11421188
11431189 /* *(size *)(dst + off) = src */
11441190 case BPF_STX | BPF_MEM | BPF_B :
11451191 case BPF_STX | BPF_MEM | BPF_H :
11461192 case BPF_STX | BPF_MEM | BPF_W :
11471193 case BPF_STX | BPF_MEM | BPF_DW :
1194+ /* STX | PROBE_MEM32: *(size *)(dst + REG_ARENA + off) = src */
1195+ case BPF_STX | BPF_PROBE_MEM32 | BPF_B :
1196+ case BPF_STX | BPF_PROBE_MEM32 | BPF_H :
1197+ case BPF_STX | BPF_PROBE_MEM32 | BPF_W :
1198+ case BPF_STX | BPF_PROBE_MEM32 | BPF_DW :
1199+ if (BPF_MODE (code ) == BPF_PROBE_MEM32 ) {
1200+ emit_insn (ctx , addd , t2 , dst , REG_ARENA );
1201+ dst = t2 ;
1202+ }
1203+
11481204 switch (BPF_SIZE (code )) {
11491205 case BPF_B :
11501206 if (is_signed_imm12 (off )) {
@@ -1183,6 +1239,10 @@ static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, bool ext
11831239 }
11841240 break ;
11851241 }
1242+
1243+ ret = add_exception_handler (insn , ctx , REG_DONT_CLEAR_MARKER );
1244+ if (ret )
1245+ return ret ;
11861246 break ;
11871247
11881248 case BPF_STX | BPF_ATOMIC | BPF_W :
@@ -1894,6 +1954,7 @@ struct bpf_prog *bpf_int_jit_compile(struct bpf_prog *prog)
18941954
18951955 memset (& ctx , 0 , sizeof (ctx ));
18961956 ctx .prog = prog ;
1957+ ctx .arena_vm_start = bpf_arena_get_kern_vm_start (prog -> aux -> arena );
18971958
18981959 ctx .offset = kvcalloc (prog -> len + 1 , sizeof (u32 ), GFP_KERNEL );
18991960 if (ctx .offset == NULL ) {
0 commit comments