@@ -17019,27 +17019,6 @@ static int visit_func_call_insn(int t, struct bpf_insn *insns,
1701917019/* Bitmask with 1s for all caller saved registers */
1702017020#define ALL_CALLER_SAVED_REGS ((1u << CALLER_SAVED_REGS) - 1)
1702117021
17022- /* Return a bitmask specifying which caller saved registers are
17023- * clobbered by a call to a helper *as if* this helper follows
17024- * bpf_fastcall contract:
17025- * - includes R0 if function is non-void;
17026- * - includes R1-R5 if corresponding parameter has is described
17027- * in the function prototype.
17028- */
17029- static u32 helper_fastcall_clobber_mask(const struct bpf_func_proto *fn)
17030- {
17031- u32 mask;
17032- int i;
17033-
17034- mask = 0;
17035- if (fn->ret_type != RET_VOID)
17036- mask |= BIT(BPF_REG_0);
17037- for (i = 0; i < ARRAY_SIZE(fn->arg_type); ++i)
17038- if (fn->arg_type[i] != ARG_DONTCARE)
17039- mask |= BIT(BPF_REG_1 + i);
17040- return mask;
17041- }
17042-
1704317022/* True if do_misc_fixups() replaces calls to helper number 'imm',
1704417023 * replacement patch is presumed to follow bpf_fastcall contract
1704517024 * (see mark_fastcall_pattern_for_call() below).
@@ -17056,24 +17035,54 @@ static bool verifier_inlines_helper_call(struct bpf_verifier_env *env, s32 imm)
1705617035 }
1705717036}
1705817037
17059- /* Same as helper_fastcall_clobber_mask() but for kfuncs, see comment above */
17060- static u32 kfunc_fastcall_clobber_mask(struct bpf_kfunc_call_arg_meta *meta)
17038+ struct call_summary {
17039+ u8 num_params;
17040+ bool is_void;
17041+ bool fastcall;
17042+ };
17043+
17044+ /* If @call is a kfunc or helper call, fills @cs and returns true,
17045+ * otherwise returns false.
17046+ */
17047+ static bool get_call_summary(struct bpf_verifier_env *env, struct bpf_insn *call,
17048+ struct call_summary *cs)
1706117049{
17062- u32 vlen, i, mask;
17050+ struct bpf_kfunc_call_arg_meta meta;
17051+ const struct bpf_func_proto *fn;
17052+ int i;
1706317053
17064- vlen = btf_type_vlen(meta->func_proto);
17065- mask = 0;
17066- if (!btf_type_is_void(btf_type_by_id(meta->btf, meta->func_proto->type)))
17067- mask |= BIT(BPF_REG_0);
17068- for (i = 0; i < vlen; ++i)
17069- mask |= BIT(BPF_REG_1 + i);
17070- return mask;
17071- }
17054+ if (bpf_helper_call(call)) {
1707217055
17073- /* Same as verifier_inlines_helper_call() but for kfuncs, see comment above */
17074- static bool is_fastcall_kfunc_call(struct bpf_kfunc_call_arg_meta *meta)
17075- {
17076- return meta->kfunc_flags & KF_FASTCALL;
17056+ if (get_helper_proto(env, call->imm, &fn) < 0)
17057+ /* error would be reported later */
17058+ return false;
17059+ cs->fastcall = fn->allow_fastcall &&
17060+ (verifier_inlines_helper_call(env, call->imm) ||
17061+ bpf_jit_inlines_helper_call(call->imm));
17062+ cs->is_void = fn->ret_type == RET_VOID;
17063+ cs->num_params = 0;
17064+ for (i = 0; i < ARRAY_SIZE(fn->arg_type); ++i) {
17065+ if (fn->arg_type[i] == ARG_DONTCARE)
17066+ break;
17067+ cs->num_params++;
17068+ }
17069+ return true;
17070+ }
17071+
17072+ if (bpf_pseudo_kfunc_call(call)) {
17073+ int err;
17074+
17075+ err = fetch_kfunc_meta(env, call, &meta, NULL);
17076+ if (err < 0)
17077+ /* error would be reported later */
17078+ return false;
17079+ cs->num_params = btf_type_vlen(meta.func_proto);
17080+ cs->fastcall = meta.kfunc_flags & KF_FASTCALL;
17081+ cs->is_void = btf_type_is_void(btf_type_by_id(meta.btf, meta.func_proto->type));
17082+ return true;
17083+ }
17084+
17085+ return false;
1707717086}
1707817087
1707917088/* LLVM define a bpf_fastcall function attribute.
@@ -17156,39 +17165,23 @@ static void mark_fastcall_pattern_for_call(struct bpf_verifier_env *env,
1715617165{
1715717166 struct bpf_insn *insns = env->prog->insnsi, *stx, *ldx;
1715817167 struct bpf_insn *call = &env->prog->insnsi[insn_idx];
17159- const struct bpf_func_proto *fn ;
17160- u32 clobbered_regs_mask = ALL_CALLER_SAVED_REGS ;
17168+ u32 clobbered_regs_mask ;
17169+ struct call_summary cs ;
1716117170 u32 expected_regs_mask;
17162- bool can_be_inlined = false;
1716317171 s16 off;
1716417172 int i;
1716517173
17166- if (bpf_helper_call(call)) {
17167- if (get_helper_proto(env, call->imm, &fn) < 0)
17168- /* error would be reported later */
17169- return;
17170- clobbered_regs_mask = helper_fastcall_clobber_mask(fn);
17171- can_be_inlined = fn->allow_fastcall &&
17172- (verifier_inlines_helper_call(env, call->imm) ||
17173- bpf_jit_inlines_helper_call(call->imm));
17174- }
17175-
17176- if (bpf_pseudo_kfunc_call(call)) {
17177- struct bpf_kfunc_call_arg_meta meta;
17178- int err;
17179-
17180- err = fetch_kfunc_meta(env, call, &meta, NULL);
17181- if (err < 0)
17182- /* error would be reported later */
17183- return;
17184-
17185- clobbered_regs_mask = kfunc_fastcall_clobber_mask(&meta);
17186- can_be_inlined = is_fastcall_kfunc_call(&meta);
17187- }
17188-
17189- if (clobbered_regs_mask == ALL_CALLER_SAVED_REGS)
17174+ if (!get_call_summary(env, call, &cs))
1719017175 return;
1719117176
17177+ /* A bitmask specifying which caller saved registers are clobbered
17178+ * by a call to a helper/kfunc *as if* this helper/kfunc follows
17179+ * bpf_fastcall contract:
17180+ * - includes R0 if function is non-void;
17181+	 * - includes R1-R5 if the corresponding parameter is described
17182+	 * in the function prototype.
17183+ */
17184+ clobbered_regs_mask = GENMASK(cs.num_params, cs.is_void ? 1 : 0);
1719217185 /* e.g. if helper call clobbers r{0,1}, expect r{2,3,4,5} in the pattern */
1719317186 expected_regs_mask = ~clobbered_regs_mask & ALL_CALLER_SAVED_REGS;
1719417187
@@ -17246,7 +17239,7 @@ static void mark_fastcall_pattern_for_call(struct bpf_verifier_env *env,
1724617239 * don't set 'fastcall_spills_num' for call B so that remove_fastcall_spills_fills()
1724717240 * does not remove spill/fill pair {4,6}.
1724817241 */
17249- if (can_be_inlined )
17242+ if (cs.fastcall )
1725017243 env->insn_aux_data[insn_idx].fastcall_spills_num = i - 1;
1725117244 else
1725217245 subprog->keep_fastcall_stack = 1;
0 commit comments