Lines matching refs:subprog. Each entry lists the source line number, the code excerpt, the containing function, and whether subprog is a function argument or a local variable at that site.

425 static bool subprog_is_global(const struct bpf_verifier_env *env, int subprog)  in subprog_is_global()  argument
429 return aux && aux[subprog].linkage == BTF_FUNC_GLOBAL; in subprog_is_global()
432 static const char *subprog_name(const struct bpf_verifier_env *env, int subprog) in subprog_name() argument
439 info = &env->prog->aux->func_info[subprog]; in subprog_name()
443 static void mark_subprog_exc_cb(struct bpf_verifier_env *env, int subprog) in mark_subprog_exc_cb() argument
445 struct bpf_subprog_info *info = subprog_info(env, subprog); in mark_subprog_exc_cb()
452 static bool subprog_is_exc_cb(struct bpf_verifier_env *env, int subprog) in subprog_is_exc_cb() argument
454 return subprog_info(env, subprog)->is_exception_cb; in subprog_is_exc_cb()
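
For orientation, a minimal self-contained sketch of the small subprog predicates above (lines 425-454). The struct layouts here are simplified stand-ins; in the kernel the linkage is reached through env->prog->aux->func_info_aux[subprog] and the flag lives in env->subprog_info[subprog].

#include <stdbool.h>

enum btf_func_linkage { BTF_FUNC_STATIC = 0, BTF_FUNC_GLOBAL = 1, BTF_FUNC_EXTERN = 2 };

struct bpf_func_info_aux { enum btf_func_linkage linkage; };   /* simplified */
struct bpf_subprog_info { bool is_exception_cb; };             /* simplified */

struct bpf_verifier_env {                                      /* simplified */
        struct bpf_func_info_aux *func_info_aux;   /* indexed by subprog */
        struct bpf_subprog_info *subprog_info;     /* indexed by subprog */
};

static struct bpf_subprog_info *subprog_info(struct bpf_verifier_env *env, int subprog)
{
        return &env->subprog_info[subprog];
}

/* a subprog is "global" when its BTF linkage says so (line 429 above) */
static bool subprog_is_global(const struct bpf_verifier_env *env, int subprog)
{
        struct bpf_func_info_aux *aux = env->func_info_aux;

        return aux && aux[subprog].linkage == BTF_FUNC_GLOBAL;
}

/* exception-callback status is a per-subprog flag (lines 443-454 above) */
static void mark_subprog_exc_cb(struct bpf_verifier_env *env, int subprog)
{
        subprog_info(env, subprog)->is_exception_cb = true;
}

static bool subprog_is_exc_cb(struct bpf_verifier_env *env, int subprog)
{
        return subprog_info(env, subprog)->is_exception_cb;
}
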
2687 int subprog, bool is_sleepable) in push_async_cb() argument
2726 subprog /* subprog number within this prog */); in push_async_cb()
3211 struct bpf_subprog_info *subprog = env->subprog_info; in add_subprog_and_kfunc() local
3263 subprog[env->subprog_cnt].start = insn_cnt; in add_subprog_and_kfunc()
3267 verbose(env, "func#%d @%d\n", i, subprog[i].start); in add_subprog_and_kfunc()
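
The assignment at line 3263 above sets up a sentinel: the table holds subprog_cnt + 1 entries, and the extra entry's .start equals the total instruction count, so every subprog i owns the half-open range [start[i], start[i+1]). A small sketch under that assumption, with a simplified struct:

#include <stdio.h>

struct bpf_subprog_info { int start; };   /* simplified */

/* subprog[] must have room for subprog_cnt + 1 entries */
static void finalize_subprog_table(struct bpf_subprog_info *subprog,
                                   int subprog_cnt, int insn_cnt)
{
        subprog[subprog_cnt].start = insn_cnt;   /* sentinel end marker */

        for (int i = 0; i < subprog_cnt; i++)
                printf("func#%d @%d\n", i, subprog[i].start);   /* as at line 3267 */
}
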
3275 struct bpf_subprog_info *subprog = env->subprog_info; in check_subprogs() local
3280 subprog_start = subprog[cur_subprog].start; in check_subprogs()
3281 subprog_end = subprog[cur_subprog + 1].start; in check_subprogs()
3288 subprog[cur_subprog].has_tail_call = true; in check_subprogs()
3289 subprog[cur_subprog].tail_call_reachable = true; in check_subprogs()
3293 subprog[cur_subprog].has_ld_abs = true; in check_subprogs()
3321 subprog_end = subprog[cur_subprog + 1].start; in check_subprogs()
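
check_subprogs() (lines 3275-3321 above) walks every instruction once, flags per-subprog properties such as tail calls and BPF_LD_ABS use, and advances the [start, next start) window at each boundary. A condensed sketch of that loop shape, with the instruction classification reduced to a grossly simplified stand-in struct:

#include <stdbool.h>

struct insn_model { bool tail_call; bool ld_abs; };   /* stand-in for struct bpf_insn */

struct bpf_subprog_info {                              /* simplified */
        int start;
        bool has_tail_call;
        bool tail_call_reachable;
        bool has_ld_abs;
};

static void scan_subprogs(struct bpf_subprog_info *subprog, int subprog_cnt,
                          const struct insn_model *insns, int insn_cnt)
{
        int cur_subprog = 0;
        int subprog_end = subprog[cur_subprog + 1].start;

        for (int i = 0; i < insn_cnt; i++) {
                if (insns[i].tail_call) {
                        subprog[cur_subprog].has_tail_call = true;
                        subprog[cur_subprog].tail_call_reachable = true;
                }
                if (insns[i].ld_abs)
                        subprog[cur_subprog].has_ld_abs = true;

                /* last insn of this subprog: move to the next window */
                if (i == subprog_end - 1 && ++cur_subprog < subprog_cnt)
                        subprog_end = subprog[cur_subprog + 1].start;
        }
}
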
4139 int subprog_insn_idx, subprog; in backtrack_insn() local
4142 subprog = find_subprog(env, subprog_insn_idx); in backtrack_insn()
4143 if (subprog < 0) in backtrack_insn()
4146 if (subprog_is_global(env, subprog)) { in backtrack_insn()
4844 struct bpf_subprog_info *subprog = &env->subprog_info[state->subprogno]; in check_fastcall_stack_contract() local
4848 if (subprog->fastcall_stack_off <= off || aux[insn_idx].fastcall_pattern) in check_fastcall_stack_contract()
4855 subprog->fastcall_stack_off = S16_MIN; in check_fastcall_stack_contract()
4859 for (i = subprog->start; i < (subprog + 1)->start; ++i) { in check_fastcall_stack_contract()
6326 struct bpf_subprog_info *subprog = env->subprog_info; in check_max_stack_depth_subprog() local
6334 i = subprog[idx].start; in check_max_stack_depth_subprog()
6336 subprog[idx].priv_stack_mode = NO_PRIV_STACK; in check_max_stack_depth_subprog()
6357 if (idx && subprog[idx].has_tail_call && depth >= 256) { in check_max_stack_depth_subprog()
6364 subprog_depth = round_up_stack_depth(env, subprog[idx].stack_depth); in check_max_stack_depth_subprog()
6370 if (subprog[idx].priv_stack_mode == PRIV_STACK_UNKNOWN && in check_max_stack_depth_subprog()
6372 subprog[idx].priv_stack_mode = PRIV_STACK_ADAPTIVE; in check_max_stack_depth_subprog()
6375 if (subprog[idx].priv_stack_mode == PRIV_STACK_ADAPTIVE) { in check_max_stack_depth_subprog()
6390 subprog_end = subprog[idx + 1].start; in check_max_stack_depth_subprog()
6399 if (subprog[idx].is_cb) in check_max_stack_depth_subprog()
6402 if (subprog[ret_prog[c]].is_cb) { in check_max_stack_depth_subprog()
6429 if (subprog[sidx].is_async_cb) { in check_max_stack_depth_subprog()
6430 if (subprog[sidx].has_tail_call) { in check_max_stack_depth_subprog()
6437 if (subprog[sidx].is_exception_cb) { in check_max_stack_depth_subprog()
6445 subprog[idx].priv_stack_mode = NO_PRIV_STACK; in check_max_stack_depth_subprog()
6447 if (subprog[idx].has_tail_call) in check_max_stack_depth_subprog()
6465 if (subprog[ret_prog[j]].is_exception_cb) { in check_max_stack_depth_subprog()
6469 subprog[ret_prog[j]].tail_call_reachable = true; in check_max_stack_depth_subprog()
6471 if (subprog[0].tail_call_reachable) in check_max_stack_depth_subprog()
6479 if (subprog[idx].priv_stack_mode != PRIV_STACK_ADAPTIVE) in check_max_stack_depth_subprog()
6480 depth -= round_up_stack_depth(env, subprog[idx].stack_depth); in check_max_stack_depth_subprog()
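
check_max_stack_depth_subprog() (lines 6326-6480 above) walks the call graph iteratively with explicit ret_insn/ret_prog stacks, and also decides per-subprog private-stack modes and handles async and exception callbacks. Below is a stripped-down recursive model of just the depth accounting, assuming a precomputed callee list per subprog; the 256-byte cap for tail-call chains matches the check at line 6357, MAX_BPF_STACK is 512 in the kernel, and the rounding here is illustrative only.

#include <errno.h>
#include <stdbool.h>

#define MAX_BPF_STACK 512

struct subprog_model {                 /* simplified */
        int stack_depth;               /* bytes used by this subprog's frame */
        bool has_tail_call;
        int callee_cnt;
        const int *callees;            /* indexes of directly called subprogs */
};

static int round_up_stack_depth(int depth)
{
        return (depth + 31) & ~31;     /* illustrative rounding only */
}

static int check_depth(const struct subprog_model *subprog, int idx, int depth)
{
        /* tail calls reuse the current frame chain, so cap it early */
        if (idx && subprog[idx].has_tail_call && depth >= 256)
                return -EACCES;

        depth += round_up_stack_depth(subprog[idx].stack_depth);
        if (depth > MAX_BPF_STACK)
                return -EACCES;

        for (int i = 0; i < subprog[idx].callee_cnt; i++) {
                int err = check_depth(subprog, subprog[idx].callees[i], depth);

                if (err)
                        return err;
        }
        return 0;                      /* returning "pops" this frame's depth */
}
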
6535 int start = idx + insn->imm + 1, subprog; in get_callee_stack_depth() local
6537 subprog = find_subprog(env, start); in get_callee_stack_depth()
6538 if (subprog < 0) { in get_callee_stack_depth()
6543 return env->subprog_info[subprog].stack_depth; in get_callee_stack_depth()
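
The expression idx + insn->imm + 1 at line 6535 is how a BPF-to-BPF pseudo call encodes its callee: the immediate is relative to the instruction after the call. The same form appears at line 20614 in jit_subprogs(). A small sketch of resolving it against the subprog table; the kernel's find_subprog() uses bsearch on .start and returns -ENOENT, a linear scan is used here for brevity.

struct bpf_subprog_info { int start; int stack_depth; };   /* simplified */

static int find_subprog_by_start(const struct bpf_subprog_info *subprog,
                                 int subprog_cnt, int start)
{
        for (int i = 0; i < subprog_cnt; i++)
                if (subprog[i].start == start)
                        return i;
        return -1;   /* kernel: -ENOENT via bsearch */
}

static int callee_stack_depth(const struct bpf_subprog_info *subprog,
                              int subprog_cnt, int call_idx, int call_imm)
{
        int target = call_idx + call_imm + 1;    /* callee's first insn */
        int s = find_subprog_by_start(subprog, subprog_cnt, target);

        return s < 0 ? -1 : subprog[s].stack_depth;
}
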
9977 static int setup_func_entry(struct bpf_verifier_env *env, int subprog, int callsite, in setup_func_entry() argument
10010 subprog /* subprog number within this prog */); in setup_func_entry()
10026 static int btf_check_func_arg_match(struct bpf_verifier_env *env, int subprog, in btf_check_func_arg_match() argument
10030 struct bpf_subprog_info *sub = subprog_info(env, subprog); in btf_check_func_arg_match()
10035 ret = btf_prepare_func_args(env, subprog); in btf_check_func_arg_match()
10122 static int btf_check_subprog_call(struct bpf_verifier_env *env, int subprog, in btf_check_subprog_call() argument
10133 btf_id = prog->aux->func_info[subprog].type_id; in btf_check_subprog_call()
10137 if (prog->aux->func_info_aux[subprog].unreliable) in btf_check_subprog_call()
10140 err = btf_check_func_arg_match(env, subprog, btf, regs); in btf_check_subprog_call()
10146 prog->aux->func_info_aux[subprog].unreliable = true; in btf_check_subprog_call()
10151 int insn_idx, int subprog, in push_callback_call() argument
10159 err = btf_check_subprog_call(env, subprog, caller->regs); in push_callback_call()
10167 env->subprog_info[subprog].is_cb = true; in push_callback_call()
10184 env->subprog_info[subprog].is_async_cb = true; in push_callback_call()
10185 async_cb = push_async_cb(env, env->subprog_info[subprog].start, in push_callback_call()
10186 insn_idx, subprog, in push_callback_call()
10204 callback_state = push_stack(env, env->subprog_info[subprog].start, insn_idx, false); in push_callback_call()
10208 err = setup_func_entry(env, subprog, insn_idx, set_callee_state_cb, in push_callback_call()
10224 int err, subprog, target_insn; in check_func_call() local
10227 subprog = find_subprog(env, target_insn); in check_func_call()
10228 if (subprog < 0) { in check_func_call()
10234 err = btf_check_subprog_call(env, subprog, caller->regs); in check_func_call()
10237 if (subprog_is_global(env, subprog)) { in check_func_call()
10238 const char *sub_name = subprog_name(env, subprog); in check_func_call()
10262 subprog, sub_name); in check_func_call()
10267 subprog, sub_name); in check_func_call()
10268 if (env->subprog_info[subprog].changes_pkt_data) in check_func_call()
10271 subprog_aux(env, subprog)->called = true; in check_func_call()
10285 err = setup_func_entry(env, subprog, *insn_idx, set_callee_state, state); in check_func_call()
10292 *insn_idx = env->subprog_info[subprog].start - 1; in check_func_call()
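
check_func_call() (lines 10224-10292 above) splits on linkage: a global subprog is validated against its BTF signature and then treated as an opaque call, since its body is verified separately, while a static subprog is entered directly by pushing a callee frame and rewinding insn_idx to start - 1 (the main loop increments it afterwards). A condensed, self-contained model with stand-in types:

#include <errno.h>
#include <stdbool.h>

struct call_model {                    /* simplified stand-in for env state */
        int subprog_cnt;
        const int *subprog_start;      /* + sentinel entry */
        const bool *subprog_is_global; /* BTF linkage == BTF_FUNC_GLOBAL */
        bool *subprog_called;
};

static int handle_subprog_call(struct call_model *m, int call_idx,
                               int call_imm, int *insn_idx)
{
        int target = call_idx + call_imm + 1, subprog = -1;

        for (int i = 0; i < m->subprog_cnt; i++)
                if (m->subprog_start[i] == target)
                        subprog = i;
        if (subprog < 0)
                return -EFAULT;        /* "verifier bug" path in the kernel */

        if (m->subprog_is_global[subprog]) {
                /* arguments checked against BTF; body verified on its own,
                 * so execution just continues after the call site */
                m->subprog_called[subprog] = true;
                return 0;
        }

        /* static subprog: continue verification inside the callee;
         * -1 because the main loop increments *insn_idx next */
        *insn_idx = m->subprog_start[subprog] - 1;
        return 0;
}
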
16579 struct bpf_subprog_info *subprog; in mark_subprog_changes_pkt_data() local
16581 subprog = find_containing_subprog(env, off); in mark_subprog_changes_pkt_data()
16582 subprog->changes_pkt_data = true; in mark_subprog_changes_pkt_data()
16888 struct bpf_subprog_info *subprog, in mark_fastcall_pattern_for_call() argument
16986 subprog->keep_fastcall_stack = 1; in mark_fastcall_pattern_for_call()
16987 subprog->fastcall_stack_off = min(subprog->fastcall_stack_off, off); in mark_fastcall_pattern_for_call()
16992 struct bpf_subprog_info *subprog = env->subprog_info; in mark_fastcall_patterns() local
16997 for (s = 0; s < env->subprog_cnt; ++s, ++subprog) { in mark_fastcall_patterns()
17000 for (i = subprog->start; i < (subprog + 1)->start; ++i) { in mark_fastcall_patterns()
17008 for (i = subprog->start; i < (subprog + 1)->start; ++i) { in mark_fastcall_patterns()
17012 mark_fastcall_pattern_for_call(env, subprog, i, lowest_off); in mark_fastcall_patterns()
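
mark_fastcall_patterns() (lines 16992-17012 above) scans each subprog's window twice: once to find the lowest stack spill offset used in the subprog, then to mark fastcall spill/fill patterns around eligible calls. Both loops rely on the [subprog->start, (subprog + 1)->start) convention enabled by the sentinel entry. A structural sketch with the per-instruction work reduced to hypothetical stubs:

struct subprog_win { int start; };   /* simplified */

/* hypothetical stubs standing in for the real per-insn analysis */
static int  lowest_stack_off_of(int insn_idx)      { (void)insn_idx; return 0; }
static void mark_pattern_at(int insn_idx, int off) { (void)insn_idx; (void)off; }

static void mark_fastcall_patterns_model(struct subprog_win *subprog, int subprog_cnt)
{
        for (int s = 0; s < subprog_cnt; s++, subprog++) {
                int lowest_off = 0;

                /* pass 1: lowest stack offset touched in this subprog */
                for (int i = subprog->start; i < (subprog + 1)->start; i++) {
                        int off = lowest_stack_off_of(i);

                        if (off < lowest_off)
                                lowest_off = off;
                }
                /* pass 2: mark spill/fill patterns around calls */
                for (int i = subprog->start; i < (subprog + 1)->start; i++)
                        mark_pattern_at(i, lowest_off);
        }
}
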
20597 int i, j, subprog_start, subprog_end = 0, len, subprog; in jit_subprogs() local
20614 subprog = find_subprog(env, i + insn->imm + 1); in jit_subprogs()
20615 if (subprog < 0) { in jit_subprogs()
20623 insn->off = subprog; in jit_subprogs()
20744 subprog = insn->off; in jit_subprogs()
20745 insn[0].imm = (u32)(long)func[subprog]->bpf_func; in jit_subprogs()
20746 insn[1].imm = ((u64)(long)func[subprog]->bpf_func) >> 32; in jit_subprogs()
20751 subprog = insn->off; in jit_subprogs()
20752 insn->imm = BPF_CALL_IMM(func[subprog]->bpf_func); in jit_subprogs()
20808 subprog = find_subprog(env, i + insn->off + 1); in jit_subprogs()
20809 insn->imm = subprog; in jit_subprogs()
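
jit_subprogs() (lines 20597-20809 above) runs in two phases: first each BPF-to-BPF call is resolved with find_subprog(env, i + insn->imm + 1) and the resulting subprog index is stashed in insn->off (line 20623); then, once every subprog has been JITed, that index selects the native address, either split across a 64-bit-immediate pair (lines 20745-20746) or folded into the call immediate via BPF_CALL_IMM(), which in the kernel is relative to __bpf_call_base (line 20752). A minimal sketch of the immediate-pair patching, with simplified types:

struct jited_prog { void *bpf_func; };     /* simplified stand-in */

/* pseudo-func ld_imm64: low/high halves of the callee address */
static void patch_pseudo_func_imm(unsigned int *imm_lo, unsigned int *imm_hi,
                                  struct jited_prog * const *func, int subprog)
{
        unsigned long long addr = (unsigned long long)(unsigned long)func[subprog]->bpf_func;

        *imm_lo = (unsigned int)addr;          /* insn[0].imm */
        *imm_hi = (unsigned int)(addr >> 32);  /* insn[1].imm */
}
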
22110 struct bpf_subprog_info *subprog = env->subprog_info; in remove_fastcall_spills_fills() local
22128 if ((subprog + 1)->start == i + 1) { in remove_fastcall_spills_fills()
22129 if (modified && !subprog->keep_fastcall_stack) in remove_fastcall_spills_fills()
22130 subprog->stack_depth = -subprog->fastcall_stack_off; in remove_fastcall_spills_fills()
22131 subprog++; in remove_fastcall_spills_fills()
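
The boundary test at line 22128, (subprog + 1)->start == i + 1, fires on a subprog's last instruction; at that point, if spill/fill pairs were removed and keep_fastcall_stack is not set, the subprog's stack_depth is shrunk to -fastcall_stack_off (stack offsets are negative relative to r10, so negation yields a byte count). A tiny sketch of that bookkeeping, with simplified types:

#include <stdbool.h>

struct subprog_model {                 /* simplified */
        int start;
        bool keep_fastcall_stack;
        short fastcall_stack_off;      /* <= 0, offset from r10 */
        int stack_depth;               /* positive byte count */
};

/* called when instruction i is the last one of *subprog */
static void finish_subprog_stack(struct subprog_model *subprog, bool modified)
{
        if (modified && !subprog->keep_fastcall_stack)
                subprog->stack_depth = -subprog->fastcall_stack_off;
}
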
22169 static int do_check_common(struct bpf_verifier_env *env, int subprog) in do_check_common() argument
22172 struct bpf_subprog_info *sub = subprog_info(env, subprog); in do_check_common()
22195 subprog); in do_check_common()
22196 state->first_insn_idx = env->subprog_info[subprog].start; in do_check_common()
22200 if (subprog || env->prog->type == BPF_PROG_TYPE_EXT) { in do_check_common()
22201 const char *sub_name = subprog_name(env, subprog); in do_check_common()
22205 verbose(env, "Validating %s() func#%d...\n", sub_name, subprog); in do_check_common()
22206 ret = btf_prepare_func_args(env, subprog); in do_check_common()
22210 if (subprog_is_exc_cb(env, subprog)) { in do_check_common()
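
do_check_common() (lines 22169-22210 above) seeds the first frame at subprog_info[subprog].start and, for any non-main subprog or a BPF_PROG_TYPE_EXT program, derives the argument types from BTF before the walk begins; exception callbacks get extra treatment (line 22210). A condensed sketch of that entry setup, with stand-in types and a hypothetical stub in place of btf_prepare_func_args():

#include <stdbool.h>
#include <stdio.h>

struct check_model {                   /* simplified stand-in for env state */
        const int *subprog_start;
        const char * const *subprog_name;
        bool prog_type_ext;            /* BPF_PROG_TYPE_EXT */
};

/* hypothetical stub for btf_prepare_func_args() */
static int prepare_func_args_model(struct check_model *env, int subprog)
{
        (void)env; (void)subprog;
        return 0;
}

static int setup_subprog_entry(struct check_model *env, int subprog,
                               int *first_insn_idx)
{
        *first_insn_idx = env->subprog_start[subprog];

        /* non-main subprogs (and EXT programs) get BTF-typed arguments */
        if (subprog || env->prog_type_ext) {
                printf("Validating %s() func#%d...\n",
                       env->subprog_name[subprog], subprog);
                return prepare_func_args_model(env, subprog);
        }
        return 0;
}
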
22537 int ret = 0, subprog = -1, i; in bpf_check_attach_target() local
22577 subprog = i; in bpf_check_attach_target()
22580 if (subprog == -1) { in bpf_check_attach_target()
22584 if (aux->func && aux->func[subprog]->aux->exception_cb) { in bpf_check_attach_target()
22590 conservative = aux->func_info_aux[subprog].unreliable; in bpf_check_attach_target()
22603 ? aux->func[subprog]->aux->changes_pkt_data in bpf_check_attach_target()
22761 if (subprog == 0) in bpf_check_attach_target()
22764 addr = (long) tgt_prog->aux->func[subprog]->bpf_func; in bpf_check_attach_target()
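
bpf_check_attach_target() (lines 22537-22764 above) locates the target subprog by matching the requested BTF type id against the target program's func_info (line 22577), rejects a missing match (line 22580) and exception-callback subprogs (line 22584), and finally takes the attach address from the main program for subprog 0 or from the JITed subprog otherwise (line 22764). A condensed sketch of the lookup, with stand-in types:

struct target_model {                       /* simplified stand-in */
        int func_info_cnt;
        const unsigned int *func_type_id;   /* BTF type id per subprog */
        void *main_bpf_func;                /* subprog 0 entry */
        void * const *func_bpf_func;        /* JITed subprog entries */
};

static long resolve_attach_addr(const struct target_model *tgt,
                                unsigned int btf_id)
{
        int subprog = -1;

        for (int i = 0; i < tgt->func_info_cnt; i++) {
                if (tgt->func_type_id[i] == btf_id) {
                        subprog = i;
                        break;
                }
        }
        if (subprog < 0)
                return 0;    /* kernel logs "Subprog ... doesn't exist" */

        return subprog == 0 ? (long)tgt->main_bpf_func
                            : (long)tgt->func_bpf_func[subprog];
}
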