Lines Matching +full:1 +full:ma

50 	1,	/* 72 */
51 	1,	/* 80 */
52 	1,	/* 88 */
53 	1,	/* 96 */
71 return -1; in bpf_mem_cache_idx()
74 return size_index[(size - 1) / 8] - 1; in bpf_mem_cache_idx()
76 return fls(size - 1) - 2; in bpf_mem_cache_idx()
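The three matches above are the whole size-to-cache mapping of bpf_mem_cache_idx(): requests of up to 192 bytes are looked up in the kmalloc-style size_index[] table (the entries matched at file lines 50-53 send 72..96-byte requests to cache index 0), while larger requests are rounded up to the next power of two with fls(). Below is a minimal userspace sketch of the power-of-two branch; the sizes[] table and the fls() helper are reconstructed for illustration and are not part of the matches.

    #include <stdio.h>

    /* Reimplementation of the kernel's fls(): position of the highest set
     * bit, counted from 1; fls(0) == 0.
     */
    static int fls(unsigned int x)
    {
            return x ? 32 - __builtin_clz(x) : 0;
    }

    /* Assumed fixed cache sizes of the BPF memory allocator (not shown in
     * the matches above).
     */
    static const int sizes[] = {96, 192, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096};

    int main(void)
    {
            unsigned int size;

            /* The "> 192 bytes" branch: fls(size - 1) - 2 rounds the request
             * up to the next power-of-two cache, e.g. 193..256 -> index 6.
             */
            for (size = 256; size <= 4096; size *= 2)
                    printf("size %4u -> idx %d (%d-byte cache)\n",
                           size, fls(size - 1) - 2, sizes[fls(size - 1) - 2]);
            return 0;
    }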
151 obj[1] = pptr; in __alloc()
184 WARN_ON_ONCE(local_inc_return(&c->active) != 1); in inc_active()
258 free_percpu(((void __percpu **)obj)[1]); in free_one()
308 if (atomic_xchg(&c->call_rcu_ttrace_in_progress, 1)) { in do_call_rcu_ttrace()
399 if (atomic_xchg(&c->call_rcu_in_progress, 1)) { in check_free_by_rcu()
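Both matches use atomic_xchg() as a test-and-set: only the caller that flips the flag from 0 to 1 hands the pending objects to RCU, and anyone who finds it already set backs off until the callback clears it. A userspace sketch of that idiom with C11 atomics follows (the helper names are made up for illustration).

    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int in_progress;      /* plays the role of call_rcu_in_progress */

    /* Hypothetical helper: schedule the deferred callback unless one is
     * already pending.
     */
    static void maybe_schedule(const char *who)
    {
            if (atomic_exchange(&in_progress, 1)) {
                    printf("%s: callback already pending, back off\n", who);
                    return;
            }
            printf("%s: scheduling the callback\n", who);
            /* the kernel code would hand the freelist to call_rcu() here */
    }

    /* In the kernel the RCU callback itself clears the flag once it has run. */
    static void callback_finished(void)
    {
            atomic_store(&in_progress, 0);
    }

    int main(void)
    {
            maybe_schedule("first");    /* flips 0 -> 1, schedules */
            maybe_schedule("second");   /* sees the flag, backs off */
            callback_finished();
            maybe_schedule("third");    /* schedules again */
            return 0;
    }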
465 * memory consumption, set low_mark = 1 and high_mark = 3, resulting in c->batch = 1.
471 c->low_watermark = 1; in init_refill_work()
480 * 8k allocs and above low == 1, high == 3, batch == 1. in init_refill_work()
482 c->low_watermark = max(32 * 256 / c->unit_size, 1); in init_refill_work()
485 c->batch = max((c->high_watermark - c->low_watermark) / 4 * 3, 1); in init_refill_work()
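Together with the percpu case above (low == 1, high == 3, batch == 1), these lines tie the refill thresholds to the unit size: the larger the unit, the lower the watermarks, so the memory parked on each per-cpu freelist stays roughly constant. A quick userspace check of that arithmetic is sketched below; the high-watermark formula is an assumption that mirrors the matched low-watermark line with 96 in place of 32.

    #include <stdio.h>

    #define MAX(a, b)       ((a) > (b) ? (a) : (b))

    int main(void)
    {
            /* One 4k page and an 8k allocation. */
            int units[] = { 4096, 8192 };

            for (int i = 0; i < 2; i++) {
                    int unit  = units[i];
                    int low   = MAX(32 * 256 / unit, 1);   /* matched formula */
                    int high  = MAX(96 * 256 / unit, 3);   /* assumed counterpart */
                    int batch = MAX((high - low) / 4 * 3, 1);

                    /* For 8192 this prints low 1, high 3, batch 1, exactly as
                     * the matched comment states for "8k allocs and above".
                     */
                    printf("unit %5d: low %d high %d batch %d\n",
                           unit, low, high, batch);
            }
            return 0;
    }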
490 int cnt = 1; in prefill_mem_cache()
493 * 1st run of bpf prog won't be doing more than 4 map_update_elem from in prefill_mem_cache()
509 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu) in bpf_mem_alloc_init() argument
522 ma->percpu = percpu; in bpf_mem_alloc_init()
537 ma->objcg = objcg; in bpf_mem_alloc_init()
548 ma->cache = pc; in bpf_mem_alloc_init()
558 ma->objcg = objcg; in bpf_mem_alloc_init()
573 ma->caches = pcc; in bpf_mem_alloc_init()
577 int bpf_mem_alloc_percpu_init(struct bpf_mem_alloc *ma, struct obj_cgroup *objcg) in bpf_mem_alloc_percpu_init() argument
585 ma->caches = pcc; in bpf_mem_alloc_percpu_init()
586 ma->objcg = objcg; in bpf_mem_alloc_percpu_init()
587 ma->percpu = true; in bpf_mem_alloc_percpu_init()
591 int bpf_mem_alloc_percpu_unit_init(struct bpf_mem_alloc *ma, int size) in bpf_mem_alloc_percpu_unit_init() argument
606 objcg = ma->objcg; in bpf_mem_alloc_percpu_unit_init()
607 pcc = ma->caches; in bpf_mem_alloc_percpu_unit_init()
658 static void check_leaked_objs(struct bpf_mem_alloc *ma) in check_leaked_objs() argument
664 if (ma->cache) { in check_leaked_objs()
666 c = per_cpu_ptr(ma->cache, cpu); in check_leaked_objs()
670 if (ma->caches) { in check_leaked_objs()
672 cc = per_cpu_ptr(ma->caches, cpu); in check_leaked_objs()
681 static void free_mem_alloc_no_barrier(struct bpf_mem_alloc *ma) in free_mem_alloc_no_barrier() argument
683 check_leaked_objs(ma); in free_mem_alloc_no_barrier()
684 free_percpu(ma->cache); in free_mem_alloc_no_barrier()
685 free_percpu(ma->caches); in free_mem_alloc_no_barrier()
686 ma->cache = NULL; in free_mem_alloc_no_barrier()
687 ma->caches = NULL; in free_mem_alloc_no_barrier()
690 static void free_mem_alloc(struct bpf_mem_alloc *ma) in free_mem_alloc() argument
706 free_mem_alloc_no_barrier(ma); in free_mem_alloc()
711 struct bpf_mem_alloc *ma = container_of(work, struct bpf_mem_alloc, work); in free_mem_alloc_deferred() local
713 free_mem_alloc(ma); in free_mem_alloc_deferred()
714 kfree(ma); in free_mem_alloc_deferred()
717 static void destroy_mem_alloc(struct bpf_mem_alloc *ma, int rcu_in_progress) in destroy_mem_alloc() argument
725 free_mem_alloc_no_barrier(ma); in destroy_mem_alloc()
729 copy = kmemdup(ma, sizeof(*ma), GFP_KERNEL); in destroy_mem_alloc()
732 free_mem_alloc(ma); in destroy_mem_alloc()
737 memset(ma, 0, sizeof(*ma)); in destroy_mem_alloc()
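The destroy_mem_alloc() matches show a copy-then-defer idiom: when RCU callbacks are still in flight, the allocator state is duplicated with kmemdup(), the copy is queued to a workqueue that waits out the grace periods before freeing, and the caller's struct is zeroed so its storage can be reused at once. A kernel-style sketch of that pattern follows; everything except kmemdup(), INIT_WORK() and queue_work() is illustrative.

    #include <linux/slab.h>
    #include <linux/string.h>
    #include <linux/errno.h>
    #include <linux/workqueue.h>

    struct deferred_obj {
            struct work_struct work;
            /* ... state that must outlive the caller's copy ... */
    };

    static void deferred_teardown(struct work_struct *work)
    {
            struct deferred_obj *copy = container_of(work, struct deferred_obj, work);

            /* wait for grace periods, release resources, then free the copy */
            kfree(copy);
    }

    /* Duplicate the object, queue the copy, and let the caller reuse (or
     * free) the original immediately.
     */
    static int defer_destroy(struct deferred_obj *obj)
    {
            struct deferred_obj *copy = kmemdup(obj, sizeof(*obj), GFP_KERNEL);

            if (!copy)
                    return -ENOMEM; /* fall back to synchronous teardown */
            INIT_WORK(&copy->work, deferred_teardown);
            queue_work(system_unbound_wq, &copy->work);
            memset(obj, 0, sizeof(*obj));
            return 0;
    }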
742 void bpf_mem_alloc_destroy(struct bpf_mem_alloc *ma) in bpf_mem_alloc_destroy() argument
748 if (ma->cache) { in bpf_mem_alloc_destroy()
751 c = per_cpu_ptr(ma->cache, cpu); in bpf_mem_alloc_destroy()
758 obj_cgroup_put(ma->objcg); in bpf_mem_alloc_destroy()
759 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
761 if (ma->caches) { in bpf_mem_alloc_destroy()
764 cc = per_cpu_ptr(ma->caches, cpu); in bpf_mem_alloc_destroy()
774 obj_cgroup_put(ma->objcg); in bpf_mem_alloc_destroy()
775 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
799 if (local_inc_return(&c->active) == 1) { in unit_alloc()
840 if (local_inc_return(&c->active) == 1) { in unit_free()
873 if (local_inc_return(&c->active) == 1) { in unit_free_rcu()
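All three matches are the same re-entrancy guard: local_inc_return(&c->active) returning 1 means this context owns the per-cpu freelist, while a nested entry (an IRQ or NMI landing inside the critical section) sees a larger value and must fall back to a slower path instead of touching the list. A userspace analogue of the check, with re-entry simulated by a recursive call:

    #include <stdio.h>

    static int active;      /* stands in for the per-cpu local_t c->active */

    static void touch_freelist(int depth)
    {
            if (++active == 1) {
                    printf("depth %d: sole owner, may pop/push the freelist\n", depth);
                    if (depth == 0)
                            touch_freelist(1);      /* simulated NMI re-entry */
            } else {
                    printf("depth %d: re-entered, take the fallback path\n", depth);
            }
            --active;
    }

    int main(void)
    {
            touch_freelist(0);
            return 0;
    }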
889 void notrace *bpf_mem_alloc(struct bpf_mem_alloc *ma, size_t size) in bpf_mem_alloc() argument
897 if (!ma->percpu) in bpf_mem_alloc()
903 ret = unit_alloc(this_cpu_ptr(ma->caches)->cache + idx); in bpf_mem_alloc()
907 void notrace bpf_mem_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free() argument
920 unit_free(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free()
923 void notrace bpf_mem_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free_rcu() argument
936 unit_free_rcu(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free_rcu()
939 void notrace *bpf_mem_cache_alloc(struct bpf_mem_alloc *ma) in bpf_mem_cache_alloc() argument
943 ret = unit_alloc(this_cpu_ptr(ma->cache)); in bpf_mem_cache_alloc()
947 void notrace bpf_mem_cache_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free() argument
952 unit_free(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free()
955 void notrace bpf_mem_cache_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free_rcu() argument
960 unit_free_rcu(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free_rcu()
985 void notrace *bpf_mem_cache_alloc_flags(struct bpf_mem_alloc *ma, gfp_t flags) in bpf_mem_cache_alloc_flags() argument
990 c = this_cpu_ptr(ma->cache); in bpf_mem_cache_alloc_flags()
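Taken together, the entry points matched above are the allocator's public API: a size-based mode driven by bpf_mem_alloc()/bpf_mem_free() and a fixed-size cache mode driven by bpf_mem_cache_alloc()/bpf_mem_cache_free(). Below is a kernel-side sketch of how a map-like user might drive the size-based mode; struct my_map and the my_map_* helpers are hypothetical, only the bpf_mem_alloc_* calls come from the matches.

    #include <linux/bpf_mem_alloc.h>

    struct my_map {
            struct bpf_mem_alloc ma;        /* hypothetical embedding */
    };

    static int my_map_setup(struct my_map *map, int elem_size)
    {
            /* Size-based, non-percpu allocator serving requests up to 4096
             * bytes from prefilled per-cpu freelists.
             */
            return bpf_mem_alloc_init(&map->ma, elem_size, false);
    }

    static void *my_map_insert(struct my_map *map, int elem_size)
    {
            /* Only pops a per-cpu freelist and kicks the refill irq_work when
             * it runs low, so it is usable from restricted contexts.
             */
            return bpf_mem_alloc(&map->ma, elem_size);
    }

    static void my_map_remove(struct my_map *map, void *elem)
    {
            /* The unit may be reused immediately; bpf_mem_free_rcu() is the
             * variant to use when RCU readers may still hold the pointer.
             */
            bpf_mem_free(&map->ma, elem);
    }

    static void my_map_teardown(struct my_map *map)
    {
            bpf_mem_alloc_destroy(&map->ma);
    }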