Lines matching full:ctl (fs/btrfs/free-space-cache.c)

46 static int link_free_space(struct btrfs_free_space_ctl *ctl,
48 static void unlink_free_space(struct btrfs_free_space_ctl *ctl,
50 static int search_bitmap(struct btrfs_free_space_ctl *ctl,
53 static void free_bitmap(struct btrfs_free_space_ctl *ctl,
55 static void bitmap_clear_bits(struct btrfs_free_space_ctl *ctl,
64 static void __btrfs_remove_free_space_cache(struct btrfs_free_space_ctl *ctl) in __btrfs_remove_free_space_cache() argument
69 while ((node = rb_last(&ctl->free_space_offset)) != NULL) { in __btrfs_remove_free_space_cache()
72 unlink_free_space(ctl, info, true); in __btrfs_remove_free_space_cache()
75 free_bitmap(ctl, info); in __btrfs_remove_free_space_cache()
78 cond_resched_lock(&ctl->tree_lock); in __btrfs_remove_free_space_cache()
690 static void recalculate_thresholds(struct btrfs_free_space_ctl *ctl) in recalculate_thresholds() argument
692 struct btrfs_block_group *block_group = ctl->block_group; in recalculate_thresholds()
697 u64 bytes_per_bg = BITS_PER_BITMAP * ctl->unit; in recalculate_thresholds()
702 if (ctl->total_bitmaps > max_bitmaps) in recalculate_thresholds()
706 ctl->total_bitmaps, ctl->unit, max_bitmaps, in recalculate_thresholds()
708 ASSERT(ctl->total_bitmaps <= max_bitmaps); in recalculate_thresholds()
721 bitmap_bytes = ctl->total_bitmaps * ctl->unit; in recalculate_thresholds()
730 ctl->extents_thresh = in recalculate_thresholds()
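
The recalculate_thresholds() fragments above show how the cap on plain extent entries is recomputed whenever bitmaps come and go: work out how many bitmaps the block group could ever need, charge the existing bitmaps against a per-GiB memory budget, and let extent entries use at most half of what remains. Below is a minimal userspace model of that arithmetic; the constants, the per-GiB budget, and the free_space_entry layout are stand-ins rather than the kernel's definitions, and the rounding is a paraphrase of what the fragments suggest, not the function itself.

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE           4096ULL
    #define BITS_PER_BITMAP     (PAGE_SIZE * 8)      /* bits in one bitmap page */
    #define SZ_1G               (1024ULL * 1024 * 1024)
    #define CACHE_BYTES_PER_GIG (32ULL * 1024)       /* assumed cache budget per GiB */

    /* stand-in for struct btrfs_free_space; only its size matters here */
    struct free_space_entry { uint64_t offset, bytes; void *bitmap; };

    static uint64_t extents_thresh(uint64_t bg_length, uint32_t unit,
                                   uint64_t total_bitmaps)
    {
        uint64_t bytes_per_bg = BITS_PER_BITMAP * unit;
        uint64_t max_bitmaps = (bg_length + bytes_per_bg - 1) / bytes_per_bg;
        uint64_t max_bytes, bitmap_bytes, extent_bytes;

        if (max_bitmaps < 1)
            max_bitmaps = 1;
        if (total_bitmaps > max_bitmaps)
            return 0;                       /* the kernel warns and asserts here */

        /* total cache memory budget scales with the block group size */
        max_bytes = bg_length < SZ_1G ?
            CACHE_BYTES_PER_GIG : CACHE_BYTES_PER_GIG * (bg_length / SZ_1G);

        /* memory already spent on bitmaps comes out of that budget ... */
        bitmap_bytes = total_bitmaps * unit;
        if (bitmap_bytes >= max_bytes)
            return 0;

        /* ... and extent entries may use at most half of the budget */
        extent_bytes = max_bytes - bitmap_bytes;
        if (extent_bytes > max_bytes / 2)
            extent_bytes = max_bytes / 2;

        return extent_bytes / sizeof(struct free_space_entry);
    }

    int main(void)
    {
        /* 1 GiB block group, 4 KiB sectors, one bitmap already in use */
        printf("extents_thresh = %llu\n",
               (unsigned long long)extents_thresh(SZ_1G, 4096, 1));
        return 0;
    }

With a 1 GiB block group, 4 KiB units and one bitmap already allocated this comes out to a few hundred extent entries; once free_extents crosses that threshold, the use_bitmap() fragments further down start steering new free space into bitmaps instead.
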
735 struct btrfs_free_space_ctl *ctl, in __load_free_space_cache() argument
833 spin_lock(&ctl->tree_lock); in __load_free_space_cache()
834 ret = link_free_space(ctl, e); in __load_free_space_cache()
835 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
853 spin_lock(&ctl->tree_lock); in __load_free_space_cache()
854 ret = link_free_space(ctl, e); in __load_free_space_cache()
856 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
863 ctl->total_bitmaps++; in __load_free_space_cache()
864 recalculate_thresholds(ctl); in __load_free_space_cache()
865 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
893 spin_lock(&ctl->tree_lock); in __load_free_space_cache()
894 __btrfs_remove_free_space_cache(ctl); in __load_free_space_cache()
895 spin_unlock(&ctl->tree_lock); in __load_free_space_cache()
900 struct btrfs_free_space_ctl *ctl) in copy_free_space_cache() argument
906 while (!ret && (n = rb_first(&ctl->free_space_offset)) != NULL) { in copy_free_space_cache()
912 unlink_free_space(ctl, info, true); in copy_free_space_cache()
913 spin_unlock(&ctl->tree_lock); in copy_free_space_cache()
916 spin_lock(&ctl->tree_lock); in copy_free_space_cache()
919 u64 bytes = ctl->unit; in copy_free_space_cache()
921 ret = search_bitmap(ctl, info, &offset, &bytes, false); in copy_free_space_cache()
923 bitmap_clear_bits(ctl, info, offset, bytes, true); in copy_free_space_cache()
924 spin_unlock(&ctl->tree_lock); in copy_free_space_cache()
927 spin_lock(&ctl->tree_lock); in copy_free_space_cache()
929 free_bitmap(ctl, info); in copy_free_space_cache()
933 cond_resched_lock(&ctl->tree_lock); in copy_free_space_cache()
943 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in load_free_space_cache() local
954 * valid copy it all into the actual free space ctl. in load_free_space_cache()
1062 spin_lock(&ctl->tree_lock); in load_free_space_cache()
1064 spin_unlock(&ctl->tree_lock); in load_free_space_cache()
1071 struct btrfs_free_space_ctl *ctl, in write_cache_extent_entries() argument
1079 struct rb_node *node = rb_first(&ctl->free_space_offset); in write_cache_extent_entries()
1131 list_for_each_entry(trim_entry, &ctl->trimming_ranges, list) { in write_cache_extent_entries()
1367 * @ctl: free space cache we are going to write out
1377 struct btrfs_free_space_ctl *ctl, in __btrfs_write_out_cache() argument
1423 mutex_lock(&ctl->cache_writeout_mutex); in __btrfs_write_out_cache()
1425 spin_lock(&ctl->tree_lock); in __btrfs_write_out_cache()
1426 ret = write_cache_extent_entries(io_ctl, ctl, in __btrfs_write_out_cache()
1450 spin_unlock(&ctl->tree_lock); in __btrfs_write_out_cache()
1451 mutex_unlock(&ctl->cache_writeout_mutex); in __btrfs_write_out_cache()
1498 spin_unlock(&ctl->tree_lock); in __btrfs_write_out_cache()
1499 mutex_unlock(&ctl->cache_writeout_mutex); in __btrfs_write_out_cache()
1526 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_write_out_cache() local
1541 ret = __btrfs_write_out_cache(inode, ctl, block_group, in btrfs_write_out_cache()
1576 static inline u64 offset_to_bitmap(struct btrfs_free_space_ctl *ctl, in offset_to_bitmap() argument
1582 bytes_per_bitmap = BITS_PER_BITMAP * ctl->unit; in offset_to_bitmap()
1583 bitmap_start = offset - ctl->start; in offset_to_bitmap()
1586 bitmap_start += ctl->start; in offset_to_bitmap()
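
offset_to_bitmap() is pure arithmetic: make the offset relative to ctl->start, round it down to a whole-bitmap boundary, and shift it back. A stand-alone paraphrase of the lines above, with the page size assumed:

    #include <stdint.h>

    #define PAGE_SIZE       4096ULL
    #define BITS_PER_BITMAP (PAGE_SIZE * 8)

    /*
     * Round an absolute byte offset down to the start of the bitmap that
     * would cover it: relative to the block group start, rounded down to a
     * multiple of the bytes one bitmap spans, then shifted back.
     */
    static uint64_t offset_to_bitmap(uint64_t ctl_start, uint32_t unit, uint64_t offset)
    {
        uint64_t bytes_per_bitmap = BITS_PER_BITMAP * unit;
        uint64_t bitmap_start = offset - ctl_start;

        bitmap_start -= bitmap_start % bytes_per_bitmap;
        return bitmap_start + ctl_start;
    }

With 4 KiB pages and a 4 KiB unit one bitmap spans 32768 * 4 KiB = 128 MiB, so lookups such as tree_search_offset(ctl, offset_to_bitmap(ctl, offset), 1, 0) land on 128 MiB boundaries measured from the block group start.
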
1591 static int tree_insert_offset(struct btrfs_free_space_ctl *ctl, in tree_insert_offset() argument
1599 lockdep_assert_held(&ctl->tree_lock); in tree_insert_offset()
1605 root = &ctl->free_space_offset; in tree_insert_offset()
1708 tree_search_offset(struct btrfs_free_space_ctl *ctl, in tree_search_offset() argument
1711 struct rb_node *n = ctl->free_space_offset.rb_node; in tree_search_offset()
1714 lockdep_assert_held(&ctl->tree_lock); in tree_search_offset()
1796 if (entry->offset + BITS_PER_BITMAP * ctl->unit > offset) in tree_search_offset()
1811 ctl->unit > offset) in tree_search_offset()
1821 static inline void unlink_free_space(struct btrfs_free_space_ctl *ctl, in unlink_free_space() argument
1825 lockdep_assert_held(&ctl->tree_lock); in unlink_free_space()
1827 rb_erase(&info->offset_index, &ctl->free_space_offset); in unlink_free_space()
1828 rb_erase_cached(&info->bytes_index, &ctl->free_space_bytes); in unlink_free_space()
1829 ctl->free_extents--; in unlink_free_space()
1832 ctl->discardable_extents[BTRFS_STAT_CURR]--; in unlink_free_space()
1833 ctl->discardable_bytes[BTRFS_STAT_CURR] -= info->bytes; in unlink_free_space()
1837 ctl->free_space -= info->bytes; in unlink_free_space()
1840 static int link_free_space(struct btrfs_free_space_ctl *ctl, in link_free_space() argument
1845 lockdep_assert_held(&ctl->tree_lock); in link_free_space()
1848 ret = tree_insert_offset(ctl, NULL, info); in link_free_space()
1852 rb_add_cached(&info->bytes_index, &ctl->free_space_bytes, entry_less); in link_free_space()
1855 ctl->discardable_extents[BTRFS_STAT_CURR]++; in link_free_space()
1856 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in link_free_space()
1859 ctl->free_space += info->bytes; in link_free_space()
1860 ctl->free_extents++; in link_free_space()
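
link_free_space() and unlink_free_space() keep every entry in two indexes at once: the offset-keyed rbtree ctl->free_space_offset and the size-sorted cached rbtree ctl->free_space_bytes, plus the free_space and free_extents counters, all under ctl->tree_lock. The sketch below shows that dual bookkeeping in kernel style with made-up space_ctl/free_space_entry types; the real code also maintains the discardable_* statistics and inserts into the offset tree through tree_insert_offset(), which rejects overlapping entries, rather than a plain rb_add().

    #include <linux/lockdep.h>
    #include <linux/rbtree.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct free_space_entry {
        struct rb_node offset_index;    /* node in ctl->free_space_offset */
        struct rb_node bytes_index;     /* node in ctl->free_space_bytes  */
        u64 offset;
        u64 bytes;
    };

    struct space_ctl {
        spinlock_t tree_lock;                    /* guards both trees + counters */
        struct rb_root free_space_offset;        /* keyed by start offset        */
        struct rb_root_cached free_space_bytes;  /* largest extent first         */
        u64 free_space;
        int free_extents;
    };

    static bool bytes_less(struct rb_node *node, const struct rb_node *parent)
    {
        const struct free_space_entry *a, *b;

        a = rb_entry(node, struct free_space_entry, bytes_index);
        b = rb_entry(parent, struct free_space_entry, bytes_index);
        return a->bytes > b->bytes;     /* sort descending by size */
    }

    static bool offset_less(struct rb_node *node, const struct rb_node *parent)
    {
        const struct free_space_entry *a, *b;

        a = rb_entry(node, struct free_space_entry, offset_index);
        b = rb_entry(parent, struct free_space_entry, offset_index);
        return a->offset < b->offset;
    }

    static void link_entry(struct space_ctl *ctl, struct free_space_entry *info)
    {
        lockdep_assert_held(&ctl->tree_lock);

        rb_add(&info->offset_index, &ctl->free_space_offset, offset_less);
        rb_add_cached(&info->bytes_index, &ctl->free_space_bytes, bytes_less);
        ctl->free_space += info->bytes;
        ctl->free_extents++;
    }

    static void unlink_entry(struct space_ctl *ctl, struct free_space_entry *info)
    {
        lockdep_assert_held(&ctl->tree_lock);

        rb_erase(&info->offset_index, &ctl->free_space_offset);
        rb_erase_cached(&info->bytes_index, &ctl->free_space_bytes);
        ctl->free_space -= info->bytes;
        ctl->free_extents--;
    }

Keeping both indexes lets merging and removal find neighbours by offset through tree_search_offset(), while allocation can start from the largest entry via rb_first_cached() on the bytes tree, which is consistent with how the find_free_space() fragments below use the two trees.
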
1864 static void relink_bitmap_entry(struct btrfs_free_space_ctl *ctl, in relink_bitmap_entry() argument
1871 * want to re-link it into our ctl bytes index. in relink_bitmap_entry()
1876 lockdep_assert_held(&ctl->tree_lock); in relink_bitmap_entry()
1878 rb_erase_cached(&info->bytes_index, &ctl->free_space_bytes); in relink_bitmap_entry()
1879 rb_add_cached(&info->bytes_index, &ctl->free_space_bytes, entry_less); in relink_bitmap_entry()
1882 static inline void bitmap_clear_bits(struct btrfs_free_space_ctl *ctl, in bitmap_clear_bits() argument
1889 start = offset_to_bit(info->offset, ctl->unit, offset); in bitmap_clear_bits()
1890 count = bytes_to_bits(bytes, ctl->unit); in bitmap_clear_bits()
1897 if (info->max_extent_size > ctl->unit) in bitmap_clear_bits()
1900 relink_bitmap_entry(ctl, info); in bitmap_clear_bits()
1910 ctl->discardable_extents[BTRFS_STAT_CURR] += extent_delta; in bitmap_clear_bits()
1911 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in bitmap_clear_bits()
1915 ctl->free_space -= bytes; in bitmap_clear_bits()
1918 static void btrfs_bitmap_set_bits(struct btrfs_free_space_ctl *ctl, in btrfs_bitmap_set_bits() argument
1925 start = offset_to_bit(info->offset, ctl->unit, offset); in btrfs_bitmap_set_bits()
1926 count = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_set_bits()
1938 ctl->free_space += bytes; in btrfs_bitmap_set_bits()
1940 relink_bitmap_entry(ctl, info); in btrfs_bitmap_set_bits()
1950 ctl->discardable_extents[BTRFS_STAT_CURR] += extent_delta; in btrfs_bitmap_set_bits()
1951 ctl->discardable_bytes[BTRFS_STAT_CURR] += bytes; in btrfs_bitmap_set_bits()
1959 static int search_bitmap(struct btrfs_free_space_ctl *ctl, in search_bitmap() argument
1980 i = offset_to_bit(bitmap_info->offset, ctl->unit, in search_bitmap()
1982 bits = bytes_to_bits(*bytes, ctl->unit); in search_bitmap()
2002 *offset = (u64)(i * ctl->unit) + bitmap_info->offset; in search_bitmap()
2003 *bytes = (u64)(found_bits) * ctl->unit; in search_bitmap()
2007 *bytes = (u64)(max_bits) * ctl->unit; in search_bitmap()
2009 relink_bitmap_entry(ctl, bitmap_info); in search_bitmap()
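
Inside a bitmap entry everything is expressed in units of ctl->unit: offset_to_bit()/bytes_to_bits() turn a byte range into a bit range, the set/clear helpers flip those bits, and search_bitmap() looks for a long enough run of set bits. A self-contained userspace sketch of that conversion and run search, assuming one page-sized bitmap and deliberately ignoring the max_extent_size bookkeeping and bytes-tree relinking the kernel does:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE       4096ULL
    #define BITS_PER_BITMAP (PAGE_SIZE * 8)   /* one bitmap covers this many units */

    /* byte offset -> bit index within the bitmap starting at bitmap_start */
    static uint64_t offset_to_bit(uint64_t bitmap_start, uint32_t unit, uint64_t offset)
    {
        return (offset - bitmap_start) / unit;
    }

    static uint64_t bytes_to_bits(uint64_t bytes, uint32_t unit)
    {
        return bytes / unit;
    }

    static bool test_bit(const uint64_t *map, uint64_t i)
    {
        return map[i / 64] & (1ULL << (i % 64));
    }

    /*
     * Find the first run of at least *bytes worth of consecutive set bits,
     * starting the scan at *offset; report where it starts and how long it
     * is, mirroring the (offset, bytes) in/out convention seen above.
     */
    static bool search_run(const uint64_t *map, uint64_t bitmap_start, uint32_t unit,
                           uint64_t *offset, uint64_t *bytes)
    {
        uint64_t i = offset_to_bit(bitmap_start, unit, *offset);
        uint64_t want = bytes_to_bits(*bytes, unit);
        uint64_t run = 0, run_start = 0;

        if (want == 0)
            want = 1;

        for (; i < BITS_PER_BITMAP; i++) {
            if (!test_bit(map, i)) {
                run = 0;
                continue;
            }
            if (run == 0)
                run_start = i;
            if (++run >= want) {
                *offset = bitmap_start + run_start * unit;
                *bytes = run * unit;
                return true;
            }
        }
        return false;
    }

    int main(void)
    {
        uint64_t map[BITS_PER_BITMAP / 64] = { 0 };
        uint64_t offset = 0, bytes = 3 * 4096;

        for (uint64_t i = 10; i < 20; i++)      /* mark units 10..19 free */
            map[i / 64] |= 1ULL << (i % 64);

        if (search_run(map, 0, 4096, &offset, &bytes))
            printf("found %llu bytes at %llu\n",
                   (unsigned long long)bytes, (unsigned long long)offset);
        return 0;
    }
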
2015 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
2024 if (!ctl->free_space_offset.rb_node) in find_free_space()
2028 node = rb_first_cached(&ctl->free_space_bytes); in find_free_space()
2030 entry = tree_search_offset(ctl, offset_to_bitmap(ctl, *offset), in find_free_space()
2065 tmp = entry->offset - ctl->start + align - 1; in find_free_space()
2067 tmp = tmp * align + ctl->start; in find_free_space()
2091 ret = search_bitmap(ctl, entry, &tmp, &size, true); in find_free_space()
2121 static void add_new_bitmap(struct btrfs_free_space_ctl *ctl, in add_new_bitmap() argument
2124 info->offset = offset_to_bitmap(ctl, offset); in add_new_bitmap()
2128 link_free_space(ctl, info); in add_new_bitmap()
2129 ctl->total_bitmaps++; in add_new_bitmap()
2130 recalculate_thresholds(ctl); in add_new_bitmap()
2133 static void free_bitmap(struct btrfs_free_space_ctl *ctl, in free_bitmap() argument
2143 ctl->discardable_extents[BTRFS_STAT_CURR] -= in free_bitmap()
2145 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bitmap_info->bytes; in free_bitmap()
2148 unlink_free_space(ctl, bitmap_info, true); in free_bitmap()
2151 ctl->total_bitmaps--; in free_bitmap()
2152 recalculate_thresholds(ctl); in free_bitmap()
2155 static noinline int remove_from_bitmap(struct btrfs_free_space_ctl *ctl, in remove_from_bitmap() argument
2164 end = bitmap_info->offset + (u64)(BITS_PER_BITMAP * ctl->unit) - 1; in remove_from_bitmap()
2173 search_bytes = ctl->unit; in remove_from_bitmap()
2175 ret = search_bitmap(ctl, bitmap_info, &search_start, &search_bytes, in remove_from_bitmap()
2186 bitmap_clear_bits(ctl, bitmap_info, search_start, search_bytes, true); in remove_from_bitmap()
2193 free_bitmap(ctl, bitmap_info); in remove_from_bitmap()
2219 search_bytes = ctl->unit; in remove_from_bitmap()
2220 ret = search_bitmap(ctl, bitmap_info, &search_start, in remove_from_bitmap()
2227 free_bitmap(ctl, bitmap_info); in remove_from_bitmap()
2232 static u64 add_bytes_to_bitmap(struct btrfs_free_space_ctl *ctl, in add_bytes_to_bitmap() argument
2245 ctl->discardable_extents[BTRFS_STAT_CURR] += in add_bytes_to_bitmap()
2247 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in add_bytes_to_bitmap()
2252 end = info->offset + (u64)(BITS_PER_BITMAP * ctl->unit); in add_bytes_to_bitmap()
2256 btrfs_bitmap_set_bits(ctl, info, offset, bytes_to_set); in add_bytes_to_bitmap()
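
add_bytes_to_bitmap() can only set bits that fall inside the bitmap it was handed, so the fragments show the range being clamped to the bitmap's end (info->offset + BITS_PER_BITMAP * ctl->unit) and the caller dealing with whatever is left over. A stand-alone version of just that clamp, constants assumed:

    #include <stdint.h>

    #define PAGE_SIZE       4096ULL
    #define BITS_PER_BITMAP (PAGE_SIZE * 8)

    /*
     * How much of [offset, offset + bytes) fits inside the bitmap that
     * starts at bitmap_offset; the caller sets that many bytes here and
     * retries the remainder against the next bitmap.
     */
    static uint64_t bytes_coverable_by_bitmap(uint64_t bitmap_offset, uint32_t unit,
                                              uint64_t offset, uint64_t bytes)
    {
        uint64_t end = bitmap_offset + BITS_PER_BITMAP * unit;
        uint64_t room = end - offset;

        return bytes < room ? bytes : room;
    }

This matches the looping callers visible in insert_into_bitmap(), which keep advancing offset and shrinking bytes until the whole range has been spread over one or more bitmaps.
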
2262 static bool use_bitmap(struct btrfs_free_space_ctl *ctl, in use_bitmap() argument
2265 struct btrfs_block_group *block_group = ctl->block_group; in use_bitmap()
2282 if (!forced && ctl->free_extents < ctl->extents_thresh) { in use_bitmap()
2291 if (ctl->free_extents * 3 <= ctl->extents_thresh) in use_bitmap()
2306 if (((BITS_PER_BITMAP * ctl->unit) >> 1) > block_group->length) in use_bitmap()
2316 static int insert_into_bitmap(struct btrfs_free_space_ctl *ctl, in insert_into_bitmap() argument
2330 if (!ctl->op->use_bitmap(ctl, info)) in insert_into_bitmap()
2333 if (ctl->op == &free_space_op) in insert_into_bitmap()
2334 block_group = ctl->block_group; in insert_into_bitmap()
2362 if (entry->offset == offset_to_bitmap(ctl, offset)) { in insert_into_bitmap()
2363 bytes_added = add_bytes_to_bitmap(ctl, entry, offset, in insert_into_bitmap()
2376 bitmap_info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in insert_into_bitmap()
2383 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in insert_into_bitmap()
2397 add_new_bitmap(ctl, info, offset); in insert_into_bitmap()
2402 spin_unlock(&ctl->tree_lock); in insert_into_bitmap()
2409 spin_lock(&ctl->tree_lock); in insert_into_bitmap()
2419 spin_lock(&ctl->tree_lock); in insert_into_bitmap()
2454 static bool try_merge_free_space(struct btrfs_free_space_ctl *ctl, in try_merge_free_space() argument
2470 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2477 left_info = tree_search_offset(ctl, offset - 1, 0, 0); in try_merge_free_space()
2482 unlink_free_space(ctl, right_info, update_stat); in try_merge_free_space()
2492 unlink_free_space(ctl, left_info, update_stat); in try_merge_free_space()
2502 static bool steal_from_bitmap_to_end(struct btrfs_free_space_ctl *ctl, in steal_from_bitmap_to_end() argument
2510 const u64 bitmap_offset = offset_to_bitmap(ctl, end); in steal_from_bitmap_to_end()
2513 bitmap = tree_search_offset(ctl, bitmap_offset, 1, 0); in steal_from_bitmap_to_end()
2517 i = offset_to_bit(bitmap->offset, ctl->unit, end); in steal_from_bitmap_to_end()
2521 bytes = (j - i) * ctl->unit; in steal_from_bitmap_to_end()
2528 bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat); in steal_from_bitmap_to_end()
2531 free_bitmap(ctl, bitmap); in steal_from_bitmap_to_end()
2536 static bool steal_from_bitmap_to_front(struct btrfs_free_space_ctl *ctl, in steal_from_bitmap_to_front() argument
2547 bitmap_offset = offset_to_bitmap(ctl, info->offset); in steal_from_bitmap_to_front()
2552 bitmap_offset = offset_to_bitmap(ctl, info->offset - 1); in steal_from_bitmap_to_front()
2555 bitmap = tree_search_offset(ctl, bitmap_offset, 1, 0); in steal_from_bitmap_to_front()
2559 i = offset_to_bit(bitmap->offset, ctl->unit, info->offset) - 1; in steal_from_bitmap_to_front()
2571 bytes = (i + 1) * ctl->unit; in steal_from_bitmap_to_front()
2573 bytes = (i - prev_j) * ctl->unit; in steal_from_bitmap_to_front()
2582 bitmap_clear_bits(ctl, bitmap, info->offset, bytes, update_stat); in steal_from_bitmap_to_front()
2585 free_bitmap(ctl, bitmap); in steal_from_bitmap_to_front()
2601 static void steal_from_bitmap(struct btrfs_free_space_ctl *ctl, in steal_from_bitmap() argument
2612 if (ctl->total_bitmaps > 0) { in steal_from_bitmap()
2616 stole_end = steal_from_bitmap_to_end(ctl, info, update_stat); in steal_from_bitmap()
2617 if (ctl->total_bitmaps > 0) in steal_from_bitmap()
2618 stole_front = steal_from_bitmap_to_front(ctl, info, in steal_from_bitmap()
2622 try_merge_free_space(ctl, info, update_stat); in steal_from_bitmap()
2631 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in __btrfs_add_free_space() local
2648 spin_lock(&ctl->tree_lock); in __btrfs_add_free_space()
2650 if (try_merge_free_space(ctl, info, true)) in __btrfs_add_free_space()
2658 ret = insert_into_bitmap(ctl, info); in __btrfs_add_free_space()
2672 steal_from_bitmap(ctl, info, true); in __btrfs_add_free_space()
2676 ret = link_free_space(ctl, info); in __btrfs_add_free_space()
2681 spin_unlock(&ctl->tree_lock); in __btrfs_add_free_space()
2700 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in __btrfs_add_free_space_zoned() local
2726 spin_lock(&ctl->tree_lock); in __btrfs_add_free_space_zoned()
2727 ctl->free_space += to_free; in __btrfs_add_free_space_zoned()
2728 spin_unlock(&ctl->tree_lock); in __btrfs_add_free_space_zoned()
2806 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_remove_free_space() local
2831 spin_lock(&ctl->tree_lock); in btrfs_remove_free_space()
2838 info = tree_search_offset(ctl, offset, 0, 0); in btrfs_remove_free_space()
2844 info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in btrfs_remove_free_space()
2859 unlink_free_space(ctl, info, true); in btrfs_remove_free_space()
2866 ret = link_free_space(ctl, info); in btrfs_remove_free_space()
2879 ret = link_free_space(ctl, info); in btrfs_remove_free_space()
2893 spin_unlock(&ctl->tree_lock); in btrfs_remove_free_space()
2904 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
2911 spin_unlock(&ctl->tree_lock); in btrfs_remove_free_space()
2920 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_dump_free_space() local
2937 spin_lock(&ctl->tree_lock); in btrfs_dump_free_space()
2938 for (n = rb_first(&ctl->free_space_offset); n; n = rb_next(n)) { in btrfs_dump_free_space()
2945 spin_unlock(&ctl->tree_lock); in btrfs_dump_free_space()
2954 struct btrfs_free_space_ctl *ctl) in btrfs_init_free_space_ctl() argument
2958 spin_lock_init(&ctl->tree_lock); in btrfs_init_free_space_ctl()
2959 ctl->unit = fs_info->sectorsize; in btrfs_init_free_space_ctl()
2960 ctl->start = block_group->start; in btrfs_init_free_space_ctl()
2961 ctl->block_group = block_group; in btrfs_init_free_space_ctl()
2962 ctl->op = &free_space_op; in btrfs_init_free_space_ctl()
2963 ctl->free_space_bytes = RB_ROOT_CACHED; in btrfs_init_free_space_ctl()
2964 INIT_LIST_HEAD(&ctl->trimming_ranges); in btrfs_init_free_space_ctl()
2965 mutex_init(&ctl->cache_writeout_mutex); in btrfs_init_free_space_ctl()
2972 ctl->extents_thresh = (SZ_32K / 2) / sizeof(struct btrfs_free_space); in btrfs_init_free_space_ctl()
2985 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in __btrfs_return_cluster_to_free_space() local
2988 lockdep_assert_held(&ctl->tree_lock); in __btrfs_return_cluster_to_free_space()
3012 ctl->discardable_extents[BTRFS_STAT_CURR]--; in __btrfs_return_cluster_to_free_space()
3013 ctl->discardable_bytes[BTRFS_STAT_CURR] -= in __btrfs_return_cluster_to_free_space()
3017 try_merge_free_space(ctl, entry, false); in __btrfs_return_cluster_to_free_space()
3018 steal_from_bitmap(ctl, entry, false); in __btrfs_return_cluster_to_free_space()
3022 ctl->discardable_extents[BTRFS_STAT_CURR]++; in __btrfs_return_cluster_to_free_space()
3023 ctl->discardable_bytes[BTRFS_STAT_CURR] += in __btrfs_return_cluster_to_free_space()
3027 tree_insert_offset(ctl, NULL, entry); in __btrfs_return_cluster_to_free_space()
3028 rb_add_cached(&entry->bytes_index, &ctl->free_space_bytes, in __btrfs_return_cluster_to_free_space()
3038 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_remove_free_space_cache() local
3042 spin_lock(&ctl->tree_lock); in btrfs_remove_free_space_cache()
3051 cond_resched_lock(&ctl->tree_lock); in btrfs_remove_free_space_cache()
3053 __btrfs_remove_free_space_cache(ctl); in btrfs_remove_free_space_cache()
3055 spin_unlock(&ctl->tree_lock); in btrfs_remove_free_space_cache()
3064 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_is_free_space_trimmed() local
3069 spin_lock(&ctl->tree_lock); in btrfs_is_free_space_trimmed()
3070 node = rb_first(&ctl->free_space_offset); in btrfs_is_free_space_trimmed()
3083 spin_unlock(&ctl->tree_lock); in btrfs_is_free_space_trimmed()
3091 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_find_space_for_alloc() local
3104 spin_lock(&ctl->tree_lock); in btrfs_find_space_for_alloc()
3105 entry = find_free_space(ctl, &offset, &bytes_search, in btrfs_find_space_for_alloc()
3113 bitmap_clear_bits(ctl, entry, offset, bytes, true); in btrfs_find_space_for_alloc()
3119 free_bitmap(ctl, entry); in btrfs_find_space_for_alloc()
3121 unlink_free_space(ctl, entry, true); in btrfs_find_space_for_alloc()
3136 link_free_space(ctl, entry); in btrfs_find_space_for_alloc()
3140 spin_unlock(&ctl->tree_lock); in btrfs_find_space_for_alloc()
3160 struct btrfs_free_space_ctl *ctl; in btrfs_return_cluster_to_free_space() local
3178 ctl = block_group->free_space_ctl; in btrfs_return_cluster_to_free_space()
3181 spin_lock(&ctl->tree_lock); in btrfs_return_cluster_to_free_space()
3183 spin_unlock(&ctl->tree_lock); in btrfs_return_cluster_to_free_space()
3197 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_alloc_from_bitmap() local
3206 err = search_bitmap(ctl, entry, &search_start, &search_bytes, true); in btrfs_alloc_from_bitmap()
3214 bitmap_clear_bits(ctl, entry, ret, bytes, false); in btrfs_alloc_from_bitmap()
3228 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_alloc_from_cluster() local
3293 spin_lock(&ctl->tree_lock); in btrfs_alloc_from_cluster()
3298 ctl->free_space -= bytes; in btrfs_alloc_from_cluster()
3300 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in btrfs_alloc_from_cluster()
3305 ctl->free_extents--; in btrfs_alloc_from_cluster()
3309 ctl->total_bitmaps--; in btrfs_alloc_from_cluster()
3310 recalculate_thresholds(ctl); in btrfs_alloc_from_cluster()
3312 ctl->discardable_extents[BTRFS_STAT_CURR]--; in btrfs_alloc_from_cluster()
3318 spin_unlock(&ctl->tree_lock); in btrfs_alloc_from_cluster()
3329 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_bitmap_cluster() local
3340 lockdep_assert_held(&ctl->tree_lock); in btrfs_bitmap_cluster()
3342 i = offset_to_bit(entry->offset, ctl->unit, in btrfs_bitmap_cluster()
3344 want_bits = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_cluster()
3345 min_bits = bytes_to_bits(min_bytes, ctl->unit); in btrfs_bitmap_cluster()
3371 entry->max_extent_size = (u64)max_bits * ctl->unit; in btrfs_bitmap_cluster()
3382 if (cluster->max_size < found_bits * ctl->unit) in btrfs_bitmap_cluster()
3383 cluster->max_size = found_bits * ctl->unit; in btrfs_bitmap_cluster()
3390 cluster->window_start = start * ctl->unit + entry->offset; in btrfs_bitmap_cluster()
3391 rb_erase(&entry->offset_index, &ctl->free_space_offset); in btrfs_bitmap_cluster()
3392 rb_erase_cached(&entry->bytes_index, &ctl->free_space_bytes); in btrfs_bitmap_cluster()
3403 ret = tree_insert_offset(ctl, cluster, entry); in btrfs_bitmap_cluster()
3407 total_found * ctl->unit, 1); in btrfs_bitmap_cluster()
3422 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in setup_cluster_no_bitmap() local
3431 lockdep_assert_held(&ctl->tree_lock); in setup_cluster_no_bitmap()
3433 entry = tree_search_offset(ctl, offset, 0, 1); in setup_cluster_no_bitmap()
3493 rb_erase(&entry->offset_index, &ctl->free_space_offset); in setup_cluster_no_bitmap()
3494 rb_erase_cached(&entry->bytes_index, &ctl->free_space_bytes); in setup_cluster_no_bitmap()
3495 ret = tree_insert_offset(ctl, cluster, entry); in setup_cluster_no_bitmap()
3515 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in setup_cluster_bitmap() local
3518 u64 bitmap_offset = offset_to_bitmap(ctl, offset); in setup_cluster_bitmap()
3520 if (ctl->total_bitmaps == 0) in setup_cluster_bitmap()
3531 entry = tree_search_offset(ctl, bitmap_offset, 1, 0); in setup_cluster_bitmap()
3565 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_find_space_cluster() local
3589 spin_lock(&ctl->tree_lock); in btrfs_find_space_cluster()
3595 if (ctl->free_space < bytes) { in btrfs_find_space_cluster()
3596 spin_unlock(&ctl->tree_lock); in btrfs_find_space_cluster()
3633 spin_unlock(&ctl->tree_lock); in btrfs_find_space_cluster()
3660 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in do_trimming() local
3684 mutex_lock(&ctl->cache_writeout_mutex); in do_trimming()
3694 mutex_unlock(&ctl->cache_writeout_mutex); in do_trimming()
3719 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in trim_no_bitmap() local
3732 mutex_lock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3733 spin_lock(&ctl->tree_lock); in trim_no_bitmap()
3735 if (ctl->free_space < minlen) in trim_no_bitmap()
3738 entry = tree_search_offset(ctl, start, 0, 1); in trim_no_bitmap()
3762 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3763 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3766 unlink_free_space(ctl, entry, true); in trim_no_bitmap()
3779 link_free_space(ctl, entry); in trim_no_bitmap()
3787 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3788 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3792 unlink_free_space(ctl, entry, true); in trim_no_bitmap()
3796 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3799 list_add_tail(&trim_entry.list, &ctl->trimming_ranges); in trim_no_bitmap()
3800 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3827 spin_unlock(&ctl->tree_lock); in trim_no_bitmap()
3828 mutex_unlock(&ctl->cache_writeout_mutex); in trim_no_bitmap()
3847 static void reset_trimming_bitmap(struct btrfs_free_space_ctl *ctl, u64 offset) in reset_trimming_bitmap() argument
3851 spin_lock(&ctl->tree_lock); in reset_trimming_bitmap()
3852 entry = tree_search_offset(ctl, offset, 1, 0); in reset_trimming_bitmap()
3855 ctl->discardable_extents[BTRFS_STAT_CURR] += in reset_trimming_bitmap()
3857 ctl->discardable_bytes[BTRFS_STAT_CURR] += entry->bytes; in reset_trimming_bitmap()
3862 spin_unlock(&ctl->tree_lock); in reset_trimming_bitmap()
3865 static void end_trimming_bitmap(struct btrfs_free_space_ctl *ctl, in end_trimming_bitmap() argument
3870 ctl->discardable_extents[BTRFS_STAT_CURR] -= in end_trimming_bitmap()
3872 ctl->discardable_bytes[BTRFS_STAT_CURR] -= entry->bytes; in end_trimming_bitmap()
3885 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in trim_bitmaps() local
3890 u64 offset = offset_to_bitmap(ctl, start); in trim_bitmaps()
3897 mutex_lock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3898 spin_lock(&ctl->tree_lock); in trim_bitmaps()
3900 if (ctl->free_space < minlen) { in trim_bitmaps()
3903 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3904 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3908 entry = tree_search_offset(ctl, offset, 1, 0); in trim_bitmaps()
3919 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3920 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3935 ret2 = search_bitmap(ctl, entry, &start, &bytes, false); in trim_bitmaps()
3942 end_trimming_bitmap(ctl, entry); in trim_bitmaps()
3945 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3946 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3956 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3957 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3963 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3964 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3979 bitmap_clear_bits(ctl, entry, start, bytes, true); in trim_bitmaps()
3981 free_bitmap(ctl, entry); in trim_bitmaps()
3983 spin_unlock(&ctl->tree_lock); in trim_bitmaps()
3986 list_add_tail(&trim_entry.list, &ctl->trimming_ranges); in trim_bitmaps()
3987 mutex_unlock(&ctl->cache_writeout_mutex); in trim_bitmaps()
3992 reset_trimming_bitmap(ctl, offset); in trim_bitmaps()
3999 offset += BITS_PER_BITMAP * ctl->unit; in trim_bitmaps()
4008 reset_trimming_bitmap(ctl, offset); in trim_bitmaps()
4026 struct btrfs_free_space_ctl *ctl = block_group->free_space_ctl; in btrfs_trim_block_group() local
4047 div64_u64_rem(end, BITS_PER_BITMAP * ctl->unit, &rem); in btrfs_trim_block_group()
4050 reset_trimming_bitmap(ctl, offset_to_bitmap(ctl, end)); in btrfs_trim_block_group()
4195 struct btrfs_free_space_ctl *ctl = cache->free_space_ctl; in test_add_free_space_entry() local
4210 spin_lock(&ctl->tree_lock); in test_add_free_space_entry()
4214 ret = link_free_space(ctl, info); in test_add_free_space_entry()
4215 spin_unlock(&ctl->tree_lock); in test_add_free_space_entry()
4229 spin_lock(&ctl->tree_lock); in test_add_free_space_entry()
4230 bitmap_info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in test_add_free_space_entry()
4235 add_new_bitmap(ctl, info, offset); in test_add_free_space_entry()
4240 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in test_add_free_space_entry()
4245 spin_unlock(&ctl->tree_lock); in test_add_free_space_entry()
4265 struct btrfs_free_space_ctl *ctl = cache->free_space_ctl; in test_check_exists() local
4269 spin_lock(&ctl->tree_lock); in test_check_exists()
4270 info = tree_search_offset(ctl, offset, 0, 0); in test_check_exists()
4272 info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in test_check_exists()
4285 bit_bytes = ctl->unit; in test_check_exists()
4286 ret = search_bitmap(ctl, info, &bit_off, &bit_bytes, false); in test_check_exists()
4338 spin_unlock(&ctl->tree_lock); in test_check_exists()
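
Taken together, the call sites above show one locking shape: ctl->tree_lock, a spinlock, protects both rbtrees and the counters; where ctl->cache_writeout_mutex is also needed, on the cache writeout and trim paths, it is taken before the spinlock and released after it; and loops that tear down many entries drop the spinlock periodically with cond_resched_lock(). A kernel-style sketch of that shape, with hypothetical space_ctl/space_entry types rather than the btrfs ones:

    #include <linux/mutex.h>
    #include <linux/rbtree.h>
    #include <linux/sched.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct space_entry {
        struct rb_node offset_index;
        u64 offset;
        u64 bytes;
    };

    struct space_ctl {
        spinlock_t tree_lock;                  /* guards the trees and counters      */
        struct mutex cache_writeout_mutex;     /* outer lock on writeout/trim paths  */
        struct rb_root free_space_offset;
        u64 free_space;
    };

    /* writeout/trim style: mutex first, then the spinlock, released in reverse */
    static void writeout_style_update(struct space_ctl *ctl)
    {
        mutex_lock(&ctl->cache_writeout_mutex);
        spin_lock(&ctl->tree_lock);

        /* ... walk or edit the free space trees ... */

        spin_unlock(&ctl->tree_lock);
        mutex_unlock(&ctl->cache_writeout_mutex);
    }

    /* cache-drop style: free everything, yielding the spinlock between entries */
    static void drain_all(struct space_ctl *ctl)
    {
        struct rb_node *node;

        spin_lock(&ctl->tree_lock);
        while ((node = rb_last(&ctl->free_space_offset)) != NULL) {
            struct space_entry *e = rb_entry(node, struct space_entry, offset_index);

            rb_erase(node, &ctl->free_space_offset);
            ctl->free_space -= e->bytes;
            kfree(e);
            cond_resched_lock(&ctl->tree_lock);    /* may drop and re-take tree_lock */
        }
        spin_unlock(&ctl->tree_lock);
    }

The strict mutex-outside-spinlock ordering is also why the trimming_ranges list shows up in both write_cache_extent_entries() and trim_no_bitmap(): ranges temporarily pulled off the tree for a discard stay visible to the writeout path through that mutex-protected list.
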