Lines matching full:trans — occurrences of the btree_trans pointer in the bcachefs interior btree node update code. The number at the start of each entry is the source line; the trailing "argument"/"local" notes how trans is bound on that line. A short usage sketch follows the listing.
45 int bch2_btree_node_check_topology(struct btree_trans *trans, struct btree *b) in bch2_btree_node_check_topology() argument
47 struct bch_fs *c = trans->c; in bch2_btree_node_check_topology()
63 bch2_btree_and_journal_iter_init_node_iter(trans, &iter, b); in bch2_btree_node_check_topology()
69 log_fsck_err(trans, btree_root_bad_min_key, in bch2_btree_node_check_topology()
77 log_fsck_err(trans, btree_root_bad_max_key, in bch2_btree_node_check_topology()
109 log_fsck_err(trans, btree_node_topology_bad_min_key, "%s", buf.buf); in bch2_btree_node_check_topology()
126 log_fsck_err(trans, btree_node_topology_empty_interior_node, "%s", buf.buf); in bch2_btree_node_check_topology()
139 log_fsck_err(trans, btree_node_topology_bad_max_key, "%s", buf.buf); in bch2_btree_node_check_topology()
219 static void __btree_node_free(struct btree_trans *trans, struct btree *b) in __btree_node_free() argument
221 struct bch_fs *c = trans->c; in __btree_node_free()
223 trace_and_count(c, btree_node_free, trans, b); in __btree_node_free()
236 static void bch2_btree_node_free_inmem(struct btree_trans *trans, in bch2_btree_node_free_inmem() argument
240 struct bch_fs *c = trans->c; in bch2_btree_node_free_inmem()
242 bch2_btree_node_lock_write_nofail(trans, path, &b->c); in bch2_btree_node_free_inmem()
244 __btree_node_free(trans, b); in bch2_btree_node_free_inmem()
253 bch2_trans_node_drop(trans, b); in bch2_btree_node_free_inmem()
257 struct btree_trans *trans, in bch2_btree_node_free_never_used() argument
283 bch2_trans_node_drop(trans, b); in bch2_btree_node_free_never_used()
286 static struct btree *__bch2_btree_node_alloc(struct btree_trans *trans, in __bch2_btree_node_alloc() argument
292 struct bch_fs *c = trans->c; in __bch2_btree_node_alloc()
304 b = bch2_btree_node_mem_alloc(trans, interior_node); in __bch2_btree_node_alloc()
322 ret = bch2_alloc_sectors_start_trans(trans, in __bch2_btree_node_alloc()
365 struct btree_trans *trans, in bch2_btree_node_alloc() argument
378 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_intent); in bch2_btree_node_alloc()
379 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_write); in bch2_btree_node_alloc()
412 trace_and_count(c, btree_node_alloc, trans, b); in bch2_btree_node_alloc()
431 struct btree_trans *trans, in bch2_btree_node_alloc_replacement() argument
434 struct btree *n = bch2_btree_node_alloc(as, trans, b->c.level); in bch2_btree_node_alloc_replacement()
459 struct btree_trans *trans, unsigned level) in __btree_root_alloc() argument
461 struct btree *b = bch2_btree_node_alloc(as, trans, level); in __btree_root_alloc()
473 static void bch2_btree_reserve_put(struct btree_update *as, struct btree_trans *trans) in bch2_btree_reserve_put() argument
500 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_intent); in bch2_btree_reserve_put()
501 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_write); in bch2_btree_reserve_put()
502 __btree_node_free(trans, b); in bch2_btree_reserve_put()
508 static int bch2_btree_reserve_get(struct btree_trans *trans, in bch2_btree_reserve_get() argument
524 ret = bch2_btree_cache_cannibalize_lock(trans, cl); in bch2_btree_reserve_get()
532 b = __bch2_btree_node_alloc(trans, &as->disk_res, cl, in bch2_btree_reserve_get()
543 bch2_btree_cache_cannibalize_unlock(trans); in bch2_btree_reserve_get()
549 static void bch2_btree_update_free(struct btree_update *as, struct btree_trans *trans) in bch2_btree_update_free() argument
560 bch2_btree_reserve_put(as, trans); in bch2_btree_update_free()
619 static int btree_update_nodes_written_trans(struct btree_trans *trans, in btree_update_nodes_written_trans() argument
622 struct jset_entry *e = bch2_trans_jset_entry_alloc(trans, as->journal_u64s); in btree_update_nodes_written_trans()
629 trans->journal_pin = &as->journal; in btree_update_nodes_written_trans()
634 ret = bch2_key_trigger_old(trans, as->btree_id, level, bkey_i_to_s_c(k), in btree_update_nodes_written_trans()
643 ret = bch2_key_trigger_new(trans, as->btree_id, level, bkey_i_to_s(k), in btree_update_nodes_written_trans()
656 struct btree_trans *trans = bch2_trans_get(c); in btree_update_nodes_written() local
684 bch2_trans_begin(trans); in btree_update_nodes_written()
685 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_read); in btree_update_nodes_written()
688 bch2_trans_unlock_long(trans); in btree_update_nodes_written()
708 ret = commit_do(trans, &as->disk_res, &journal_seq, in btree_update_nodes_written()
713 btree_update_nodes_written_trans(trans, as)); in btree_update_nodes_written()
714 bch2_trans_unlock(trans); in btree_update_nodes_written()
728 bch2_trans_unlock(trans); in btree_update_nodes_written()
729 bch2_trans_begin(trans); in btree_update_nodes_written()
750 btree_path_idx_t path_idx = bch2_path_get_unlocked_mut(trans, in btree_update_nodes_written()
752 struct btree_path *path = trans->paths + path_idx; in btree_update_nodes_written()
753 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_intent); in btree_update_nodes_written()
754 mark_btree_node_locked(trans, path, b->c.level, BTREE_NODE_INTENT_LOCKED); in btree_update_nodes_written()
758 bch2_btree_node_lock_write_nofail(trans, path, &b->c); in btree_update_nodes_written()
797 btree_node_write_if_need(trans, b, SIX_LOCK_intent); in btree_update_nodes_written()
798 btree_node_unlock(trans, path, b->c.level); in btree_update_nodes_written()
799 bch2_path_put(trans, path_idx, true); in btree_update_nodes_written()
817 btree_node_lock_nopath_nofail(trans, &b->c, SIX_LOCK_read); in btree_update_nodes_written()
818 btree_node_write_if_need(trans, b, SIX_LOCK_read); in btree_update_nodes_written()
825 bch2_btree_update_free(as, trans); in btree_update_nodes_written()
826 bch2_trans_put(trans); in btree_update_nodes_written()
1100 static void bch2_btree_update_done(struct btree_update *as, struct btree_trans *trans) in bch2_btree_update_done() argument
1111 bch2_btree_reserve_put(as, trans); in bch2_btree_update_done()
1121 bch2_btree_update_start(struct btree_trans *trans, struct btree_path *path, in bch2_btree_update_start() argument
1124 struct bch_fs *c = trans->c; in bch2_btree_update_start()
1133 u32 restart_count = trans->restart_count; in bch2_btree_update_start()
1150 ret = drop_locks_do(trans, in bch2_btree_update_start()
1160 ret = bch2_btree_path_upgrade(trans, path, level_end + 1); in bch2_btree_update_start()
1183 ret = drop_locks_do(trans, (down_read(&c->gc_lock), 0)); in bch2_btree_update_start()
1232 ret = bch2_btree_reserve_get(trans, as, nr_nodes, flags, NULL); in bch2_btree_update_start()
1251 ret = bch2_btree_reserve_get(trans, as, nr_nodes, flags, &cl); in bch2_btree_update_start()
1253 bch2_trans_unlock(trans); in bch2_btree_update_start()
1259 trace_and_count(c, btree_reserve_get_fail, trans->fn, in bch2_btree_update_start()
1264 ret = bch2_trans_relock(trans); in bch2_btree_update_start()
1268 bch2_trans_verify_not_restarted(trans, restart_count); in bch2_btree_update_start()
1271 bch2_btree_update_free(as, trans); in bch2_btree_update_start()
1296 struct btree_trans *trans, in bch2_btree_set_root() argument
1303 trace_and_count(c, btree_node_set_root, trans, b); in bch2_btree_set_root()
1312 bch2_btree_node_lock_write_nofail(trans, path, &old->c); in bch2_btree_set_root()
1314 int ret = bch2_btree_node_lock_write(trans, path, &old->c); in bch2_btree_set_root()
1330 bch2_btree_node_unlock_write(trans, path, old); in bch2_btree_set_root()
1337 struct btree_trans *trans, in bch2_insert_fixup_btree_ptr() argument
1379 bch2_btree_bset_insert_key(trans, path, b, node_iter, insert); in bch2_insert_fixup_btree_ptr()
1396 struct btree_trans *trans, in bch2_btree_insert_keys_interior() argument
1414 bch2_insert_fixup_btree_ptr(as, trans, path, b, &node_iter, insert); in bch2_btree_insert_keys_interior()
1416 if (bch2_btree_node_check_topology(trans, b)) { in bch2_btree_insert_keys_interior()
1422 bch2_bkey_val_to_text(&buf, trans->c, bkey_i_to_s_c(k)); in bch2_btree_insert_keys_interior()
1447 struct btree_trans *trans, in __btree_split_node() argument
1485 (bch2_key_deleted_in_journal(trans, b->c.btree_id, b->c.level, uk.p) || in __btree_split_node()
1549 BUG_ON(bch2_btree_node_check_topology(trans, n[i])); in __btree_split_node()
1565 struct btree_trans *trans, in btree_split_insert_keys() argument
1570 struct btree_path *path = trans->paths + path_idx; in btree_split_insert_keys()
1578 bch2_btree_insert_keys_interior(as, trans, path, b, node_iter, keys); in btree_split_insert_keys()
1582 static int btree_split(struct btree_update *as, struct btree_trans *trans, in btree_split() argument
1587 struct btree *parent = btree_node_parent(trans->paths + path, b); in btree_split()
1595 BUG_ON(parent && !btree_node_intent_locked(trans->paths + path, b->c.level + 1)); in btree_split()
1597 ret = bch2_btree_node_check_topology(trans, b); in btree_split()
1604 trace_and_count(c, btree_node_split, trans, b); in btree_split()
1606 n[0] = n1 = bch2_btree_node_alloc(as, trans, b->c.level); in btree_split()
1607 n[1] = n2 = bch2_btree_node_alloc(as, trans, b->c.level); in btree_split()
1609 __btree_split_node(as, trans, b, n, keys); in btree_split()
1612 btree_split_insert_keys(as, trans, path, n1, keys); in btree_split()
1613 btree_split_insert_keys(as, trans, path, n2, keys); in btree_split()
1625 path1 = bch2_path_get_unlocked_mut(trans, as->btree_id, n1->c.level, n1->key.k.p); in btree_split()
1627 mark_btree_node_locked(trans, trans->paths + path1, n1->c.level, BTREE_NODE_INTENT_LOCKED); in btree_split()
1628 bch2_btree_path_level_init(trans, trans->paths + path1, n1); in btree_split()
1630 path2 = bch2_path_get_unlocked_mut(trans, as->btree_id, n2->c.level, n2->key.k.p); in btree_split()
1632 mark_btree_node_locked(trans, trans->paths + path2, n2->c.level, BTREE_NODE_INTENT_LOCKED); in btree_split()
1633 bch2_btree_path_level_init(trans, trans->paths + path2, n2); in btree_split()
1645 n3 = __btree_root_alloc(as, trans, b->c.level + 1); in btree_split()
1650 trans->paths[path2].locks_want++; in btree_split()
1651 BUG_ON(btree_node_locked(trans->paths + path2, n3->c.level)); in btree_split()
1653 mark_btree_node_locked(trans, trans->paths + path2, n3->c.level, BTREE_NODE_INTENT_LOCKED); in btree_split()
1654 bch2_btree_path_level_init(trans, trans->paths + path2, n3); in btree_split()
1659 btree_split_insert_keys(as, trans, path, n3, &as->parent_keys); in btree_split()
1662 trace_and_count(c, btree_node_compact, trans, b); in btree_split()
1664 n1 = bch2_btree_node_alloc_replacement(as, trans, b); in btree_split()
1667 btree_split_insert_keys(as, trans, path, n1, keys); in btree_split()
1675 path1 = bch2_path_get_unlocked_mut(trans, as->btree_id, n1->c.level, n1->key.k.p); in btree_split()
1677 mark_btree_node_locked(trans, trans->paths + path1, n1->c.level, BTREE_NODE_INTENT_LOCKED); in btree_split()
1678 bch2_btree_path_level_init(trans, trans->paths + path1, n1); in btree_split()
1688 ret = bch2_btree_insert_node(as, trans, path, parent, &as->parent_keys); in btree_split()
1690 ret = bch2_btree_set_root(as, trans, trans->paths + path, n3, false); in btree_split()
1693 ret = bch2_btree_set_root(as, trans, trans->paths + path, n1, false); in btree_split()
1703 bch2_btree_node_write_trans(trans, n3, SIX_LOCK_intent, 0); in btree_split()
1707 bch2_btree_node_write_trans(trans, n2, SIX_LOCK_intent, 0); in btree_split()
1710 bch2_btree_node_write_trans(trans, n1, SIX_LOCK_intent, 0); in btree_split()
1718 bch2_btree_node_free_inmem(trans, trans->paths + path, b); in btree_split()
1721 bch2_trans_node_add(trans, trans->paths + path, n3); in btree_split()
1723 bch2_trans_node_add(trans, trans->paths + path2, n2); in btree_split()
1724 bch2_trans_node_add(trans, trans->paths + path1, n1); in btree_split()
1733 __bch2_btree_path_unlock(trans, trans->paths + path2); in btree_split()
1734 bch2_path_put(trans, path2, true); in btree_split()
1737 __bch2_btree_path_unlock(trans, trans->paths + path1); in btree_split()
1738 bch2_path_put(trans, path1, true); in btree_split()
1741 bch2_trans_verify_locks(trans); in btree_split()
1750 bch2_btree_node_free_never_used(as, trans, n3); in btree_split()
1752 bch2_btree_node_free_never_used(as, trans, n2); in btree_split()
1753 bch2_btree_node_free_never_used(as, trans, n1); in btree_split()
1761 * @trans: btree_trans object
1772 static int bch2_btree_insert_node(struct btree_update *as, struct btree_trans *trans, in bch2_btree_insert_node() argument
1777 struct btree_path *path = trans->paths + path_idx, *linked; in bch2_btree_insert_node()
1790 ret = bch2_btree_node_lock_write(trans, path, &b->c); in bch2_btree_insert_node()
1794 bch2_btree_node_prep_for_write(trans, path, b); in bch2_btree_insert_node()
1797 bch2_btree_node_unlock_write(trans, path, b); in bch2_btree_insert_node()
1801 ret = bch2_btree_node_check_topology(trans, b); in bch2_btree_insert_node()
1803 bch2_btree_node_unlock_write(trans, path, b); in bch2_btree_insert_node()
1807 bch2_btree_insert_keys_interior(as, trans, path, b, in bch2_btree_insert_node()
1810 trans_for_each_path_with_node(trans, b, linked, i) in bch2_btree_insert_node()
1813 bch2_trans_verify_paths(trans); in bch2_btree_insert_node()
1825 bch2_trans_node_reinit_iter(trans, b); in bch2_btree_insert_node()
1828 bch2_btree_node_unlock_write(trans, path, b); in bch2_btree_insert_node()
1836 trace_and_count(c, trans_restart_split_race, trans, _THIS_IP_, b); in bch2_btree_insert_node()
1837 return btree_trans_restart(trans, BCH_ERR_transaction_restart_split_race); in bch2_btree_insert_node()
1840 return btree_split(as, trans, path_idx, b, keys); in bch2_btree_insert_node()
1843 int bch2_btree_split_leaf(struct btree_trans *trans, in bch2_btree_split_leaf() argument
1848 struct btree *b = path_l(trans->paths + path)->b; in bch2_btree_split_leaf()
1853 as = bch2_btree_update_start(trans, trans->paths + path, in bch2_btree_split_leaf()
1854 trans->paths[path].level, in bch2_btree_split_leaf()
1859 ret = btree_split(as, trans, path, b, NULL); in bch2_btree_split_leaf()
1861 bch2_btree_update_free(as, trans); in bch2_btree_split_leaf()
1865 bch2_btree_update_done(as, trans); in bch2_btree_split_leaf()
1867 for (l = trans->paths[path].level + 1; in bch2_btree_split_leaf()
1868 btree_node_intent_locked(&trans->paths[path], l) && !ret; in bch2_btree_split_leaf()
1870 ret = bch2_foreground_maybe_merge(trans, path, l, flags); in bch2_btree_split_leaf()
1875 static void __btree_increase_depth(struct btree_update *as, struct btree_trans *trans, in __btree_increase_depth() argument
1879 struct btree_path *path = trans->paths + path_idx; in __btree_increase_depth()
1884 n = __btree_root_alloc(as, trans, b->c.level + 1); in __btree_increase_depth()
1892 mark_btree_node_locked(trans, path, n->c.level, BTREE_NODE_INTENT_LOCKED); in __btree_increase_depth()
1893 bch2_btree_path_level_init(trans, path, n); in __btree_increase_depth()
1899 btree_split_insert_keys(as, trans, path_idx, n, &as->parent_keys); in __btree_increase_depth()
1901 int ret = bch2_btree_set_root(as, trans, path, n, true); in __btree_increase_depth()
1905 bch2_btree_node_write_trans(trans, n, SIX_LOCK_intent, 0); in __btree_increase_depth()
1906 bch2_trans_node_add(trans, path, n); in __btree_increase_depth()
1913 bch2_trans_verify_locks(trans); in __btree_increase_depth()
1916 int bch2_btree_increase_depth(struct btree_trans *trans, btree_path_idx_t path, unsigned flags) in bch2_btree_increase_depth() argument
1918 struct bch_fs *c = trans->c; in bch2_btree_increase_depth()
1919 struct btree *b = bch2_btree_id_root(c, trans->paths[path].btree_id)->b; in bch2_btree_increase_depth()
1922 return bch2_btree_split_leaf(trans, path, flags); in bch2_btree_increase_depth()
1925 bch2_btree_update_start(trans, trans->paths + path, b->c.level, true, flags); in bch2_btree_increase_depth()
1929 __btree_increase_depth(as, trans, path); in bch2_btree_increase_depth()
1930 bch2_btree_update_done(as, trans); in bch2_btree_increase_depth()
1934 int __bch2_foreground_maybe_merge(struct btree_trans *trans, in __bch2_foreground_maybe_merge() argument
1940 struct bch_fs *c = trans->c; in __bch2_foreground_maybe_merge()
1948 enum btree_id btree = trans->paths[path].btree_id; in __bch2_foreground_maybe_merge()
1953 bch2_trans_verify_not_unlocked_or_in_restart(trans); in __bch2_foreground_maybe_merge()
1954 BUG_ON(!trans->paths[path].should_be_locked); in __bch2_foreground_maybe_merge()
1955 BUG_ON(!btree_node_locked(&trans->paths[path], level)); in __bch2_foreground_maybe_merge()
1973 b = trans->paths[path].l[level].b; in __bch2_foreground_maybe_merge()
1985 sib_path = bch2_path_get(trans, btree, sib_pos, in __bch2_foreground_maybe_merge()
1987 ret = bch2_btree_path_traverse(trans, sib_path, false); in __bch2_foreground_maybe_merge()
1991 btree_path_set_should_be_locked(trans, trans->paths + sib_path); in __bch2_foreground_maybe_merge()
1993 m = trans->paths[sib_path].l[level].b; in __bch2_foreground_maybe_merge()
1995 if (btree_node_parent(trans->paths + path, b) != in __bch2_foreground_maybe_merge()
1996 btree_node_parent(trans->paths + sib_path, m)) { in __bch2_foreground_maybe_merge()
2048 parent = btree_node_parent(trans->paths + path, b); in __bch2_foreground_maybe_merge()
2049 as = bch2_btree_update_start(trans, trans->paths + path, level, false, in __bch2_foreground_maybe_merge()
2055 trace_and_count(c, btree_node_merge, trans, b); in __bch2_foreground_maybe_merge()
2057 n = bch2_btree_node_alloc(as, trans, b->c.level); in __bch2_foreground_maybe_merge()
2076 new_path = bch2_path_get_unlocked_mut(trans, btree, n->c.level, n->key.k.p); in __bch2_foreground_maybe_merge()
2078 mark_btree_node_locked(trans, trans->paths + new_path, n->c.level, BTREE_NODE_INTENT_LOCKED); in __bch2_foreground_maybe_merge()
2079 bch2_btree_path_level_init(trans, trans->paths + new_path, n); in __bch2_foreground_maybe_merge()
2086 bch2_trans_verify_paths(trans); in __bch2_foreground_maybe_merge()
2088 ret = bch2_btree_insert_node(as, trans, path, parent, &as->parent_keys); in __bch2_foreground_maybe_merge()
2095 bch2_trans_verify_paths(trans); in __bch2_foreground_maybe_merge()
2098 bch2_btree_node_write_trans(trans, n, SIX_LOCK_intent, 0); in __bch2_foreground_maybe_merge()
2100 bch2_btree_node_free_inmem(trans, trans->paths + path, b); in __bch2_foreground_maybe_merge()
2101 bch2_btree_node_free_inmem(trans, trans->paths + sib_path, m); in __bch2_foreground_maybe_merge()
2103 bch2_trans_node_add(trans, trans->paths + path, n); in __bch2_foreground_maybe_merge()
2105 bch2_trans_verify_paths(trans); in __bch2_foreground_maybe_merge()
2109 bch2_btree_update_done(as, trans); in __bch2_foreground_maybe_merge()
2115 bch2_path_put(trans, new_path, true); in __bch2_foreground_maybe_merge()
2116 bch2_path_put(trans, sib_path, true); in __bch2_foreground_maybe_merge()
2117 bch2_trans_verify_locks(trans); in __bch2_foreground_maybe_merge()
2121 ret = bch2_trans_relock(trans); in __bch2_foreground_maybe_merge()
2124 bch2_btree_node_free_never_used(as, trans, n); in __bch2_foreground_maybe_merge()
2125 bch2_btree_update_free(as, trans); in __bch2_foreground_maybe_merge()
2129 int bch2_btree_node_rewrite(struct btree_trans *trans, in bch2_btree_node_rewrite() argument
2134 struct bch_fs *c = trans->c; in bch2_btree_node_rewrite()
2142 struct btree_path *path = btree_iter_path(trans, iter); in bch2_btree_node_rewrite()
2144 as = bch2_btree_update_start(trans, path, b->c.level, false, flags); in bch2_btree_node_rewrite()
2149 n = bch2_btree_node_alloc_replacement(as, trans, b); in bch2_btree_node_rewrite()
2155 new_path = bch2_path_get_unlocked_mut(trans, iter->btree_id, n->c.level, n->key.k.p); in bch2_btree_node_rewrite()
2157 mark_btree_node_locked(trans, trans->paths + new_path, n->c.level, BTREE_NODE_INTENT_LOCKED); in bch2_btree_node_rewrite()
2158 bch2_btree_path_level_init(trans, trans->paths + new_path, n); in bch2_btree_node_rewrite()
2160 trace_and_count(c, btree_node_rewrite, trans, b); in bch2_btree_node_rewrite()
2164 ret = bch2_btree_insert_node(as, trans, iter->path, parent, &as->parent_keys); in bch2_btree_node_rewrite()
2166 ret = bch2_btree_set_root(as, trans, btree_iter_path(trans, iter), n, false); in bch2_btree_node_rewrite()
2175 bch2_btree_node_write_trans(trans, n, SIX_LOCK_intent, 0); in bch2_btree_node_rewrite()
2177 bch2_btree_node_free_inmem(trans, btree_iter_path(trans, iter), b); in bch2_btree_node_rewrite()
2179 bch2_trans_node_add(trans, trans->paths + iter->path, n); in bch2_btree_node_rewrite()
2182 bch2_btree_update_done(as, trans); in bch2_btree_node_rewrite()
2185 bch2_path_put(trans, new_path, true); in bch2_btree_node_rewrite()
2186 bch2_trans_downgrade(trans); in bch2_btree_node_rewrite()
2189 bch2_btree_node_free_never_used(as, trans, n); in bch2_btree_node_rewrite()
2190 bch2_btree_update_free(as, trans); in bch2_btree_node_rewrite()
2203 static int async_btree_node_rewrite_trans(struct btree_trans *trans, in async_btree_node_rewrite_trans() argument
2207 bch2_trans_node_iter_init(trans, &iter, in async_btree_node_rewrite_trans()
2217 ? bch2_btree_node_rewrite(trans, &iter, b, 0) in async_btree_node_rewrite_trans()
2223 struct bch_fs *c = trans->c; in async_btree_node_rewrite_trans()
2243 bch2_trans_iter_exit(trans, &iter); in async_btree_node_rewrite_trans()
2253 int ret = bch2_trans_do(c, async_btree_node_rewrite_trans(trans, a)); in async_btree_node_rewrite_work()
2347 static int __bch2_btree_node_update_key(struct btree_trans *trans, in __bch2_btree_node_update_key() argument
2354 struct bch_fs *c = trans->c; in __bch2_btree_node_update_key()
2360 ret = bch2_key_trigger_old(trans, b->c.btree_id, b->c.level + 1, in __bch2_btree_node_update_key()
2363 bch2_key_trigger_new(trans, b->c.btree_id, b->c.level + 1, in __bch2_btree_node_update_key()
2377 parent = btree_node_parent(btree_iter_path(trans, iter), b); in __bch2_btree_node_update_key()
2381 iter2.path = bch2_btree_path_make_mut(trans, iter2.path, in __bch2_btree_node_update_key()
2385 struct btree_path *path2 = btree_iter_path(trans, &iter2); in __bch2_btree_node_update_key()
2389 btree_path_set_level_up(trans, path2); in __bch2_btree_node_update_key()
2391 trans->paths_sorted = false; in __bch2_btree_node_update_key()
2394 bch2_trans_update(trans, &iter2, new_key, BTREE_TRIGGER_norun); in __bch2_btree_node_update_key()
2400 struct jset_entry *e = bch2_trans_jset_entry_alloc(trans, in __bch2_btree_node_update_key()
2412 ret = bch2_trans_commit(trans, NULL, NULL, commit_flags); in __bch2_btree_node_update_key()
2416 bch2_btree_node_lock_write_nofail(trans, btree_iter_path(trans, iter), &b->c); in __bch2_btree_node_update_key()
2432 bch2_btree_node_unlock_write(trans, btree_iter_path(trans, iter), b); in __bch2_btree_node_update_key()
2434 bch2_trans_iter_exit(trans, &iter2); in __bch2_btree_node_update_key()
2445 int bch2_btree_node_update_key(struct btree_trans *trans, struct btree_iter *iter, in bch2_btree_node_update_key() argument
2449 struct bch_fs *c = trans->c; in bch2_btree_node_update_key()
2451 struct btree_path *path = btree_iter_path(trans, iter); in bch2_btree_node_update_key()
2455 ret = bch2_btree_path_upgrade(trans, path, b->c.level + 1); in bch2_btree_node_update_key()
2466 ret = bch2_btree_cache_cannibalize_lock(trans, &cl); in bch2_btree_node_update_key()
2468 ret = drop_locks_do(trans, (closure_sync(&cl), 0)); in bch2_btree_node_update_key()
2473 new_hash = bch2_btree_node_mem_alloc(trans, false); in bch2_btree_node_update_key()
2480 ret = __bch2_btree_node_update_key(trans, iter, b, new_hash, new_key, in bch2_btree_node_update_key()
2488 bch2_btree_cache_cannibalize_unlock(trans); in bch2_btree_node_update_key()
2492 int bch2_btree_node_update_key_get_iter(struct btree_trans *trans, in bch2_btree_node_update_key_get_iter() argument
2499 bch2_trans_node_iter_init(trans, &iter, b->c.btree_id, b->key.k.p, in bch2_btree_node_update_key_get_iter()
2507 if (btree_iter_path(trans, &iter)->l[b->c.level].b != b) { in bch2_btree_node_update_key_get_iter()
2518 ret = bch2_btree_node_update_key(trans, &iter, b, new_key, in bch2_btree_node_update_key_get_iter()
2521 bch2_trans_iter_exit(trans, &iter); in bch2_btree_node_update_key_get_iter()
2538 int bch2_btree_root_alloc_fake_trans(struct btree_trans *trans, enum btree_id id, unsigned level) in bch2_btree_root_alloc_fake_trans() argument
2540 struct bch_fs *c = trans->c; in bch2_btree_root_alloc_fake_trans()
2548 ret = bch2_btree_cache_cannibalize_lock(trans, &cl); in bch2_btree_root_alloc_fake_trans()
2552 b = bch2_btree_node_mem_alloc(trans, false); in bch2_btree_root_alloc_fake_trans()
2553 bch2_btree_cache_cannibalize_unlock(trans); in bch2_btree_root_alloc_fake_trans()
2590 bch2_trans_run(c, lockrestart_do(trans, bch2_btree_root_alloc_fake_trans(trans, id, level))); in bch2_btree_root_alloc_fake()
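The matches above all follow one pattern: helpers deeper in the interior-update path take a struct btree_trans * and pull the filesystem handle out of it (struct bch_fs *c = trans->c), while the entry points own the transaction lifetime through wrappers such as bch2_trans_get()/bch2_trans_put(), commit_do(), bch2_trans_do() and bch2_trans_run() (source lines 656, 708, 2253 and 2590 above). The sketch below illustrates only that shape; do_example_interior_update() and example_entry_point() are hypothetical names, not functions from the listing, and the code assumes the in-tree bcachefs headers.

/*
 * Hypothetical sketch of the btree_trans pattern seen throughout the
 * listing above.  do_example_interior_update() is not a real bcachefs
 * function; it assumes the in-tree bcachefs definitions of
 * struct btree_trans, struct bch_fs and bch2_trans_run().
 */
static int do_example_interior_update(struct btree_trans *trans)
{
	struct bch_fs *c = trans->c;	/* same derivation as in the helpers above */

	/* interior-node update work would run here, under the transaction */
	(void) c;
	return 0;
}

static int example_entry_point(struct bch_fs *c)
{
	/*
	 * bch2_trans_run() allocates the transaction, exposes it to the body
	 * expression as 'trans', and puts it when the expression returns --
	 * the same wrapper used at source line 2590 above.
	 */
	return bch2_trans_run(c, do_example_interior_update(trans));
}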