Lines Matching +full:9 +full:k

41 static void trace_move_extent2(struct bch_fs *c, struct bkey_s_c k,  in trace_move_extent2()  argument
48 bch2_bkey_val_to_text(&buf, c, k); in trace_move_extent2()
56 static void trace_move_extent_read2(struct bch_fs *c, struct bkey_s_c k) in trace_move_extent_read2() argument
61 bch2_bkey_val_to_text(&buf, c, k); in trace_move_extent_read2()
126 bch2_bkey_val_to_text(&buf, c, bkey_i_to_s_c(io->write.k.k)); in move_write()
255 struct bkey_s_c k, in bch2_move_extent() argument
261 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in bch2_move_extent()
265 unsigned sectors = k.k->size, pages; in bch2_move_extent()
268 trace_move_extent2(c, k, &io_opts, &data_opts); in bch2_move_extent()
273 bch2_data_update_opts_normalize(k, &data_opts); in bch2_move_extent()
278 return bch2_extent_drop_ptrs(trans, iter, k, &io_opts, &data_opts); in bch2_move_extent()
289 bkey_for_each_ptr_decode(k.k, ptrs, p, entry) in bch2_move_extent()
300 io->read_sectors = k.k->size; in bch2_move_extent()
301 io->write_sectors = k.k->size; in bch2_move_extent()
307 if (bch2_bio_alloc_pages(&io->write.op.wbio.bio, sectors << 9, in bch2_move_extent()
316 io->rbio.bio.bi_iter.bi_size = sectors << 9; in bch2_move_extent()
319 io->rbio.bio.bi_iter.bi_sector = bkey_start_offset(k.k); in bch2_move_extent()
323 io_opts, data_opts, iter->btree_id, k); in bch2_move_extent()
330 bch2_ratelimit_increment(ctxt->rate, k.k->size); in bch2_move_extent()
334 atomic64_add(k.k->size, &ctxt->stats->sectors_moved); in bch2_move_extent()
342 this_cpu_add(c->counters[BCH_COUNTER_io_move], k.k->size); in bch2_move_extent()
343 this_cpu_add(c->counters[BCH_COUNTER_move_extent_read], k.k->size); in bch2_move_extent()
344 trace_move_extent_read2(c, k); in bch2_move_extent()
360 bkey_start_pos(k.k), in bch2_move_extent()
361 iter->btree_id, k, 0, in bch2_move_extent()
382 bch2_bkey_val_to_text(&buf, c, k); in bch2_move_extent()
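
The repeated sectors << 9 in bch2_move_extent() (lines 307 and 316 above) is the standard conversion from 512-byte sectors to bytes when sizing the read and write bios; the counters updated at lines 330-343 likewise account in sectors. A minimal, self-contained sketch of that conversion, with names of my own choosing rather than anything from move.c:

#include <stdint.h>
#include <stdio.h>

#define SECTOR_SHIFT 9  /* 512-byte sectors, as in the Linux block layer */

/* Convert an extent size in sectors (e.g. k.k->size) to bytes for bio sizing. */
static uint64_t sectors_to_bytes(uint64_t sectors)
{
    return sectors << SECTOR_SHIFT;
}

int main(void)
{
    uint64_t extent_sectors = 128;  /* hypothetical extent size */

    printf("%llu sectors -> %llu bytes\n",
           (unsigned long long)extent_sectors,
           (unsigned long long)sectors_to_bytes(extent_sectors));
    return 0;
}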
402 if (extent_k.k->type == KEY_TYPE_reflink_v) in bch2_move_get_io_opts()
409 BTREE_ITER_all_snapshots, k, ({ in bch2_move_get_io_opts()
410 if (k.k->p.offset != extent_pos.inode) in bch2_move_get_io_opts()
413 if (!bkey_is_inode(k.k)) in bch2_move_get_io_opts()
417 _ret3 = bch2_inode_unpack(k, &inode); in bch2_move_get_io_opts()
421 struct snapshot_io_opts_entry e = { .snapshot = k.k->p.snapshot }; in bch2_move_get_io_opts()
433 if (extent_k.k->p.snapshot) in bch2_move_get_io_opts()
435 if (bch2_snapshot_is_ancestor(c, extent_k.k->p.snapshot, i->snapshot)) { in bch2_move_get_io_opts()
456 if (!extent_k.k->p.inode) in bch2_move_get_io_opts_one()
461 SPOS(0, extent_k.k->p.inode, extent_k.k->p.snapshot), in bch2_move_get_io_opts_one()
467 if (!ret && bkey_is_inode(inode_k.k)) { in bch2_move_get_io_opts_one()
513 atomic_read(&ctxt->write_sectors) < c->opts.move_bytes_in_flight >> 9 && in bch2_move_ratelimit()
514 atomic_read(&ctxt->read_sectors) < c->opts.move_bytes_in_flight >> 9 && in bch2_move_ratelimit()
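
bch2_move_ratelimit() keeps issuing work only while the in-flight read and write sector counters stay below c->opts.move_bytes_in_flight >> 9, i.e. the configured byte budget expressed in sectors. A hedged sketch of that throttle check, using made-up field names rather than the real moving_context layout:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SECTOR_SHIFT 9

/* Illustrative stand-in for the moving context's in-flight accounting. */
struct move_throttle {
    atomic_uint read_sectors;    /* sectors currently being read */
    atomic_uint write_sectors;   /* sectors currently being written */
    uint64_t    bytes_in_flight; /* configured budget, in bytes */
};

/* May another extent be started without exceeding the in-flight budget? */
static bool may_start_io(struct move_throttle *t)
{
    unsigned limit = t->bytes_in_flight >> SECTOR_SHIFT;

    return atomic_load(&t->read_sectors)  < limit &&
           atomic_load(&t->write_sectors) < limit;
}

int main(void)
{
    struct move_throttle t = { .bytes_in_flight = 1U << 20 };  /* 1 MiB */

    atomic_store(&t.read_sectors, 100);
    printf("may start: %d\n", may_start_io(&t));  /* 100 < 2048 sectors -> 1 */
    return 0;
}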
533 struct bkey_s_c k; in bch2_move_data_btree() local
564 k = bch2_btree_iter_peek(&iter); in bch2_move_data_btree()
565 if (!k.k) in bch2_move_data_btree()
568 ret = bkey_err(k); in bch2_move_data_btree()
574 if (bkey_ge(bkey_start_pos(k.k), end)) in bch2_move_data_btree()
581 k.k->type == KEY_TYPE_reflink_p && in bch2_move_data_btree()
582 REFLINK_P_MAY_UPDATE_OPTIONS(bkey_s_c_to_reflink_p(k).v)) { in bch2_move_data_btree()
583 struct bkey_s_c_reflink_p p = bkey_s_c_to_reflink_p(k); in bch2_move_data_btree()
584 s64 offset_into_extent = iter.pos.offset - bkey_start_offset(k.k); in bch2_move_data_btree()
587 k = bch2_lookup_indirect_extent(trans, &reflink_iter, &offset_into_extent, p, true, 0); in bch2_move_data_btree()
588 ret = bkey_err(k); in bch2_move_data_btree()
594 if (bkey_deleted(k.k)) in bch2_move_data_btree()
600 * pointer - need to fixup iter->k in bch2_move_data_btree()
605 if (!bkey_extent_is_direct_data(k.k)) in bch2_move_data_btree()
609 iter.pos, extent_iter, k); in bch2_move_data_btree()
615 if (!pred(c, arg, k, io_opts, &data_opts)) in bch2_move_data_btree()
620 * save a copy of @k elsewhere: in bch2_move_data_btree()
622 bch2_bkey_buf_reassemble(&sk, c, k); in bch2_move_data_btree()
623 k = bkey_i_to_s_c(sk.k); in bch2_move_data_btree()
625 ret2 = bch2_move_extent(ctxt, NULL, extent_iter, k, *io_opts, data_opts); in bch2_move_data_btree()
641 atomic64_add(k.k->size, &ctxt->stats->sectors_seen); in bch2_move_data_btree()
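
The comment fragment at line 620 ("save a copy of @k elsewhere") and the bch2_bkey_buf_reassemble() call at 622-623 show the key being copied into a private buffer before the extent is moved, since k otherwise points into the btree iterator and can be invalidated once the iterator is unlocked. A generic sketch of that copy-then-use pattern; struct record below is a made-up stand-in, not the bcachefs bkey_buf API:

#include <stdio.h>
#include <string.h>

struct record {
    unsigned len;
    unsigned char data[64];
};

/*
 * An iterator hands out pointers into storage it may reuse once it advances
 * or drops its locks; take a private copy first, then do the long-running
 * work on the stable copy (the analogue of bch2_bkey_buf_reassemble()).
 */
static unsigned checksum_stable_copy(const struct record *iter_rec)
{
    struct record copy;
    unsigned sum = 0;

    memcpy(&copy, iter_rec, sizeof(copy));
    /* ... the iterator may now move on; copy stays valid ... */
    for (unsigned i = 0; i < copy.len && i < sizeof(copy.data); i++)
        sum += copy.data[i];
    return sum;
}

int main(void)
{
    struct record r = { .len = 3, .data = { 1, 2, 3 } };

    printf("%u\n", checksum_stable_copy(&r));
    return 0;
}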
714 struct bkey_s_c k; in bch2_evacuate_bucket() local
727 bkey_init(&last_flushed.k->k); in bch2_evacuate_bucket()
753 k = bch2_btree_iter_peek(&bp_iter); in bch2_evacuate_bucket()
754 ret = bkey_err(k); in bch2_evacuate_bucket()
760 if (!k.k || bkey_gt(k.k->p, bucket_pos_to_bp_end(ca, bucket))) in bch2_evacuate_bucket()
763 if (k.k->type != KEY_TYPE_backpointer) in bch2_evacuate_bucket()
766 struct bkey_s_c_backpointer bp = bkey_s_c_to_backpointer(k); in bch2_evacuate_bucket()
769 k = bch2_backpointer_get_key(trans, bp, &iter, 0, &last_flushed); in bch2_evacuate_bucket()
770 ret = bkey_err(k); in bch2_evacuate_bucket()
775 if (!k.k) in bch2_evacuate_bucket()
778 bch2_bkey_buf_reassemble(&sk, c, k); in bch2_evacuate_bucket()
779 k = bkey_i_to_s_c(sk.k); in bch2_evacuate_bucket()
781 ret = bch2_move_get_io_opts_one(trans, &io_opts, &iter, k); in bch2_evacuate_bucket()
795 bkey_for_each_ptr_decode(k.k, bch2_bkey_ptrs_c(k), p, entry) { in bch2_evacuate_bucket()
808 &iter, k, io_opts, data_opts); in bch2_evacuate_bucket()
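
bch2_evacuate_bucket() walks the backpointers recorded for one bucket (lines 753-770), takes a stable copy of each key a backpointer resolves to (778-779), and then scans that key's pointers for the replica that actually lives in the bucket being evacuated (795). A loose, self-contained sketch of that reverse-index walk; the types below are illustrative stand-ins, not the bcachefs backpointer API:

#include <stdio.h>

/* Illustrative stand-ins, not the bcachefs backpointer/extent types. */
struct ptr { unsigned dev, bucket; };
struct extent { const char *name; unsigned nr_ptrs; struct ptr ptrs[3]; };
struct backpointer { unsigned bucket; const struct extent *extent; };

/*
 * "Evacuate" one bucket: for every backpointer into it, find which of the
 * referenced extent's pointers lives in that bucket; that is the replica
 * the move path would rewrite onto another device.
 */
static void evacuate_bucket(const struct backpointer *bps, unsigned nr_bps,
                            unsigned dev, unsigned bucket)
{
    for (unsigned i = 0; i < nr_bps; i++) {
        const struct extent *e;

        if (bps[i].bucket != bucket)
            continue;

        e = bps[i].extent;
        for (unsigned p = 0; p < e->nr_ptrs; p++)
            if (e->ptrs[p].dev == dev && e->ptrs[p].bucket == bucket)
                printf("%s: rewrite pointer %u\n", e->name, p);
    }
}

int main(void)
{
    struct extent e = {
        .name = "extent A", .nr_ptrs = 2,
        .ptrs = { { .dev = 0, .bucket = 7 }, { .dev = 1, .bucket = 3 } },
    };
    struct backpointer bps[] = { { .bucket = 7, .extent = &e } };

    evacuate_bucket(bps, 1, 0, 7);
    return 0;
}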
915 bpos_cmp(b->key.k.p, end.pos)) > 0) in bch2_move_btree()
948 struct bkey_s_c k, in rereplicate_pred() argument
952 unsigned nr_good = bch2_bkey_durability(c, k); in rereplicate_pred()
953 unsigned replicas = bkey_is_btree_ptr(k.k) in rereplicate_pred()
958 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in rereplicate_pred()
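
rereplicate_pred() compares the key's current durability (nr_good, line 952) against a target replica count taken from the metadata or data replicas option depending on whether the key is a btree pointer (953). The comparison itself isn't among the matched lines, so the sketch below is only a guess at the usual shape of such a predicate, not the actual move.c logic:

#include <stdbool.h>
#include <stdio.h>

struct want_opts { unsigned extra_replicas; };  /* illustrative, not data_update_opts */

/*
 * Rereplicate-style predicate (assumed shape): if the key's current
 * durability falls short of the configured replica count, request that
 * many extra replicas; otherwise leave the extent alone.
 */
static bool needs_rereplicate(unsigned nr_good, unsigned replicas,
                              struct want_opts *opts)
{
    if (!nr_good || nr_good >= replicas)
        return false;

    opts->extra_replicas = replicas - nr_good;
    return true;
}

int main(void)
{
    struct want_opts opts = { 0 };

    if (needs_rereplicate(1, 2, &opts))
        printf("need %u extra replica(s)\n", opts.extra_replicas);
    return 0;
}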
980 struct bkey_s_c k, in migrate_pred() argument
984 struct bkey_ptrs_c ptrs = bch2_bkey_ptrs_c(k); in migrate_pred()
1071 struct bkey_s_c k, in drop_extra_replicas_pred() argument
1075 unsigned durability = bch2_bkey_durability(c, k); in drop_extra_replicas_pred()
1076 unsigned replicas = bkey_is_btree_ptr(k.k) in drop_extra_replicas_pred()
1084 bkey_for_each_ptr_decode(k.k, bch2_bkey_ptrs_c(k), p, entry) { in drop_extra_replicas_pred()
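
drop_extra_replicas_pred() starts from the same durability-versus-replicas pair (lines 1075-1076) and then walks each pointer of the extent (1084). The drop decision itself isn't shown in the matched lines; a plausible sketch, under the assumption that a pointer may be dropped whenever the remaining durability still meets the target, is:

#include <stdio.h>

/*
 * Assumed shape, not the real move.c logic: walk an extent's replicas and
 * mark for removal any pointer whose durability can be given up while the
 * remaining durability still meets the target replica count.
 */
static unsigned pick_ptrs_to_drop(const unsigned *ptr_durability, unsigned nr_ptrs,
                                  unsigned durability, unsigned replicas)
{
    unsigned drop_mask = 0;

    for (unsigned i = 0; i < nr_ptrs; i++) {
        unsigned d = ptr_durability[i];

        if (d && durability - d >= replicas) {
            drop_mask |= 1U << i;
            durability -= d;
        }
    }
    return drop_mask;
}

int main(void)
{
    unsigned d[] = { 1, 1, 1 };  /* three single-durability replicas */

    /* durability 3, target 2: exactly one pointer can be dropped */
    printf("drop mask: 0x%x\n", pick_ptrs_to_drop(d, 3, 3, 2));
    return 0;
}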
1182 prt_human_readable_u64(out, atomic64_read(&stats->sectors_seen) << 9); in bch2_move_stats_to_text()
1186 prt_human_readable_u64(out, atomic64_read(&stats->sectors_moved) << 9); in bch2_move_stats_to_text()
1190 prt_human_readable_u64(out, atomic64_read(&stats->sectors_raced) << 9); in bch2_move_stats_to_text()
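
bch2_move_stats_to_text() prints the sectors_seen/sectors_moved/sectors_raced counters as byte counts, again multiplying by 512 with << 9 before handing them to a human-readable formatter. A self-contained sketch of that formatting step; the helper below is my own, not bcachefs's prt_human_readable_u64():

#include <stdint.h>
#include <stdio.h>

#define SECTOR_SHIFT 9

/* Print a sector counter as a human-readable byte count. */
static void print_sectors_human(const char *name, uint64_t sectors)
{
    static const char *units[] = { "B", "KiB", "MiB", "GiB", "TiB" };
    double bytes = (double)(sectors << SECTOR_SHIFT);
    unsigned u = 0;

    while (bytes >= 1024 && u < 4) {
        bytes /= 1024;
        u++;
    }
    printf("%s: %.1f %s\n", name, bytes, units[u]);
}

int main(void)
{
    print_sectors_human("sectors_moved", 262144);  /* 128 MiB */
    return 0;
}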
1207 c->opts.move_bytes_in_flight >> 9); in bch2_moving_ctxt_to_text()
1213 c->opts.move_bytes_in_flight >> 9); in bch2_moving_ctxt_to_text()