Searched refs: AsyncCollectiveTensor (Results 1 – 11 of 11), sorted by relevance

/aosp_15_r20/external/pytorch/torch/distributed/
_functional_collectives.py
212 if isinstance(res, AsyncCollectiveTensor):
245 if isinstance(res, AsyncCollectiveTensor):
560 class AsyncCollectiveTensor(torch.Tensor):
601 return AsyncCollectiveTensor(elem)
627 wrapper_res = AsyncCollectiveTensor(res)
632 def unwrap(e: AsyncCollectiveTensor):
640 assert not isinstance(e, AsyncCollectiveTensor)
641 res = AsyncCollectiveTensor(e)
644 unwrapped_args = tree_map_only(AsyncCollectiveTensor, unwrap, args)
645 unwrapped_kwargs = tree_map_only(AsyncCollectiveTensor, unwrap, kwargs)
[all …]
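
The matches above outline the core pattern in _functional_collectives.py: results of functional collectives are wrapped in the AsyncCollectiveTensor subclass, and wrapped values are unwrapped again (forcing a wait on the pending collective) before regular ops consume them. Below is a minimal sketch of that unwrap step, not the actual PyTorch implementation; the helper names are illustrative, and only the public tree_map_only utility and the wrapper's wait() method are assumed.

import torch
from torch.utils._pytree import tree_map_only
from torch.distributed._functional_collectives import AsyncCollectiveTensor


def _unwrap(e: AsyncCollectiveTensor) -> torch.Tensor:
    # wait() blocks until the pending collective has completed and returns
    # the plain, fully materialized tensor.
    return e.wait()


def sync_args(args, kwargs):
    # tree_map_only applies _unwrap only to AsyncCollectiveTensor leaves,
    # leaving every other value in the pytrees untouched.
    unwrapped_args = tree_map_only(AsyncCollectiveTensor, _unwrap, args)
    unwrapped_kwargs = tree_map_only(AsyncCollectiveTensor, _unwrap, kwargs)
    return unwrapped_args, unwrapped_kwargs
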
_state_dict_utils.py
24 from torch.distributed._functional_collectives import AsyncCollectiveTensor
277 if isinstance(value, AsyncCollectiveTensor):
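
_state_dict_utils.py shows the consumer side of the same contract: before a value is handed on, an in-flight AsyncCollectiveTensor is synchronized. A minimal sketch of that check, with a hypothetical function name:

import torch
from torch.distributed._functional_collectives import AsyncCollectiveTensor


def materialize_value(value: torch.Tensor) -> torch.Tensor:
    # AsyncCollectiveTensor subclasses torch.Tensor, so an explicit isinstance
    # check is needed to tell whether a collective is still outstanding.
    if isinstance(value, AsyncCollectiveTensor):
        value = value.wait()
    return value
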
/aosp_15_r20/external/pytorch/torch/distributed/tensor/parallel/
_data_parallel_utils.py
5 from torch.distributed._functional_collectives import AsyncCollectiveTensor
12 if isinstance(grad, AsyncCollectiveTensor):
/aosp_15_r20/external/pytorch/torch/distributed/tensor/experimental/
_func_map.py
6 from torch.distributed._functional_collectives import AsyncCollectiveTensor
174 if isinstance(local_arg, AsyncCollectiveTensor):
_attention.py
74 if isinstance(tensor, ft_c.AsyncCollectiveTensor):
/aosp_15_r20/external/pytorch/test/distributed/
test_c10d_functional_native.py
17 AsyncCollectiveTensor,
111 assert isinstance(output, AsyncCollectiveTensor)
216 assert isinstance(output, AsyncCollectiveTensor)
315 assert isinstance(output, AsyncCollectiveTensor)
403 assert isinstance(output, AsyncCollectiveTensor)
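
The assertions in test_c10d_functional_native.py check that an eager functional collective hands back the wrapper rather than a plain tensor. A hedged sketch of that kind of check, assuming a default process group has already been initialized (e.g. via torch.distributed.init_process_group); the function name is illustrative:

import torch
import torch.distributed as dist
import torch.distributed._functional_collectives as funcol
from torch.distributed._functional_collectives import AsyncCollectiveTensor


def check_eager_all_reduce() -> torch.Tensor:
    t = torch.ones(4)
    out = funcol.all_reduce(t, "sum", dist.group.WORLD)
    # In eager mode the functional collective returns an AsyncCollectiveTensor;
    # the communication only has to finish once wait() is called.
    assert isinstance(out, AsyncCollectiveTensor)
    return out.wait()
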
/aosp_15_r20/external/pytorch/test/distributed/_tensor/
test_dtensor.py
8 from torch.distributed._functional_collectives import AsyncCollectiveTensor
372 self.assertFalse(isinstance(full_out, AsyncCollectiveTensor))
433 self.assertEqual(type(out), AsyncCollectiveTensor)
438 self.assertEqual(type(out_view), AsyncCollectiveTensor)
450 self.assertFalse(isinstance(sync_out, AsyncCollectiveTensor))
/aosp_15_r20/external/pytorch/torch/distributed/_composable/fsdp/
_fsdp_param.py
11 from torch.distributed._functional_collectives import AsyncCollectiveTensor
673 if isinstance(grad, AsyncCollectiveTensor):
/aosp_15_r20/external/pytorch/torch/distributed/tensor/
_collective_utils.py
59 if isinstance(out, funcol.AsyncCollectiveTensor):
_redistribute.py
268 if not async_op and isinstance(new_local_tensor, funcol.AsyncCollectiveTensor):
placement_types.py
509 if isinstance(result, funcol.AsyncCollectiveTensor):
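
The DTensor matches follow the same idiom, and _redistribute.py adds an async_op escape hatch: synchronous callers get a concrete local tensor, while async callers keep the wrapper and wait later. A small illustrative sketch of that conditional (the helper name is not PyTorch API):

import torch
import torch.distributed._functional_collectives as funcol


def maybe_wait(local_tensor: torch.Tensor, async_op: bool) -> torch.Tensor:
    # When the caller did not request an async result, force completion now so
    # downstream code sees an ordinary torch.Tensor.
    if not async_op and isinstance(local_tensor, funcol.AsyncCollectiveTensor):
        return local_tensor.wait()
    return local_tensor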