Searched refs:FSDPModule (Results 1 – 8 of 8) sorted by relevance
/aosp_15_r20/external/pytorch/torch/distributed/_composable/fsdp/

fully_shard.py
   153  new_cls = type(f"FSDP{cls.__name__}", (FSDPModule, cls), dct)
   165  class FSDPModule:
   243  if isinstance(module, FSDPModule):
   260  if isinstance(module, FSDPModule):
   282  if isinstance(module, FSDPModule):
   421  if not isinstance(module, FSDPModule):
   445  if not isinstance(module, FSDPModule):
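The hit at fully_shard.py:153 is what makes every isinstance check below work: fully_shard swaps the wrapped module's class for a dynamically created FSDP<ClassName> subclass of FSDPModule. A minimal sketch of that behavior, assuming torch.distributed is already initialized; the toy model is illustrative, not taken from the file:

    import torch.nn as nn
    from torch.distributed._composable.fsdp import FSDPModule, fully_shard

    # Toy two-layer model; a real run would set up ranks/device mesh first.
    model = nn.Sequential(nn.Linear(8, 8), nn.Linear(8, 8))
    fully_shard(model)

    # fully_shard built type("FSDPSequential", (FSDPModule, nn.Sequential), dct)
    # and reassigned model.__class__, so the isinstance checks above succeed.
    assert isinstance(model, FSDPModule)
    print(type(model).__name__)  # "FSDPSequential"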
__init__.py
     2  from .fully_shard import FSDPModule, fully_shard, register_fsdp_forward_method
/aosp_15_r20/external/pytorch/test/distributed/_composable/fsdp/

test_fully_shard_state.py
     7  from torch.distributed._composable.fsdp import FSDPModule, fully_shard
    50  self.assertTrue(isinstance(model, FSDPModule))
    55  self.assertFalse(isinstance(module, FSDPModule))
    61  self.assertTrue(isinstance(model, FSDPModule))
    65  self.assertFalse(isinstance(sliced_model, FSDPModule))
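The state test pins down which objects end up as FSDPModule instances: only modules that fully_shard was actually applied to, not their untouched children and not containers newly built from them (the sliced_model case at :65). A hedged sketch of those assertions with a made-up model, again assuming an initialized process group:

    import torch.nn as nn
    from torch.distributed._composable.fsdp import FSDPModule, fully_shard

    model = nn.Sequential(nn.Linear(4, 4), nn.Linear(4, 4))
    fully_shard(model[0])   # shard one child explicitly
    fully_shard(model)      # then the root

    assert isinstance(model, FSDPModule)         # passed to fully_shard
    assert isinstance(model[0], FSDPModule)      # passed to fully_shard
    assert not isinstance(model[1], FSDPModule)  # never passed to fully_shard
    # Slicing constructs a fresh container, which comes back as a plain nn.Sequential.
    assert not isinstance(model[:1], FSDPModule)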
test_fully_shard_comm.py
    15  FSDPModule,
   359  isinstance(module, FSDPModule) for module in model.modules()
   371  if isinstance(module, FSDPModule):
  1065  if isinstance(self.mlps.mlp1, FSDPModule):
  1068  if isinstance(self.mlps.mlp2, FSDPModule):
  1071  if isinstance(self.mlps.mlp3, FSDPModule):
test_fully_shard_training.py
    16  FSDPModule,
   149  if isinstance(module, FSDPModule):
   850  if isinstance(module, FSDPModule):
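Both the comm and training tests rely on the same idiom: walk model.modules() and special-case the FSDP-managed ones. A hedged sketch of that idiom; the gradient-sync toggle is just one plausible use and is not copied from the tests:

    from torch.distributed._composable.fsdp import FSDPModule

    def set_grad_sync(model, enabled: bool) -> None:
        # e.g. disable reduce-scatter during gradient-accumulation microbatches
        for module in model.modules():
            if isinstance(module, FSDPModule):
                module.set_requires_gradient_sync(enabled)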
/aosp_15_r20/external/pytorch/torch/distributed/_tools/

fsdp2_mem_tracker.py
    10  from torch.distributed._composable.fsdp import FSDPModule
   161  assert isinstance(mod, FSDPModule), "FSDPMemTracker only supports FSDP modules"
   187  fsdp_mod: FSDPModule,
   253  fsdp_mod: FSDPModule,
   285  fsdp_mod: FSDPModule,
   311  fsdp_mod: FSDPModule,
   359  if isinstance(module, FSDPModule):
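FSDPMemTracker only accepts modules that have already been through fully_shard (the assert at :161). A sketch of the intended call order; the constructor signature and the track_inputs/display_snapshot calls are my reading of the tracker's API, not taken from the hits above, and the setup assumes an initialized process group:

    import torch
    import torch.nn as nn
    from torch.distributed._composable.fsdp import FSDPModule, fully_shard
    from torch.distributed._tools.fsdp2_mem_tracker import FSDPMemTracker

    model = nn.Linear(16, 16)             # toy stand-in
    fully_shard(model)
    assert isinstance(model, FSDPModule)  # otherwise the assert at :161 fires
    optim = torch.optim.Adam(model.parameters())

    inp = torch.randn(4, 16)
    tracker = FSDPMemTracker(model, optim)  # assumed (module, optimizer) signature
    tracker.track_inputs((inp,))            # assumed helper to account for input memory
    with tracker:
        model(inp).sum().backward()
        optim.step()
        optim.zero_grad()
    tracker.display_snapshot("peak")        # assumed reporting call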
/aosp_15_r20/external/pytorch/torch/distributed/pipelining/

stage.py
    13  from torch.distributed._composable.fsdp.fully_shard import FSDPModule, fully_shard
   523  elif isinstance(self.submod, FSDPModule):
   530  def run_post_backward(fsdp_module: FSDPModule) -> None:
schedules.py
    26  from torch.distributed._composable.fsdp.fully_shard import FSDPModule, UnshardHandle
  1500  stage_uses_fsdp = isinstance(stage.submod, FSDPModule)
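schedules.py pairs FSDPModule with UnshardHandle so a pipeline schedule can start a stage's parameter all-gather early and wait for it right before the stage runs. A hedged sketch of that prefetch pattern, not the schedule's actual control flow:

    from torch.distributed._composable.fsdp import FSDPModule

    def prefetch_stage_params(submod):
        # FSDPModule.unshard(async_op=True) returns an UnshardHandle
        if isinstance(submod, FSDPModule):
            return submod.unshard(async_op=True)
        return None

    # ...later, just before running the stage's forward:
    # if handle is not None:
    #     handle.wait()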