
Searched refs:FSDPModule (Results 1 – 8 of 8) sorted by relevance

/aosp_15_r20/external/pytorch/torch/distributed/_composable/fsdp/
fully_shard.py
  153   new_cls = type(f"FSDP{cls.__name__}", (FSDPModule, cls), dct)
  165   class FSDPModule:
  243   if isinstance(module, FSDPModule):
  260   if isinstance(module, FSDPModule):
  282   if isinstance(module, FSDPModule):
  421   if not isinstance(module, FSDPModule):
  445   if not isinstance(module, FSDPModule):
__init__.py
  2     from .fully_shard import FSDPModule, fully_shard, register_fsdp_forward_method
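
For context on these hits: fully_shard.py:153 builds a dynamic subclass of both FSDPModule and the module's original class, which is why the isinstance checks above succeed after sharding. A minimal usage sketch (not from the indexed sources; assumes torch.distributed is already initialized, e.g. a single-rank process group):

import torch.nn as nn
from torch.distributed._composable.fsdp import FSDPModule, fully_shard

model = nn.Sequential(nn.Linear(8, 8), nn.Linear(8, 8))
fully_shard(model)  # in place: model.__class__ becomes the dynamic "FSDPSequential"

assert isinstance(model, FSDPModule)     # the isinstance checks in fully_shard.py rely on this
assert isinstance(model, nn.Sequential)  # the original class remains a base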
/aosp_15_r20/external/pytorch/test/distributed/_composable/fsdp/
test_fully_shard_state.py
  7     from torch.distributed._composable.fsdp import FSDPModule, fully_shard
  50    self.assertTrue(isinstance(model, FSDPModule))
  55    self.assertFalse(isinstance(module, FSDPModule))
  61    self.assertTrue(isinstance(model, FSDPModule))
  65    self.assertFalse(isinstance(sliced_model, FSDPModule))
test_fully_shard_comm.py
  15     FSDPModule,
  359    isinstance(module, FSDPModule) for module in model.modules()
  371    if isinstance(module, FSDPModule):
  1065   if isinstance(self.mlps.mlp1, FSDPModule):
  1068   if isinstance(self.mlps.mlp2, FSDPModule):
  1071   if isinstance(self.mlps.mlp3, FSDPModule):
test_fully_shard_training.py
  16    FSDPModule,
  149   if isinstance(module, FSDPModule):
  850   if isinstance(module, FSDPModule):
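
The test hits above (e.g. test_fully_shard_training.py:149 and 850) follow a common filtering pattern: only modules that fully_shard was applied to are FSDPModule instances, so iterating model.modules() selects exactly those. A sketch, reusing the sharded model from the snippet further up:

# Collect the FSDP-managed submodules and call one of FSDPModule's methods on them.
fsdp_modules = [m for m in model.modules() if isinstance(m, FSDPModule)]
for m in fsdp_modules:
    m.reshard()  # frees the unsharded parameters until the next forward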
/aosp_15_r20/external/pytorch/torch/distributed/_tools/
fsdp2_mem_tracker.py
  10    from torch.distributed._composable.fsdp import FSDPModule
  161   assert isinstance(mod, FSDPModule), "FSDPMemTracker only supports FSDP modules"
  187   fsdp_mod: FSDPModule,
  253   fsdp_mod: FSDPModule,
  285   fsdp_mod: FSDPModule,
  311   fsdp_mod: FSDPModule,
  359   if isinstance(module, FSDPModule):
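
fsdp2_mem_tracker.py:161 shows that FSDPMemTracker only accepts FSDP-managed modules. A hedged usage sketch; the constructor arguments, track_inputs, and display_snapshot calls reflect a reading of the tracker's API and may not match the indexed revision exactly:

from torch.distributed._tools.fsdp2_mem_tracker import FSDPMemTracker

# `model` must already be an FSDPModule (see the assert at line 161);
# `optim` is any optimizer over its parameters, `inp` a sample batch.
tracker = FSDPMemTracker(model, optim)
tracker.track_inputs((inp,))
with tracker:
    loss = model(inp).sum()
    loss.backward()
    optim.step()
    optim.zero_grad()
tracker.display_snapshot("peak")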
/aosp_15_r20/external/pytorch/torch/distributed/pipelining/
stage.py
  13    from torch.distributed._composable.fsdp.fully_shard import FSDPModule, fully_shard
  523   elif isinstance(self.submod, FSDPModule):
  530   def run_post_backward(fsdp_module: FSDPModule) -> None:
schedules.py
  26     from torch.distributed._composable.fsdp.fully_shard import FSDPModule, UnshardHandle
  1500   stage_uses_fsdp = isinstance(stage.submod, FSDPModule)