Searched refs:max_memory_allocated (Results 1 – 17 of 17) sorted by relevance
60 max_memory_allocated
109 max_memory_allocated
136 def max_memory_allocated(device: _device_t = None) -> int: function
441 max_memory_allocated,
195 peak_mem = torch.cuda.max_memory_allocated() / 10**9
244 max_allocated = torch.cuda.max_memory_allocated()
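
The two hits above (lines 195 and 244) show the most common pattern in these results: capture the peak allocation after a workload, optionally dividing by 10**9 to report gigabytes. A minimal sketch of that pattern, with a placeholder workload that does not come from any of the files hit here:

    import torch

    device = torch.device("cuda")
    torch.cuda.reset_peak_memory_stats(device)        # clear the running peak

    model = torch.nn.Linear(4096, 4096).to(device)    # hypothetical workload
    model(torch.randn(64, 4096, device=device)).sum().backward()

    max_allocated = torch.cuda.max_memory_allocated(device)  # raw bytes
    peak_mem = max_allocated / 10**9                          # bytes -> GB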
91 :meth:`~torch.cuda.max_memory_allocated` to monitor memory occupied by
404 :meth:`~torch.cuda.max_memory_allocated` to monitor memory occupied by
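
Both docstring hits recommend :meth:`~torch.cuda.max_memory_allocated` for monitoring tensor memory. The distinction worth spelling out (my gloss, not part of either hit) is that torch.cuda.memory_allocated reports the current allocation while max_memory_allocated reports a high-water mark that persists after tensors are freed:

    import torch

    x = torch.empty(1024, 1024, device="cuda")   # ~4 MB of float32
    current = torch.cuda.memory_allocated()      # bytes held by live tensors now
    del x
    peak = torch.cuda.max_memory_allocated()     # high-water mark, unchanged by del
    assert peak >= current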
192 return torch.cuda.max_memory_allocated()
1164 max_memory_allocated = torch.cuda.max_memory_allocated(device)
416 def max_memory_allocated(device: Union[Device, int] = None) -> int: function
136 max_m_arr = [torch.cuda.max_memory_allocated(device)]
154 new_max_m = torch.cuda.max_memory_allocated(device)
206 self.assertEqual(torch.cuda.max_memory_allocated(device), last_m_arr[0])
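
The three hits above come from one test file that records the peak, allocates more, and asserts the counter can be reset back to a baseline. A sketch of the reset behavior those assertions exercise, using torch.cuda.reset_peak_memory_stats (the variable names here are mine, not the test's):

    import torch

    device = torch.device("cuda")
    a = torch.empty(8 * 1024 * 1024, device=device)    # push the peak up (~32 MB)
    peak_with_a = torch.cuda.max_memory_allocated(device)
    del a
    torch.cuda.reset_peak_memory_stats(device)         # peak falls to current usage
    assert torch.cuda.max_memory_allocated(device) <= peak_with_a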
953 max_mems.append(torch.cuda.max_memory_allocated())
6834 mem_no_checkpoint = torch.cuda.max_memory_allocated()
6839 mem_reentrant_checkpoint = torch.cuda.max_memory_allocated()
6844 mem_no_reentrant_checkpoint = torch.cuda.max_memory_allocated()
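
These three hits measure the same forward/backward pass under three modes: no checkpointing, reentrant checkpointing, and non-reentrant checkpointing. A reduced sketch of that comparison with a simple stand-in model (the actual test fixture is not shown in the results):

    import torch
    from torch.utils.checkpoint import checkpoint

    device = torch.device("cuda")
    net = torch.nn.Sequential(
        *[torch.nn.Linear(2048, 2048) for _ in range(8)]
    ).to(device)
    x = torch.randn(256, 2048, device=device, requires_grad=True)

    def peak_for(forward):
        x.grad = None
        for p in net.parameters():                 # drop stale grads between runs
            p.grad = None
        torch.cuda.reset_peak_memory_stats(device)
        forward(x).sum().backward()
        return torch.cuda.max_memory_allocated(device)

    mem_no_checkpoint = peak_for(net)
    mem_reentrant_checkpoint = peak_for(
        lambda t: checkpoint(net, t, use_reentrant=True)
    )
    mem_no_reentrant_checkpoint = peak_for(
        lambda t: checkpoint(net, t, use_reentrant=False)
    )
    # Checkpointing trades recomputation in backward for a lower activation peak.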
3117 val = torch.cuda.max_memory_allocated()
253 mem_usage[i] = torch.cuda.max_memory_allocated()
1963 return torch.cuda.max_memory_allocated() / 10**9
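
The remaining hits (lines 953, 253, 3117, 1963) either append the peak to a per-iteration list or return it in gigabytes from a benchmark helper. A small helper combining both patterns; the name peak_gb and its signature are mine, not from any file hit here:

    import torch

    def peak_gb(fn, *args):
        """Run fn(*args) and return the peak CUDA allocation in GB."""
        torch.cuda.reset_peak_memory_stats()
        fn(*args)
        torch.cuda.synchronize()
        return torch.cuda.max_memory_allocated() / 10**9

    mem_usage = [
        peak_gb(lambda n: torch.randn(n, 1024, device="cuda"), 2**i)
        for i in range(10, 14)
    ]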