File size: 179 Bytes
84cfd61
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
import torch


def torch_gc():
    """Release GPU memory cached by PyTorch back to the driver.

    No-op on machines without CUDA. Frees the caching allocator's unused
    blocks and collects IPC-shared memory on the current CUDA device.
    """
    if not torch.cuda.is_available():
        return
    # "cuda" with no index resolves to the current default device.
    with torch.cuda.device("cuda"):
        # Return cached-but-unused allocator blocks to the driver so
        # other processes (e.g. nvidia-smi readers) see the memory freed.
        torch.cuda.empty_cache()
        # Reclaim CUDA IPC shared-memory files left by finished consumers.
        torch.cuda.ipc_collect()