import gc
import torch


def flush():
    """Release unreferenced GPU memory: run Python's garbage collector,
    then return PyTorch's cached CUDA blocks to the driver."""
    gc.collect()
    torch.cuda.empty_cache()
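

# Usage sketch (illustrative, not part of the original file): freeing a large
# CUDA allocation requires dropping the last Python reference before flushing.
if __name__ == "__main__":
    if torch.cuda.is_available():
        x = torch.randn(4096, 4096, device="cuda")  # hypothetical large tensor
        del x    # drop the reference so gc.collect() can reclaim the object
        flush()  # empty_cache() then returns the cached memory to the driver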