import numpy as np
import torch
from torch.utils import data


def InfiniteSampler(n):
    """Yield indices in [0, n) in random order, reshuffling forever."""
    # Start at the last index so the initial permutation yields exactly one
    # sample before the first reshuffle (behavior kept from the original).
    i = n - 1
    order = np.random.permutation(n)
    while True:
        yield order[i]
        i += 1
        if i >= n:
            # Reseed from OS entropy so reshuffles differ across forked
            # DataLoader worker processes, then draw a fresh permutation.
            np.random.seed()
            order = np.random.permutation(n)
            i = 0


class InfiniteSamplerWrapper(data.sampler.Sampler):
    """Sampler backed by InfiniteSampler, so a DataLoader built on it
    can be iterated indefinitely without raising StopIteration."""

    def __init__(self, data_source):
        self.num_samples = len(data_source)

    def __iter__(self):
        return iter(InfiniteSampler(self.num_samples))

    def __len__(self):
        # DataLoader expects a finite __len__; report a value large enough
        # to act as "effectively infinite".
        return 2 ** 31
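

# Usage sketch (illustrative; not part of the original file). The wrapper is
# typically passed as the `sampler` of a DataLoader, whose iterator can then
# be advanced forever. `dataset`, the batch size, and the worker count below
# are assumed values:
#
#   loader = iter(data.DataLoader(
#       dataset,
#       batch_size=8,
#       sampler=InfiniteSamplerWrapper(dataset),
#       num_workers=4))
#   batch = next(loader)  # can be called indefinitely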


def save_checkpoint(encoder1, encoder2, transModule, decoder, optimizer,
                    scheduler, epoch, log_c, log_s, log_id1, log_id2,
                    log_all, loss_count_interval, save_path):
    """Serialize module/optimizer/scheduler state plus training logs.

    Any component passed as None is stored as None, so partial
    checkpoints are supported.
    """
    checkpoint = {
        'encoder1': encoder1.state_dict() if encoder1 is not None else None,
        'encoder2': encoder2.state_dict() if encoder2 is not None else None,
        'transModule': transModule.state_dict() if transModule is not None else None,
        'decoder': decoder.state_dict() if decoder is not None else None,
        'optimizer': optimizer.state_dict() if optimizer is not None else None,
        'scheduler': scheduler.state_dict() if scheduler is not None else None,
        # Plain values are stored directly; the original's
        # `x if not x is None else None` was a no-op.
        'epoch': epoch,
        'log_c': log_c,
        'log_s': log_s,
        'log_id1': log_id1,
        'log_id2': log_id2,
        'log_all': log_all,
        'loss_count_interval': loss_count_interval,
    }
    torch.save(checkpoint, save_path)
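

# A minimal counterpart sketch (an assumption; no such function exists in the
# original file): restores what save_checkpoint wrote. The caller is assumed
# to construct the modules, optimizer, and scheduler before loading.
def load_checkpoint(encoder1, encoder2, transModule, decoder, optimizer,
                    scheduler, load_path, device='cpu'):
    checkpoint = torch.load(load_path, map_location=device)
    for obj, key in [(encoder1, 'encoder1'), (encoder2, 'encoder2'),
                     (transModule, 'transModule'), (decoder, 'decoder'),
                     (optimizer, 'optimizer'), (scheduler, 'scheduler')]:
        # Skip components that were saved as None or not provided here.
        if obj is not None and checkpoint.get(key) is not None:
            obj.load_state_dict(checkpoint[key])
    # The epoch counter and loss logs stay in the returned dict for the caller.
    return checkpoint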