import numpy as np
import torch
from torch.utils import data


def InfiniteSampler(n):
    """Yield indices forever, drawing a fresh random permutation of range(n)
    after each full pass, so duplicates never occur within a single pass."""
    # Start at the last slot so the first pass yields one sample and then
    # immediately reshuffles; every subsequent pass covers all n indices.
    i = n - 1
    order = np.random.permutation(n)
    while True:
        yield order[i]
        i += 1
        if i >= n:
            # Re-seed from fresh entropy; keeps forked DataLoader workers
            # from replaying the same permutation sequence.
            np.random.seed()
            order = np.random.permutation(n)
            i = 0

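# Quick illustration (hypothetical values, not part of the module): the
# generator walks a random permutation and reshuffles once exhausted, so
# repeated indices only appear across passes, never inside one.
#
#   gen = InfiniteSampler(4)
#   [next(gen) for _ in range(8)]  # e.g. [3, 2, 0, 1, 3, 1, 0, 2]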

class InfiniteSamplerWrapper(data.sampler.Sampler):
    """Adapt InfiniteSampler to the Sampler interface expected by DataLoader."""

    def __init__(self, data_source):
        self.num_samples = len(data_source)

    def __iter__(self):
        return iter(InfiniteSampler(self.num_samples))

    def __len__(self):
        return 2 ** 31  # effectively infinite; the DataLoader never exhausts it

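# Usage sketch (a minimal example, not part of the original module): the
# wrapper plugs into DataLoader's `sampler` argument and the resulting
# iterator never raises StopIteration, so training loops can call next()
# indefinitely instead of looping over epochs. The TensorDataset below is a
# throwaway stand-in purely for illustration.
#
#   dataset = data.TensorDataset(torch.randn(100, 3, 32, 32))
#   loader = iter(data.DataLoader(dataset, batch_size=8,
#                                 sampler=InfiniteSamplerWrapper(dataset)))
#   (batch,) = next(loader)  # can be called forever; reshuffles each pass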

def save_checkpoint(encoder1, encoder2, transModule, decoder, optimizer,
                    scheduler, epoch, log_c, log_s, log_id1, log_id2,
                    log_all, loss_count_interval, save_path):
    """Serialize model, optimizer, and scheduler state plus loss logs to save_path."""
    checkpoint = {
        # Modules may be absent (e.g. frozen or unused); store None in that case.
        'encoder1': encoder1.state_dict() if encoder1 is not None else None,
        'encoder2': encoder2.state_dict() if encoder2 is not None else None,
        'transModule': transModule.state_dict() if transModule is not None else None,
        'decoder': decoder.state_dict() if decoder is not None else None,
        'optimizer': optimizer.state_dict() if optimizer is not None else None,
        'scheduler': scheduler.state_dict() if scheduler is not None else None,
        # Plain values (possibly None) are stored as-is.
        'epoch': epoch,
        'log_c': log_c,
        'log_s': log_s,
        'log_id1': log_id1,
        'log_id2': log_id2,
        'log_all': log_all,
        'loss_count_interval': loss_count_interval,
    }

    torch.save(checkpoint, save_path)
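

# Loading sketch (an assumption about the surrounding training code, not a
# function defined here): restoring mirrors the dict layout written above,
# skipping entries that were saved as None.
#
#   ckpt = torch.load(save_path, map_location='cpu')
#   if ckpt['encoder1'] is not None:
#       encoder1.load_state_dict(ckpt['encoder1'])
#   if ckpt['optimizer'] is not None:
#       optimizer.load_state_dict(ckpt['optimizer'])
#   start_epoch = ckpt['epoch']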