import oneflow as torch
import os
import sys
import glob

# CLI: python <script> <checkpoint-dir> <n>
rootdir = sys.argv[1]  # directory containing the model.*.pt checkpoints
lastn = int(sys.argv[2])  # how many of the newest checkpoints to average


def average_chkpt(datadir, n):
    """Average the parameters of the newest *n* checkpoints in *datadir*.

    Finds files named ``model.<step>.pt``, keeps the *n* with the highest
    ``<step>``, element-wise averages every tensor inside each parameter
    sub-dict, and saves the result to ``average.model.last.<count>.pt``
    in *datadir*.  Non-parameter entries ('params', 'epochs', 'amp',
    'global_step') are copied unchanged from the newest checkpoint.

    Args:
        datadir: directory containing the ``model.*.pt`` checkpoint files.
        n: number of most recent checkpoints to average.

    Raises:
        ValueError: if a tensor cannot be divided in place and is not a
            batch-norm ``num_batches_tracked`` counter.
    """
    chkpts = glob.glob(os.path.join(datadir, 'model.*.pt'))
    # Sort by the numeric step embedded in the filename, newest first.
    sorted_chkpts = sorted(chkpts, key=lambda x: int(x.split('.')[-2]), reverse=True)[:n]
    num_models = len(sorted_chkpts)

    # Robustness fix: bail out instead of saving an empty/None checkpoint.
    if num_models == 0:
        print('No model.*.pt checkpoints found in %s' % datadir)
        return

    # Bug fix: report the actual count, which may be smaller than n.
    print('Average these %d number models:' % num_models)
    print(sorted_chkpts)

    params_dict = {}
    params_keys = {}
    new_state = None

    for chkpt in sorted_chkpts:
        # Bug fix: glob already returned paths that include datadir; joining
        # datadir again produced 'datadir/datadir/model.*.pt' whenever
        # datadir was a relative path.
        state = torch.load(chkpt)

        # Copies over the settings from the first (newest) checkpoint.
        if new_state is None:
            new_state = state

        for key, value in state.items():
            # Skip non-parameter metadata entries.
            if key in ['params', 'epochs', 'amp', 'global_step']:
                continue

            model_params = value
            if key not in params_keys:
                params_keys[key] = list(model_params.keys())
            if key not in params_dict:
                params_dict[key] = {}

            for k in params_keys[key]:
                p = model_params[k]
                if k not in params_dict[key]:
                    # clone() so a shared parameter is not mutated in place
                    # when we accumulate into it below.
                    params_dict[key][k] = p.clone()
                else:
                    params_dict[key][k] += p

    averaged_params = {}
    for key, states in params_dict.items():
        averaged_params[key] = {}
        for k, v in states.items():
            averaged_params[key][k] = v
            try:
                averaged_params[key][k].div_(num_models)
            except Exception:
                # Narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit).  In-place div_ fails on
                # integer tensors; the only expected one is the batch-norm
                # step counter, which gets an out-of-place divide + cast.
                if 'batch_norm.num_batches_tracked' in k:
                    averaged_params[key][k] = torch.div(averaged_params[key][k], num_models).long()
                else:
                    print('Key: %s  Tensor: %s' % (key, k))
                    raise ValueError

        new_state[key] = averaged_params[key]

    # Hoist the duplicated path expression used by save + the log line.
    out_path = os.path.join(datadir, 'average.model.last.%d.pt' % num_models)
    torch.save(new_state, out_path)
    print('Save the average checkpoint as %s' % out_path)
    print('Done!')


# Entry point: average the `lastn` newest checkpoints found under `rootdir`.
average_chkpt(rootdir, lastn)
