import pickle
import time
import torch

from train import train
import os
import logging

from util import get_user_paras


def get_logger(logname='info_log', log_path='log/', multi_logs=True):
    """Configure and return the root logger with a file + console handler.

    Args:
        logname: Base name of the log file; written as ``<log_path>/<logname>.txt``.
        log_path: Directory the log file is placed in (created if missing).
        multi_logs: If True, remove previously attached handlers so repeated
            calls (one per experiment) don't duplicate output into old files.

    Returns:
        The configured root ``logging.Logger``.
    """
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if multi_logs:
        # Drop handlers from earlier calls; otherwise every run would keep
        # appending to all previously opened log files.
        # (The original `logger.handler = []` was a typo that only created a
        # dead attribute — handlers.clear() is the real reset.)
        logger.handlers.clear()

    # Make sure the log directory exists before FileHandler tries to open it.
    os.makedirs(log_path, exist_ok=True)

    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s',
                                  datefmt="%Y-%m-%d %H:%M:%S")

    fh = logging.FileHandler(os.path.join(log_path, '{}.txt'.format(logname)))
    fh.setFormatter(formatter)
    ch = logging.StreamHandler()
    ch.setFormatter(formatter)

    logger.addHandler(fh)
    logger.addHandler(ch)
    return logger


def train_varied_batch(logger, batch, gpu=2, epochs=100):
    """Run one training job with the given batch size and return its result.

    Args:
        logger: Logger passed through to ``train`` for progress output.
        batch: Batch size to inject into the user config.
        gpu: CUDA device index to use when CUDA is available (default 2,
            matching the previously hard-coded value).
        epochs: Number of training epochs (default 100, as before).

    Returns:
        Whatever ``train`` returns (project-defined; pickled by the caller).
    """
    user_config = get_user_paras()
    user_config['epoch'] = epochs
    user_config['batch'] = batch
    # Fall back to CPU when no CUDA device is present.
    device = torch.device("cuda:{}".format(gpu) if torch.cuda.is_available() else "cpu")
    user_config['device'] = device
    return train(user_config, logger)


if __name__ == '__main__':
    # Sweep over decreasing batch sizes; each run gets its own log file and
    # its result is pickled under model/ with a timestamped name.
    test_batch = [300, 256, 200, 180, 150, 128]
    os.makedirs('model', exist_ok=True)
    for b in test_batch:
        logger = get_logger('batch_test_{}'.format(b))
        data = train_varied_batch(logger, b)
        # Use a context manager so the pickle file is flushed and closed
        # even if a later iteration fails (the original leaked the handle).
        out_path = 'model/{}_b{}.pkl'.format(time.strftime("%Y_%m_%d_%H_%M"), b)
        with open(out_path, 'wb') as f:
            pickle.dump(data, f)

