import torch
import torch.utils.data as Data
from TransFM.dataset import BookUserInfo, TrainDataset
from TransFM.evaluate import Evaluator
from TransFM.model import NewFMModel
from utils.log_output import simple_metric_log, simple_text_log

# Global configuration for TransFM training.
# Note: `__main__` below replaces this dict via book_user_info.update_hyper_params(),
# so values here act as the initial/base configuration.
hyper_params = {
    'prerec_result': 'prerec_result/prerec_result.pkl',        # output path for precomputed recommendations
    'data_path': 'datasets/data_full/',                        # root directory of the training data
    'previous_state_dict': 'TransFM/model_dat/state_dict.pkl', # checkpoint loaded for incremental training
    'new_state_dict': 'TransFM/model_dat/state_dict.pkl',      # checkpoint written after training (same path: overwrites)
    'max_title_len': 10,      # max number of tokens kept from a book title
    'feature_dim': 32,        # embedding size for input features
    'fm_dim': 32,             # latent dimension of the FM interaction terms
    'batch_size': 512,        # training batch size
    'eval_batch_size': 2048,  # batch size used by the evaluator
    'device': 'cuda',         # torch device string; assumes a GPU is available — TODO confirm fallback
    'epochs': 1000            # number of training epochs
}

def train(hyper_params, book_user_info: BookUserInfo):
    """Train the TransFM model with a BPR-style pairwise loss and return
    the evaluator's recommendation result.

    Args:
        hyper_params: configuration dict (batch_size, device, epochs,
            checkpoint paths, ...) — see module-level ``hyper_params``.
        book_user_info: preprocessed book/user metadata used to build the
            dataset and the evaluator.

    Returns:
        Whatever ``Evaluator.evaluate`` produces (recommendation result).

    Side effects:
        Saves the trained model state dict to hyper_params['new_state_dict'].
    """
    # Load data
    print('train: preparing...')
    simple_text_log('train', 'Preparing training dataset for TransFM...')
    dataset = TrainDataset(hyper_params, book_user_info)
    loader = Data.DataLoader(dataset, batch_size=hyper_params['batch_size'], shuffle=True)

    # Define model
    model = NewFMModel(hyper_params).to(hyper_params['device'])

    # Load previous model for incremental recommendation
    model.load_previous_state(hyper_params['previous_state_dict'])

    # Define optimizer (AdamW defaults: lr=1e-3, weight_decay=1e-2)
    optimizer = torch.optim.AdamW(model.parameters())

    # Define evaluator
    evaluator = Evaluator(hyper_params, book_user_info)

    # Start training
    print('train: start training...')

    for epoch in range(hyper_params['epochs']):
        if epoch % 10 == 0:
            simple_text_log('train', f'Training epoch ({epoch+1}/{hyper_params["epochs"]})...')
        total_loss = 0.0
        total_step = 0

        for data in loader:
            user_id, school, grade, user_len, date, sem, pos_bid, pos_title, pos_type, neg_bid, neg_title, neg_type = [i.to(hyper_params['device']) for i in data]

            pos_score = model(user_id, school, grade, user_len, date, sem, pos_bid, pos_title, pos_type)
            neg_score = model(user_id, school, grade, user_len, date, sem, neg_bid, neg_title, neg_type)

            # BPR pairwise loss. logsigmoid is the numerically stable form of
            # log(sigmoid(x)): the naive expression underflows to log(0) = -inf
            # when pos_score - neg_score is a large negative value.
            loss = -torch.nn.functional.logsigmoid(pos_score - neg_score).mean()
            total_loss += loss.item()
            total_step += 1

            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

        # Guard against an empty loader so the epoch summary cannot divide by zero.
        avg_loss = total_loss / max(total_step, 1)
        print(f'Epoch: {epoch}, Loss: {avg_loss}')
        simple_metric_log('train_metric', 'transfm_loss', avg_loss)

    simple_text_log('train', 'Recommending...')
    result = evaluator.evaluate(model)
    model.save_state_dict(hyper_params['new_state_dict'])

    return result


if __name__ == '__main__':
    # Build the metadata object from the base config, then let it produce the
    # final hyper-parameter dict (e.g. filling in vocab/field sizes discovered
    # from the data). NOTE(review): update_hyper_params appears to return a new
    # dict rather than mutate in place — confirm against BookUserInfo.
    book_user_info = BookUserInfo(hyper_params)
    hyper_params = book_user_info.update_hyper_params()

    train(hyper_params, book_user_info)
