import os

import pandas as pd
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as data

from dataset import CustomDataset
from HG_small import PPHGNet_small
from train import slice_tensor, lengths, random_split
# Attribute names in the order the per-attribute counters are indexed
# (presumably matching the order slice_tensor yields prediction heads —
# TODO confirm against train.slice_tensor). 'average' labels the summary row.
part = ['AGE','GENDER','GLASSES','RACE','EMOTION','MASK','HAT','WHISKERS','average']
def train(epoch, model, train_loader, optimizer, loss):
    """Run one training epoch over the 8 face-attribute heads.

    Args:
        epoch: 1-based epoch number, used only for logging.
        model: network whose output is sliced into per-attribute logits
            by ``slice_tensor(preds, lengths)``.
        train_loader: yields ``(imgs, labels)``; ``labels[:, i]`` is the
            class index for attribute ``i`` (assumed shape (batch, 8) —
            TODO confirm against the dataset).
        optimizer: optimizer stepping ``model``'s parameters.
        loss: criterion applied per attribute head (batch-mean reduction
            assumed, e.g. ``nn.CrossEntropyLoss``).

    Side effects: updates model weights in place; prints running and
    end-of-epoch per-attribute loss/accuracy. Returns None.
    """
    model.train()
    total_nums = 0                # samples seen so far this epoch
    num_batches = 0               # batches seen so far this epoch
    correct_part = [0] * 8        # per-attribute correct-prediction counts
    loss_part = [0.0] * 8         # per-attribute sums of batch-mean losses
    for idx, (imgs, labels) in enumerate(train_loader):
        total_nums += imgs.size(0)
        num_batches += 1
        imgs, labels = imgs.to('cuda'), labels.to('cuda')
        preds = model(imgs)
        parts = slice_tensor(preds, lengths)
        loss_value = 0
        for i, (name, pred) in enumerate(parts.items()):
            ll = loss(pred, labels[:, i])
            loss_value += ll
            # .item() detaches and converts; the original's ll.data.item()
            # used the deprecated .data attribute.
            loss_part[i] += ll.item()
            # per-attribute correct count
            correct_part[i] += (torch.argmax(pred, dim=1) == labels[:, i]).sum().item()
        optimizer.zero_grad()
        loss_value.backward()
        optimizer.step()
        if (idx + 1) % 100 == 0:
            # loss_part holds sums of per-batch MEAN losses, so normalize by
            # the batch count. (The original divided by total_nums, which
            # under-reported the loss by roughly a factor of the batch size.)
            avg_loss = round(sum(loss_part) / len(loss_part) / num_batches, 5)
            avg_acc = round(sum(correct_part) / len(correct_part) / total_nums, 5)
            print('epoch:{}|{}/{},avg_loss:{},avg_acc:{}'.format(epoch, idx + 1, len(train_loader), avg_loss, avg_acc))

    print(f"***********************************************train {epoch} ***********************************************")
    # Guard against an empty loader so the summary never divides by zero.
    loss_list = [l / max(num_batches, 1) for l in loss_part]
    acc_list = [c / max(total_nums, 1) for c in correct_part]
    for i in range(len(loss_list)):
        print(f'{part[i]} loss:{loss_list[i]},acc:{acc_list[i]}')
    print('average, loss:{}, acc:{}'.format(sum(loss_list) / 8, sum(acc_list) / 8))
        
def val(model, val_loader):
    """Evaluate per-attribute accuracy and return the 8-way average.

    Args:
        model: network whose output is sliced by ``slice_tensor``.
        val_loader: yields ``(imgs, labels)`` like the training loader.

    Returns:
        float: mean accuracy over the 8 attribute heads.
    """
    model.eval()
    total_nums = 0
    correct_part = [0] * 8
    # The original called torch.no_grad() as a bare statement, which builds
    # and immediately discards the context manager — autograd stayed ON
    # during evaluation. It must wrap the forward passes as a `with` block.
    with torch.no_grad():
        for imgs, labels in val_loader:
            total_nums += imgs.size(0)
            imgs, labels = imgs.to('cuda'), labels.to('cuda')
            preds = model(imgs)
            parts = slice_tensor(preds, lengths)
            for i, (name, pred) in enumerate(parts.items()):
                # per-attribute correct count
                correct_part[i] += (torch.argmax(pred, dim=1) == labels[:, i]).sum().item()
    # Guard against an empty loader so we never divide by zero.
    acc_list = [c / max(total_nums, 1) for c in correct_part]
    print("***********************************************eval***********************************************")
    for i in range(len(acc_list)):
        print(f'{part[i]} acc:{acc_list[i]}')
    average_acc = sum(acc_list) / 8
    print('average,  acc:{}'.format(average_acc))
    return average_acc
if __name__ == "__main__":
    trainset = CustomDataset('/home/data/2792')
    print(f'trainset: {len(trainset)}')

    train_loader = data.DataLoader(trainset, batch_size=32, shuffle=True, num_workers=4)
    # NOTE(review): validation currently runs on the TRAINING set (the
    # random_split was deliberately disabled), so val accuracy will
    # overestimate generalization — confirm this is intended.
    val_loader = data.DataLoader(trainset, batch_size=32, shuffle=False, num_workers=4)

    loss = nn.CrossEntropyLoss()

    model = PPHGNet_small()
    model.load_state_dict(torch.load('HG_small_127.pth'))
    model.to('cuda')
    optimizer = optim.AdamW(model.parameters(), lr=0.0001)

    # torch.save raises if the target directory does not exist; create it
    # up front instead of failing at the first checkpoint.
    save_dir = '/project/train/models/cls/1201'
    os.makedirs(save_dir, exist_ok=True)

    best_acc = 0
    for epoch in range(1, 21):
        train(epoch, model, train_loader, optimizer, loss)
        acc = val(model, val_loader)
        if acc > best_acc:
            # Best-so-far checkpoint, named <epoch>_<accuracy>.pth.
            path = '{}/{}_{}.pth'.format(save_dir, epoch, acc)
            print(f'save model in {path}')
            torch.save(model.state_dict(), path)
            best_acc = acc
        elif epoch % 5 == 0:
            # Periodic fallback checkpoint every 5 epochs.
            torch.save(model.state_dict(), '{}/{}.pth'.format(save_dir, epoch))
