import os
import time
import pandas as pd
from sklearn.model_selection import train_test_split
from FSdata.FSdataset import FSdata, collate_fn,attr2length_map, idx2attr_map
import torch
import torch.utils.data as torchdata
from models.resnet import *
from models.senet import *
import torch.optim as optim
from torch.optim import lr_scheduler
from utils.losses import SoftmaxCrossEntropy
from utils.train_catsoftmax import train, trainlog
from FSdata.FSaug import *
import logging
from utils.preprocessing import init_weight_from_model

class FSAug(object):
    """Training-time augmentation pipeline.

    Most attribute categories are resized straight to 336x336; attribute
    indices 3 and 4 instead get a random crop of the upper region.  A random
    horizontal flip and per-channel normalization (standard ImageNet
    mean/std) are applied to every sample.
    """

    def __init__(self):
        steps = [
            Resize(size=(336, 336), select=[0, 1, 2, 5, 6, 7]),
            RandomUpperCrop(size=(336, 336), select=[3, 4]),
            RandomHflip(),
            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ]
        # Kept as a Compose so callers can introspect .augment.transforms
        # (the logging code below relies on that attribute).
        self.augment = Compose(steps)

    def __call__(self, image, attr_idx):
        """Apply the pipeline; attr_idx routes the select-gated transforms."""
        return self.augment(image, attr_idx)

class FSAugVal(object):
    """Validation-time preprocessing pipeline.

    Deterministic counterpart of FSAug: same resize / upper-crop routing by
    attribute index, but a fixed (non-random) crop and no flip, so validation
    results are reproducible.
    """

    def __init__(self):
        steps = [
            Resize(size=(336, 336), select=[0, 1, 2, 5, 6, 7]),
            UpperCrop(size=(336, 336), select=[3, 4]),
            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ]
        # Kept as a Compose so callers can introspect .augment.transforms.
        self.augment = Compose(steps)

    def __call__(self, image, attr_idx):
        """Apply the pipeline; attr_idx routes the select-gated transforms."""
        return self.augment(image, attr_idx)


# Restrict training to GPUs 1 and 2 (must be set before CUDA is initialized).
os.environ["CUDA_VISIBLE_DEVICES"] = "1,2"

rawdata_root = '/media/gserver/data/FashionAI'
label_csv = os.path.join(rawdata_root, 'base/Annotations/label.csv')
all_pd = pd.read_csv(label_csv, header=None,
                     names=['ImageName', 'AttrKey', 'AttrValues'])
# Paths in the csv are relative to the 'base' subset directory.
all_pd['ImageName'] = all_pd['ImageName'].map(lambda name: os.path.join('base', name))


# 90/10 split, stratified on AttrKey so every attribute keeps its share,
# with a fixed seed for reproducibility.
train_pd, val_pd = train_test_split(all_pd, test_size=0.1, random_state=37,
                                    stratify=all_pd['AttrKey'])


# Keep only the selected attribute task(s).
select_AttrIdx = [4]
select_AttrKey = [idx2attr_map[x] for x in select_AttrIdx]
# Series.isin replaces the per-row `apply(lambda x: True if ... else False)`:
# vectorized, clearer, and identical in result.
train_pd = train_pd[train_pd['AttrKey'].isin(select_AttrKey)]
val_pd = val_pd[val_pd['AttrKey'].isin(select_AttrKey)]


# saving dir
# Saving dir, timestamped so repeated runs never collide.
save_dir = '/media/gserver/models/FashionAI/res50_cat[4]-init[3,4]-crop-%d'%time.time()
# exist_ok avoids the check-then-create race of os.path.exists + makedirs.
os.makedirs(save_dir, exist_ok=True)
logfile = '%s/trainlog.log'%save_dir
trainlog(logfile)


# Datasets and loaders, one pair per phase.  Train gets the stochastic
# augmentation and shuffling; val is deterministic and unshuffled.
data_set = {
    'train': FSdata(root_path=rawdata_root,
                    anno_pd=train_pd,
                    transforms=FSAug(),
                    select=select_AttrIdx),
    'val': FSdata(root_path=rawdata_root,
                  anno_pd=val_pd,
                  transforms=FSAugVal(),
                  select=select_AttrIdx),
}

data_loader = {
    'train': torchdata.DataLoader(data_set['train'], batch_size=8, num_workers=4,
                                  shuffle=True, pin_memory=True, collate_fn=collate_fn),
    'val': torchdata.DataLoader(data_set['val'], batch_size=8, num_workers=4,
                                shuffle=False, pin_memory=True, collate_fn=collate_fn),
}
# log dataset sizes and the augmentation configuration for this run
def _log_transform_list(title, transform_list):
    # Log one header line followed by each transform's class and config.
    logging.info(title)
    for tr in transform_list:
        logging.info('  %s %s' % (tr.__class__.__name__, tr.__dict__))


logging.info(train_pd.shape)
logging.info(val_pd.shape)
_log_transform_list('train augment:', data_set['train'].transforms.augment.transforms)
_log_transform_list('val augment:', data_set['val'].transforms.augment.transforms)


# model prepare
resume = None  # set to a checkpoint path to resume fine-tuning
# One classification head per selected attribute; head width comes from the
# number of classes of that attribute (attr2length_map).
model = resnet50_cat(pretrained=True, num_classes=[attr2length_map[x] for x in select_AttrIdx])


# Wrap in DataParallel BEFORE loading: the checkpoints below carry
# 'module.'-prefixed keys, so the wrap order matters here.
model = torch.nn.DataParallel(model)
if resume:
    logging.info('resuming finetune from %s'%resume)
    model.load_state_dict(torch.load(resume))
model = model.cuda()

# init from multi-task weights
# Warm-start this single-task model from a multi-task [3,4] checkpoint by
# copying its fc1 head into this model's fc0 head.
# NOTE(review): presumably fc1 is the second head (attribute 4) of the
# multi-task model, matching select_AttrIdx=[4] above — confirm against
# the multi-task model definition.
init_weight_path = '/media/gserver/models/FashionAI/resnet50_cat[3,4]_crop[3,4]/bestweights-[0.8810]-[0.9793].pth'
src_weights = torch.load(init_weight_path)
src_name = ['module.fc.fc1.weight', 'module.fc.fc1.bias']
tg_name = ['module.fc.fc0.weight', 'module.fc.fc0.bias']
model = init_weight_from_model(model, tg_name=tg_name, src_name=src_name, src_weights=src_weights)
logging.info('init weights from %s'%init_weight_path)

optimizer = optim.SGD(model.parameters(), lr=0.0005, momentum=0.9, weight_decay=1e-5)
criterion = SoftmaxCrossEntropy()
# Halve the learning rate every 10 scheduler steps.
exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)

# training
# training: runs the full loop and returns the best validation accuracy
# together with the corresponding model weights.
best_acc, best_model_wts = train(
    model,
    epoch_num=20,
    start_epoch=0,
    optimizer=optimizer,
    criterion=criterion,
    exp_lr_scheduler=exp_lr_scheduler,
    data_set=data_set,
    data_loader=data_loader,
    save_dir=save_dir,
    augloss=False,
    print_inter=100,   # log every 100 iterations
    val_inter=2000,    # validate every 2000 iterations
)
