import os
import time
import pandas as pd
from sklearn.model_selection import train_test_split
from FSdata.FSdataset import FSdata, collate_fn,attr2length_map, idx2attr_map
import torch
import torch.utils.data as torchdata
from models.resnet import *
from models.senet import *
from models.drn import *
import torch.optim as optim
from torch.optim import lr_scheduler
from utils.losses import SoftmaxCrossEntropy
from utils.train_MltLdr import train, trainlog
from FSdata.FSaug import *
import logging
from utils.preprocessing import *
from utils.confusion_mat import cfs_mats
from models.GCN import GCN_cat
from models.resnext import resnext101_32_cat_dilated
from models.drn import drn_d_54_cat_merge, drn_d_54_cat
from models.resnet_test import Resnet50_test,Resnet50_merge34
from FSdata.FSdataset_mscl import multiDataLoader

class FSAug(object):
    """Training-time augmentation: per-attribute resize or random upper crop,
    random horizontal flip, then ImageNet mean/std normalization."""

    def __init__(self, size=(336, 336)):
        # Attribute indices 0,2,5,6,7 get a plain resize to the target size;
        # indices 1,3,4 get a random crop from the upper image region instead.
        pipeline = [
            Resize(size=size, select=[0, 2, 5, 6, 7]),
            RandomUpperCrop(size=size, select=[1, 3, 4]),
            RandomHflip(),
            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ]
        # Kept as `self.augment`: the logging code below introspects
        # `.transforms.augment.transforms`.
        self.augment = Compose(pipeline)

    def __call__(self, image, attr_idx):
        return self.augment(image, attr_idx)

class FSAugVal(object):
    """Deterministic validation-time preprocessing: per-attribute resize or
    fixed upper crop, then ImageNet mean/std normalization (no random ops)."""

    def __init__(self, size=(336, 336)):
        # Same per-attribute routing as FSAug, but with the deterministic
        # UpperCrop and no horizontal flip, so validation is reproducible.
        pipeline = [
            Resize(size=size, select=[0, 2, 5, 6, 7]),
            UpperCrop(size=size, select=[1, 3, 4]),
            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ]
        # Kept as `self.augment`: the logging code below introspects
        # `.transforms.augment.transforms`.
        self.augment = Compose(pipeline)

    def __call__(self, image, attr_idx):
        return self.augment(image, attr_idx)


# Pin the process to a single GPU; model.cuda() later in the script uses device 0.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
rawdata_root = '/media/gserver/data/FashionAI'

# Round-1 base annotations: header-less CSV of (image path, attribute key,
# attribute-value label string).
round1_df = pd.read_csv(os.path.join(rawdata_root,'round1/base/Annotations/label.csv'),
                        header=None, names=['ImageName', 'AttrKey', 'AttrValues'])
# join_path_to_df prefixes ImageName with the dataset directory.
round1_df = join_path_to_df(round1_df, rawdata_root, 'round1/base')


# Round-2 training annotations, same schema as round 1.
round2_df = pd.read_csv(os.path.join(rawdata_root,'round2/train/Annotations/label.csv'),
                        header=None, names=['ImageName', 'AttrKey', 'AttrValues'])
round2_df = join_path_to_df(round2_df, rawdata_root, 'round2/train')

# Extra skirt-length labels shipped with round 2; note the images themselves
# live under round1/web, hence the different path prefix.
extra_df = pd.read_csv(os.path.join(rawdata_root,'round2/round2_data_add_skirt_legth.txt'),
                        header=None, names=['ImageName', 'AttrKey', 'AttrValues'])
extra_df = join_path_to_df(extra_df, rawdata_root, 'round1/web')


# Hold out 10% of round-2 data for validation, stratified on AttrKey so each
# attribute keeps the same train/val proportion. Only round-2 data is used
# for validation; round-1 and the extra web data are train-only.
round2_train_pd, val_pd = train_test_split(round2_df, test_size=0.1, random_state=37,
                                    stratify=round2_df['AttrKey'])

train_pd = pd.concat([round2_train_pd, round1_df, extra_df], axis=0, ignore_index=True)
# NOTE(review): ignore_index=True already yields a fresh 0..n-1 index, so this
# reassignment is redundant (but harmless).
train_pd.index = range(train_pd.shape[0])


# Keep only these attribute indices (presumably resolved via idx2attr_map —
# confirm against FSdata.FSdataset).
select_AttrIdx = [1,2,3,4]
train_pd = select_attr_from_df(train_pd, select_AttrIdx)
val_pd = select_attr_from_df(val_pd, select_AttrIdx)

# saving dir for checkpoints and the training log
save_dir = '/media/gserver/models/FashionAI/res_dilate_mltscl'
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
logfile = '%s/trainlog.log'%save_dir
# trainlog configures the logging module to write to logfile.
trainlog(logfile)


# Multi-scale training: one copy of the training set per input resolution;
# validation uses a single fixed size (FSAugVal's default 336x336).
data_set = {}
data_set['train'] = []
# FIX: the third scale was (424, 242) in the original — a transposed typo for
# the square (424, 424); every other training/validation scale is square.
for train_size in [(336, 336), (368, 368), (424, 424)]:
    data_set['train'].append(FSdata(anno_pd=train_pd,
                                    transforms=FSAug(size=train_size),
                                    select=select_AttrIdx))

data_set['val'] = FSdata(anno_pd=val_pd,
                         transforms=FSAugVal(),
                         select=select_AttrIdx)

# multiDataLoader interleaves batches drawn from the three per-scale datasets.
multi_loader = multiDataLoader(*data_set['train'], bs=8, num_workers=3, shuffle=True,
                               pin_memory=False, collate_fn=collate_fn)
data_loader = {}
data_loader['val'] = torchdata.DataLoader(data_set['val'], batch_size=4, num_workers=2,
                                          shuffle=False, pin_memory=False, collate_fn=collate_fn)
# Log dataset sizes and the exact augmentation pipeline of every split, so a
# run's preprocessing can be reconstructed from the log file alone.
logging.info(train_pd.shape)
logging.info(val_pd.shape)
logging.info('train augment:')
# FIX: iterate the datasets directly instead of `for i in xrange(len(...))` —
# xrange does not exist in Python 3, and the index was only used for lookup.
for train_ds in data_set['train']:
    for item in train_ds.transforms.augment.transforms:
        logging.info('  %s %s' % (item.__class__.__name__, item.__dict__))

logging.info('val augment:')
for item in data_set['val'].transforms.augment.transforms:
    logging.info('  %s %s' % (item.__class__.__name__, item.__dict__))


# model prepare
resume = None  # path to a checkpoint to resume from; None trains from scratch
# One classification head per selected attribute; each head's class count
# comes from attr2length_map. (resnet50_cat_dilate is pulled in via the
# wildcard import from models.resnet.)
model = resnet50_cat_dilate(pretrained=True, num_classes=[attr2length_map[x] for x in select_AttrIdx])


# model = torch.nn.DataParallel(model)  # enable for multi-GPU training
if resume:
    logging.info('resuming finetune from %s'%resume)
    model.load_state_dict(torch.load(resume))
model = model.cuda()
logging.info(model)

optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9, weight_decay=1e-5)
criterion = SoftmaxCrossEntropy()
# Decay the learning rate by 10x every 6 epochs.
exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=6, gamma=0.1)

# training
# Runs the optimization loop over the multi-scale loader, validating every
# val_inter iterations and checkpointing into save_dir. Judging by the names,
# it returns the best validation accuracy and the matching model weights —
# confirm against utils.train_MltLdr.train.
best_acc,best_model_wts = train(  model,
                                  epoch_num=50,
                                  start_epoch=0,
                                  optimizer=optimizer,
                                  criterion=criterion,
                                  exp_lr_scheduler=exp_lr_scheduler,
                                  multi_loader=multi_loader,
                                  val_data_set=data_set['val'],
                                  val_data_loader=data_loader['val'],
                                  save_dir=save_dir,
                                  augloss=False,  # auxiliary/augmented loss disabled
                                  print_inter=100,
                                  val_inter=4000,
                                  )
