import os
import time
import pandas as pd
from sklearn.model_selection import train_test_split
from FSdata_p.FSdataset import FSdata, collate_fn,attr2length_map, idx2attr_map
import torch
import torch.utils.data as torchdata
from models.resnet import *
from models.resnet import  resnet50_cat3,resnet50_cat_conv
from models.drn import  drn_d_54_cat,drn_d_54_cat3,drn_d_54_cat_sp,drn_d_54_cat_selflayer,drn_c_42_cat,drn_d_54_cat_private

from models.senet import *
from models.xception import xception_cat,xception_cat_multiscale,xception_cat2
from models.inception_resnet_v2 import InceptionResNetV2_cat
from models.inception_v4 import inceptionv4_cat
import torch.optim as optim
from torch.optim import lr_scheduler
from utils.losses import SoftmaxCrossEntropy
from utils.train_catsoftmax_test import train, trainlog
from FSdata_p.FSaug import *
from models.dilation_resnet import *
from models.senet2 import se_resnet50_cat,se_resnext50_cat,se_resnet152_cat
import logging
class FSAug(object):
    """Training-time augmentation pipeline.

    Applies one of several random crops, resizes to 336x336, randomly
    flips horizontally, and normalizes with the standard ImageNet
    mean/std.

    NOTE(review): `select` presumably restricts each transform to the
    listed attribute indices — confirm against FSdata_p.FSaug.
    """

    def __init__(self):
        steps = [
            RandomUpperCrop(size=(336, 336), select=[1, 3, 4]),
            RandomResizedCrop(size=(336, 336), select=[2, 7]),
            RandomDownCrop(size=(336, 336), select=[5, 6]),
            Resize(size=(336, 336), select=[0, 1, 2, 3, 4, 5, 6, 7]),
            RandomHflip(),
            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ]
        self.augment = Compose(steps)

    def __call__(self, image, attr_idx):
        """Run the composed pipeline on (image, attr_idx)."""
        return self.augment(image, attr_idx)

class FSAugVal(object):
    """Deterministic validation-time preprocessing pipeline.

    Mirrors FSAug but uses fixed (non-random) crops and no flipping, so
    validation results are reproducible.

    NOTE(review): `select` presumably restricts each transform to the
    listed attribute indices — confirm against FSdata_p.FSaug.
    """

    def __init__(self):
        steps = [
            UpperCrop(size=(336, 336), select=[1, 3, 4]),
            ResizedCrop(size=(336, 336), select=[2, 7]),
            DownCrop(size=(336, 336), select=[5, 6]),
            Resize(size=(336, 336), select=[0, 1, 2, 3, 4, 5, 6, 7]),
            Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ]
        self.augment = Compose(steps)

    def __call__(self, image, attr_idx):
        """Run the composed pipeline on (image, attr_idx)."""
        return self.augment(image, attr_idx)


os.environ["CUDA_VISIBLE_DEVICES"] = "0"

rawdata_root = '/home/hszc/fashion'

# Round-1 base annotations; image paths are rewritten relative to rawdata_root.
all_pd_round1 = pd.read_csv(os.path.join(rawdata_root, 'round1/base/Annotations/label.csv'),
                            header=None, names=['ImageName', 'AttrKey', 'AttrValues'])
all_pd_round1['ImageName'] = all_pd_round1['ImageName'].apply(lambda x: os.path.join('round1/base', x))

# Round-2 training annotations.
all_pd_round2 = pd.read_csv(os.path.join(rawdata_root, 'round2/train/Annotations/label.csv'),
                            header=None, names=['ImageName', 'AttrKey', 'AttrValues'])
all_pd_round2['ImageName'] = all_pd_round2['ImageName'].apply(lambda x: os.path.join('round2/train', x))

# Hold out 10% of round-2 as validation, stratified on AttrKey so every
# attribute task is represented in both splits.
train_pd_round2, val_pd = train_test_split(all_pd_round2, test_size=0.1, random_state=37,
                                           stratify=all_pd_round2['AttrKey'])

# pd.concat replaces DataFrame.append, which is deprecated and removed in pandas 2.0.
train_pd = pd.concat([all_pd_round1, train_pd_round2])

# Extra skirt-length samples (CSV filename typo is the on-disk name; do not "fix" it).
skirt_add = pd.read_csv("data/round2_data_add_skirt_legth.csv", header=None,
                        names=['ImageName', 'AttrKey', 'AttrValues'])
skirt_add['ImageName'] = skirt_add['ImageName'].apply(lambda x: os.path.join('round1/web', x))
train_pd = pd.concat([train_pd, skirt_add])

# Restrict training/validation to the selected attribute tasks.
select_AttrIdx = [1, 2, 3, 4]
select_AttrKey = [idx2attr_map[x] for x in select_AttrIdx]
train_pd = train_pd[train_pd['AttrKey'].isin(select_AttrKey)]
val_pd = val_pd[val_pd['AttrKey'].isin(select_AttrKey)]

# Checkpoint/log directory; timestamp suffix keeps repeated runs distinct.
save_dir = '/home/hszc/fashion/model_round2/drn_d_54_cat[allcrop]-1234-%d' % time.time()
# exist_ok avoids the exists()/makedirs() race of the check-then-create idiom.
os.makedirs(save_dir, exist_ok=True)
logfile = os.path.join(save_dir, 'trainlog.log')
trainlog(logfile)

# Build train/val datasets with their respective augmentation pipelines.
data_set = {
    'train': FSdata(root_path=rawdata_root,
                    anno_pd=train_pd,
                    transforms=FSAug(),
                    select=select_AttrIdx),
    'val': FSdata(root_path=rawdata_root,
                  anno_pd=val_pd,
                  transforms=FSAugVal(),
                  select=select_AttrIdx),
}

data_loader = {
    'train': torchdata.DataLoader(data_set['train'], batch_size=8, num_workers=8,
                                  shuffle=True, pin_memory=True, collate_fn=collate_fn),
    'val': torchdata.DataLoader(data_set['val'], batch_size=4, num_workers=4,
                                shuffle=False, pin_memory=True, collate_fn=collate_fn),
}

# Record split sizes and the exact augmentation configuration in the log.
logging.info(train_pd.shape)
logging.info(val_pd.shape)
for phase in ('train', 'val'):
    logging.info('%s augment:' % phase)
    for item in data_set[phase].transforms.augment.transforms:
        logging.info('  %s %s' % (item.__class__.__name__, item.__dict__))

# Model: DRN-D-54 backbone with one classification head per selected
# attribute (head sizes come from attr2length_map).
resume = None  # set to a .pth checkpoint path to resume fine-tuning
model = drn_d_54_cat(pretrained=True,
                     num_classes=[attr2length_map[x] for x in select_AttrIdx])

base_lr = 0.001
if resume:
    logging.info('resuming finetune from %s' % resume)
    model.load_state_dict(torch.load(resume))
model = model.cuda()

# Single learning rate for all parameters. base_lr was previously declared
# but shadowed by a hard-coded 0.001 in the optimizer; wire it through
# (same value, so behavior is unchanged).
optimizer = optim.SGD(model.parameters(), lr=base_lr, momentum=0.9, weight_decay=1e-5)
criterion = SoftmaxCrossEntropy()
# Decay LR by 10x every 4 scheduler steps.
exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=4, gamma=0.1)

# Kick off training; train() returns the best validation accuracy and the
# corresponding model weights.
train_kwargs = dict(
    epoch_num=30,
    start_epoch=0,
    optimizer=optimizer,
    criterion=criterion,
    exp_lr_scheduler=exp_lr_scheduler,
    data_set=data_set,
    data_loader=data_loader,
    save_dir=save_dir,
    augloss=False,
    print_inter=100,
    val_inter=4000,
)
best_acc, best_model_wts = train(model, **train_kwargs)
