import os
import time
import pandas as pd
from sklearn.model_selection import train_test_split
from FSdata.FSdataset import FSdata, collate_fn,attr2length_map, idx2attr_map
import torch
import torch.utils.data as torchdata
from models.resnet import *
from models.senet import *
from models.drn import *
import torch.optim as optim
from torch.optim import lr_scheduler
from utils.losses import SoftmaxCrossEntropy, BCELogitsLossWithMask
from utils.train_FSLM import train, trainlog
from FSdata.FSaug import *
import logging
from utils.confusion_mat import cfs_mats
from models.GCN import GCN_cat
from models.resnext import resnext101_32_cat_dilated
from models.dpn import dpn68_cat_dilate
from models.resnet_test import Resnet50_crosscat
from FSdata.LMFSdata_prepare import prepare_FSLM_dataProvider
from models.FSLM_ResGCN import FSLM_ResGCN

# Pin training to a single GPU: physical device 2 is exposed to torch as cuda:0.
# Must be set before any CUDA context is created.
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
# Indices of the fashion attributes to train on; presumably these index into
# attr2length_map / idx2attr_map from FSdata.FSdataset — TODO confirm.
FSselect_AttrIdx = range(8)
epoch_num = 50   # total number of training epochs
start_epoch = 0  # first epoch index (non-zero only when resuming a run)

# prepare data provider
FSroot_path = '/media/gserver/data/FashionAI'          # fashion-attribute dataset root
LMroot_path = '/media/gserver/data/landmark/rawdata/'  # landmark dataset root

# Build the joint fashion-attribute + landmark data provider (project code in
# FSdata.LMFSdata_prepare). From this call site we can only tell it is
# parameterized by the two dataset roots and the selected attribute indices.
data_provider = prepare_FSLM_dataProvider(
    FSroot_path=FSroot_path,
    LMroot_path=LMroot_path,
    FSselect_AttrIdx=FSselect_AttrIdx,
)


# Saving dir: checkpoints and the training log are written here.
save_dir = '/media/gserver/models/FashionAI/FSLM_ResGCN'
# exist_ok=True avoids the check-then-create race of exists() + makedirs()
# (another process creating the dir between the two calls would raise).
os.makedirs(save_dir, exist_ok=True)
logfile = os.path.join(save_dir, 'trainlog.log')
# Project helper (utils.train_FSLM.trainlog): presumably configures the
# logging module to write to logfile — confirm against its definition.
trainlog(logfile)


# model prepare
resume = None  # path to a checkpoint to warm-start from; None trains from scratch
# One classification head per selected fashion attribute (head width taken from
# attr2length_map) plus a 24-class landmark head, at 336x336 input resolution.
model = FSLM_ResGCN(FSnum_classes=[attr2length_map[x] for x in FSselect_AttrIdx],
                    LMnum_classes=24, input_size=(336,336))
if resume:
    logging.info('resuming finetune from %s'%resume)
    model.load_state_dict(torch.load(resume))
model = model.cuda()
logging.info(model)  # log the full module tree for the record


# Fashion-attribute branch: SGD with softmax cross-entropy and step decay
# (lr multiplied by 0.1 every 6 epochs).
FSoptimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9, weight_decay=1e-5)
FScriterion = SoftmaxCrossEntropy()
FSexp_lr_scheduler = lr_scheduler.StepLR(FSoptimizer, step_size=6, gamma=0.1)

# Landmark branch: Adam with masked BCE-with-logits loss and a piecewise-
# constant decay schedule.
LMoptimizer = optim.Adam(model.parameters(), lr=1e-4, weight_decay=1e-5)
LMcriterion = BCELogitsLossWithMask(size_average=True)


def lr_lambda(epoch):
    """Return the multiplier applied to the base LM learning rate at `epoch`.

    1.0 for epochs < 12, 0.1 for epochs 12-21, 0.05 from epoch 22 onward.
    (def instead of an assigned lambda, per PEP 8 E731.)
    """
    if epoch < 12:
        return 1
    return 0.1 if epoch < 22 else 0.05


LMexp_lr_scheduler = lr_scheduler.LambdaLR(LMoptimizer, lr_lambda=lr_lambda)

# Launch the joint training loop (project code: utils.train_FSLM.train).
# Argument meanings below are inferred from names — confirm against train()'s
# actual signature before relying on them.
train(
    model,
    epoch_num,
    start_epoch,
    FSoptimizer,          # fashion-attribute branch optimizer
    LMoptimizer,          # landmark branch optimizer
    FScriterion,          # fashion-attribute loss
    LMcriterion,          # landmark loss
    FSexp_lr_scheduler,
    LMexp_lr_scheduler,
    data_provider,
    save_dir,             # checkpoints / logs destination
    augloss=False,        # NOTE(review): semantics defined inside train() — confirm
    print_inter=200,      # presumably iterations between progress prints — confirm
    val_inter=3500        # presumably iterations between validation passes — confirm
)

