from cassava_leaf.logging import create_hyper, get_logger
from cassava_leaf.utils import random_seed, get_model, load_checkpoint
from cassava_leaf.augment import AlbumentationsTransform, get_train_aug, get_valid_aug, RandomCutMixUp
from cassava_leaf.multihead import MultiHeadModel, MultiOutputLoss, MultiOutputCallback, AccMul, AccBin
from cassava_leaf.training import StepCallback, EarlyStoppingException, FineTuneEarlyStoppingCallback, \
    ModelInfoCallback, LogMetricsCallback, GroupMetricsCallback
from cassava_leaf.model import change_act, multi_head_model_splitter, simple_head_splitter
from cassava_leaf.plot import PlotCallback
import pandas as pd
from sklearn.model_selection import train_test_split, StratifiedKFold
import os.path as osp
from fastai.vision.all import ImageDataLoaders, Learner, cnn_learner, accuracy,\
    LabelSmoothingCrossEntropy, L, CSVLogger, trainable_params, aug_transforms, Normalize, imagenet_stats, Callback,\
    MultiCategoryBlock, RandomSplitter, ColSplitter, ColReader, DataBlock, ImageBlock, Path, Resize
from fastai.distributed import *
from functools import partial
import torch
from torch import nn
import os
from typing import List


DATA_ROOT = os.environ['DATA_ROOT']
MODEL_ROOT = os.environ['MODEL_ROOT']
DISABLE_MONGO = 'DISABLE_MONGO' in os.environ
DEBUG = 'DEBUG' in os.environ


def create_dataset(hyperparam, logger, debug=False):
    """Load the training dataframe and build stratified K-fold splits.

    Optionally holds out a stratified 20% test split (``HOLDOUT``) and merges
    the 2019 competition data (``USE_2019_DATA``) and extra scraped images
    (``USE_EXTRA_IMAGE``) into the training pool.

    Args:
        hyperparam: dict-like of hyperparameters; reads HOLDOUT, USE_2019_DATA,
            USE_EXTRA_IMAGE, N_FOLD and KFOLD_RANDOM_STATE.
        logger: logger used for progress messages.
        debug: when True, truncate to the first 1000 rows for fast iteration.

    Returns:
        Tuple ``(df, test_df, folds)`` where ``test_df`` is None unless
        HOLDOUT is set, and ``folds`` is an enumerated iterator of
        ``(fold_index, (train_idx, valid_idx))`` pairs.
    """
    df = pd.read_csv(osp.join(DATA_ROOT, 'train.csv'))
    if debug:
        df = df.iloc[:1000]
    logger.info(f'Total training samples: {df.shape[0]}')

    # Turn bare filenames into absolute paths once, up front.
    df['image_id'] = df['image_id'].map(lambda x: osp.join(DATA_ROOT, 'train_images', x))

    test_df = None
    if hyperparam.get('HOLDOUT', False):
        df, test_df = train_test_split(df, test_size=0.2, stratify=df['label'],
                                       random_state=hyperparam['KFOLD_RANDOM_STATE'], shuffle=True)
        # NOTE(review): 'from' is only created on this path; when HOLDOUT is
        # off but a merge branch below runs, original rows get NaN 'from'.
        # Preserved as-is — confirm whether 'from' = 0 should be set always.
        df['from'] = 0
        df = df.reset_index(drop=True)
        logger.info(f'(HOLDOUT) Total training samples: {df.shape[0]}, holdout: {test_df.shape[0]}')

    if hyperparam.get('USE_2019_DATA', False):
        merged_df = pd.read_csv(osp.join(DATA_ROOT, 'merged.csv'))
        merged_df['image_id'] = merged_df['image_id'].map(lambda x: osp.join(DATA_ROOT, 'train', x))
        merged_df['from'] = 1
        # pd.concat(..., ignore_index=True) replaces the deprecated
        # DataFrame.append + reset_index pair (append was removed in pandas 2.0).
        df = pd.concat([df, merged_df[merged_df['source'] == 2019]], ignore_index=True)
        logger.info(f'Add 2019 data, now training samples: {df.shape[0]}')

    if hyperparam.get('USE_EXTRA_IMAGE', False):
        extra_df = pd.read_csv(osp.join(DATA_ROOT, 'extra_data.csv'))
        extra_df['image_id'] = extra_df['image_id'].map(lambda x: osp.join(DATA_ROOT, 'extra', x))
        extra_df['from'] = 2
        df = pd.concat([df, extra_df], ignore_index=True)
        logger.info(f'Add extra data, now training samples: {df.shape[0]}')

    fold = StratifiedKFold(n_splits=hyperparam['N_FOLD'], random_state=hyperparam['KFOLD_RANDOM_STATE'], shuffle=True)
    return df, test_df, enumerate(fold.split(df, df['label']))


def create_dataloader(df, hyperparam, logger):
    """Build fastai ImageDataLoaders from *df* with the configured augmentations.

    Args:
        df: dataframe with 'image_id' (file path) and 'label' columns, and
            optionally an 'is_valid' column selecting the validation rows.
        hyperparam: dict-like; reads FASTAI_DEFAULT_AUG, NORMALIZE,
            ALBUMENTATIONS_AUG, IMAGE_SIZE and BATCH_SIZE.
        logger: logger used to record the chosen transforms.

    Returns:
        fastai DataLoaders with train/valid splits.
    """
    aug_kwargs = {}
    if hyperparam.get('FASTAI_DEFAULT_AUG', False):
        aug_kwargs['batch_tfms'] = [*aug_transforms(size=hyperparam['IMAGE_SIZE'])]
        if hyperparam.get('NORMALIZE', False):
            # Normalize must run before the other batch transforms.
            aug_kwargs['batch_tfms'].insert(0, Normalize.from_stats(*imagenet_stats))

    # Fix for the old TODO: the albumentations transform used to be passed via
    # aug_kwargs['item_tfms'] while Resize was passed as an explicit keyword,
    # which raised "TypeError: got multiple values for keyword argument
    # 'item_tfms'". Build a single item_tfms list instead (Resize first).
    item_tfms = [Resize(hyperparam['IMAGE_SIZE'])]
    if hyperparam.get('ALBUMENTATIONS_AUG', False):
        item_tfms.append(AlbumentationsTransform(get_train_aug(), get_valid_aug()))

    logger.info(str(aug_kwargs))
    logger.info(str(item_tfms))
    valid_col = 'is_valid' if 'is_valid' in df.columns else None

    dls = ImageDataLoaders.from_df(df, path='/',
                                   fn_col='image_id', label_col='label',
                                   bs=hyperparam['BATCH_SIZE'], num_workers=32, valid_col=valid_col,
                                   item_tfms=item_tfms,
                                   **aug_kwargs)
    print('Transform:')
    print('After item:', dls.train.after_item)
    print('Before batch:', dls.train.before_batch)
    print('After batch:', dls.train.after_batch)
    return dls


def create_learner(hyperparam, dls, model_id, f_idx, valid_df, logger, eval=False):
    """Construct a fastai Learner for one fold according to *hyperparam*.

    Args:
        hyperparam: dict-like of hyperparameters (USE_MULTI_HEAD, USE_LEARNER,
            SIMPLE_HEAD, FINETUNE_THRESHOLD, USE_CUTMIXUP, USE_LABEL_SMOOTHING,
            MODEL_NAME, PRETRAINED, ...).
        dls: fastai DataLoaders for this fold.
        model_id: identifier used in checkpoint/history file names.
        f_idx: fold index, also used in file names.
        valid_df: validation rows for the step callback.
        logger: attached to the learner as ``_logger``.
        eval: when True, skip the per-step training callback.
            NOTE(review): shadows the builtin ``eval``; kept for caller
            compatibility.

    Returns:
        A fastai Learner with callbacks, loss, metrics and splitter configured.
    """
    callbacks: List[Callback] = []
    loss_func = None

    if hyperparam.get('USE_MULTI_HEAD', False):
        learner_func = Learner
        model_func = MultiHeadModel(hyperparam['MODEL_NAME'])
        # MultiOutputLoss handles label smoothing itself via these arguments.
        loss_func = MultiOutputLoss(hyperparam['USE_LABEL_SMOOTHING'], hyperparam['LABEL_SMOOTHING_EPS'])
        callbacks.append(MultiOutputCallback())
        metrics = [AccMul(), AccBin()]
        splitter = multi_head_model_splitter
    else:
        learner_func = cnn_learner
        model_func = partial(get_model, hyperparam['MODEL_NAME'])
        metrics = accuracy
        splitter = None

    if hyperparam.get('USE_LEARNER', False):
        learner_func = Learner
        if not splitter:
            splitter = trainable_params
        model_func = get_model(hyperparam['MODEL_NAME'])

    if hyperparam.get('SIMPLE_HEAD', False):
        learner_func = Learner
        model_func = get_model(hyperparam['MODEL_NAME'])
        # Replace the backbone's classification head with a fresh 5-class layer.
        if hasattr(model_func, 'fc'):
            model_func.fc = nn.Linear(model_func.fc.in_features, 5)
            torch.nn.init.normal_(model_func.fc.weight)
        elif hasattr(model_func, 'classifier'):
            # Fixed: original read `model_func.classfier` (typo), which raised
            # AttributeError for any model with a `classifier` head.
            fc = model_func.classifier
            model_func.classifier = nn.Linear(fc.in_features, 5)
            torch.nn.init.normal_(model_func.classifier.weight)

        splitter = simple_head_splitter

    if hyperparam.get('FINETUNE_THRESHOLD', False):
        callbacks.append(FineTuneEarlyStoppingCallback(hyperparam['FINETUNE_THRESHOLD']))

    if hyperparam.get('USE_CUTMIXUP', False):
        callbacks.append(RandomCutMixUp())

    # Fixed: this used to run unconditionally and clobbered the MultiOutputLoss
    # chosen in the multi-head branch above. Only pick a single-output loss
    # when none has been set yet.
    if loss_func is None:
        if hyperparam.get('USE_LABEL_SMOOTHING', False):
            loss_func = LabelSmoothingCrossEntropy(hyperparam['LABEL_SMOOTHING_EPS'])
        else:
            loss_func = nn.CrossEntropyLoss()

    if not eval:
        callbacks.append(StepCallback(model_id, f_idx, valid_df, hyperparam.get('DEBUG_STEP', False)))

    learn = learner_func(dls, model_func, metrics=metrics,
                         model_dir=MODEL_ROOT,
                         cbs=callbacks + [
                             CSVLogger(osp.join(MODEL_ROOT, f'model_{model_id}_{f_idx}_history.csv')),
                             ModelInfoCallback(),
                             LogMetricsCallback(),
                             PlotCallback(MODEL_ROOT, model_id, f_idx)
                         ],
                         loss_func=loss_func,
                         splitter=splitter,
                         pretrained=hyperparam['PRETRAINED']
                         )
    print(learn.cbs)
    # Extra attributes consumed by the custom callbacks/logging.
    learn._logger = logger
    learn._best_acc = None
    return learn


def run_training(hyperparam):
    """Run the K-fold training (or validation-only) loop for *hyperparam*.

    Creates the dataset and folds, then for each fold builds dataloaders and a
    learner, and either trains it or (``VALID_ONLY``) loads a checkpoint and
    dumps predictions.

    Returns:
        The last Learner that ran, or None when every fold was skipped
        via ``ONE_FOLD``.
    """
    if not DISABLE_MONGO:
        _id = create_hyper(hyperparam)
    else:
        # Fixed: this placeholder used to be assigned unconditionally,
        # discarding the id returned by create_hyper above.
        _id = '123123'
    print(_id)
    logger = get_logger(_id, hyperparam['LOG_DB'])

    logger.info(hyperparam)
    random_seed(hyperparam['GLOBAL_RANDOM_SEED'], True)
    model_id = hyperparam['MODEL_ID']

    df, test_df, folds = create_dataset(hyperparam, logger, debug=DEBUG)

    learn = None  # stays None if ONE_FOLD skips every fold (was a NameError)
    for f_idx, (train_idx, valid_idx) in folds:
        logger.info('training fold {}...'.format(f_idx))

        # ONE_FOLD restricts the run to a single fold index. (The original
        # also incremented f_idx here, a no-op since enumerate rebinds it.)
        if 'ONE_FOLD' in hyperparam and f_idx != hyperparam['ONE_FOLD']:
            logger.info('Skipping...')
            continue

        df_ = df.copy()
        df_['is_valid'] = False
        df_.loc[valid_idx, 'is_valid'] = True

        # DEPRECATED!! BINARY MODEL PERFORMS BAD
        if hyperparam.get('BINARY', False):
            raise DeprecationWarning

        dls = create_dataloader(df_, hyperparam, logger)

        learn = create_learner(hyperparam, dls, model_id, f_idx, df_.loc[valid_idx], logger)

        learn.add_cb(GroupMetricsCallback(df_.loc[valid_idx]))

        if hyperparam.get('CHANGE_ACT', False):
            change_act(learn, nn.ReLU, nn.SiLU)

        if hyperparam.get('VALID_ONLY', False):
            ckpt = load_checkpoint(osp.join(MODEL_ROOT, f'model_{model_id}_{f_idx}_ac.pth'))
            learn.model.load_state_dict(ckpt)
        if hyperparam.get('SYNC_BN', False):
            # DEPRECATED: SyncBN conversion was removed.
            raise DeprecationWarning

        learn = learn.to_native_fp16()
        learn.to_parallel()
        try:
            if hyperparam.get('VALID_ONLY', False):
                valid_pred = learn.get_preds()
                torch.save(valid_pred, osp.join(MODEL_ROOT, f'model_{model_id}_{f_idx}_pred.pth'))
                if hyperparam.get('HOLDOUT', False):
                    assert test_df is not None
                    # Fixed: image_id was already mapped to absolute paths in
                    # create_dataset; the old code prepended a hard-coded
                    # '/home/ray/cas/train_images/' prefix on top of it.
                    test_dl = dls.test_dl(test_df['image_id'])
                    test_pred = learn.get_preds(dl=test_dl)
                    # Fixed: used to save valid_pred here, silently discarding
                    # the holdout predictions.
                    torch.save(test_pred, osp.join(MODEL_ROOT, f'model_{model_id}_{f_idx}_test_pred.pth'))
            else:
                if hyperparam.get('NATIVE_FINETUNE', False):
                    learn.fine_tune(hyperparam['EPOCHS'], hyperparam['LR'], freeze_epochs=hyperparam['FREEZE_EPOCHS'])
                else:
                    lr_mult = 100
                    pct_start = 0.3
                    div = 5.0
                    base_lr = hyperparam['LR']
                    # NO_DIS_LR: one flat LR; otherwise discriminative LRs
                    # via a slice across parameter groups.
                    if hyperparam.get('NO_DIS_LR', False):
                        learn.lr = base_lr
                    else:
                        learn.lr = slice(base_lr)
                    if hyperparam.get('FINETUNE', False):
                        # Short warm-up on the frozen backbone first.
                        learn.freeze()
                        learn.fit_one_cycle(hyperparam['FREEZE_EPOCHS'], learn.lr, pct_start=0.99)
                        learn.unfreeze()
                        if hyperparam.get('NO_DIS_LR', False):
                            learn.lr = base_lr
                        else:
                            learn.lr = slice(base_lr / 2 / lr_mult, base_lr)
                    learn.fit_one_cycle(hyperparam['EPOCHS'], learn.lr, pct_start=pct_start, div=div)
        except KeyboardInterrupt:
            print('KeyboardInterrupt')
            exit(0)
        except (EarlyStoppingException, nn.modules.module.ModuleAttributeError) as e:
            # NOTE(review): ModuleAttributeError was removed in newer torch
            # releases — confirm the pinned torch version still exposes it.
            print('Fine-tuning early stop')
            return learn

    return learn
