import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from loguru import logger
from utils.analyze_nll import analyze_anomaly_nll, get_best_fscore
import click
import json
import os


def detect_donut(train_df: pd.DataFrame, test_df: pd.DataFrame, kpi_id: str=None):
    """Train a Donut VAE on one KPI's training series and score train + test.

    Args:
        train_df: frame with 'timestamp', 'value', 'label' columns for one KPI.
        test_df: frame with 'timestamp', 'value' columns for the same KPI.
        kpi_id: identifier used only for naming the per-KPI JSON result file.

    Returns:
        (test_score, test_label, valid_score, valid_label) — per-point Donut
        scores (aligned to the non-missing points of each series) and 0/1
        anomaly labels thresholded at the best threshold found on train data.
    """
    # Heavy TF1-era dependencies are imported lazily so the module can be
    # imported (e.g. for its CLI help) without TensorFlow installed.
    from donut import complete_timestamp, standardize_kpi, Donut, DonutTrainer, DonutPredictor
    from tensorflow import keras as K
    import tensorflow as tf
    from tfsnippet.modules import Sequential

    # --- Load training data -------------------------------------------------
    train_timestamp, train_values, train_labels = \
        train_df['timestamp'].values.copy(), train_df['value'].values.copy(), train_df['label'].values.copy()

    print(f"{train_values.shape=}")

    # Fill gaps in the timestamp grid; `train_missing` flags inserted points.
    train_timestamp, train_missing, (train_values, train_labels) = \
        complete_timestamp(train_timestamp, (train_values, train_labels))
    print(f"{train_missing.shape=}")

    # Standardize excluding anomalous/missing points so they don't skew mean/std.
    train_values, mean, std = standardize_kpi(train_values, excludes=np.logical_or(train_labels, train_missing))
    print(f"{train_df['label'].shape=},{train_values.shape=},{np.sum(train_missing)=}")

    # --- Load test data (reuse the train mean/std for standardization) ------
    test_timestamp, test_values = test_df['timestamp'].values.copy(), test_df['value'].values.copy()
    print(f"{test_values.shape=}")

    test_labels = np.zeros_like(test_values, dtype=np.int32)  # labels unknown at test time
    test_timestamp, test_missing, (test_values, test_labels) = \
        complete_timestamp(test_timestamp, (test_values, test_labels))
    test_values, _, _ = standardize_kpi(test_values, mean=mean, std=std)

    print(f"{test_timestamp.shape=}, {test_missing.shape=}, {test_df['timestamp'].values.shape=}")

    # --- Build model (TF1 graph-mode API) -----------------------------------
    with tf.variable_scope('model') as model_vs:
        model = Donut(
            h_for_p_x=Sequential([
                K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                            activation=tf.nn.relu),
                K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                            activation=tf.nn.relu),
            ]),
            h_for_q_z=Sequential([
                K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                            activation=tf.nn.relu),
                K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                            activation=tf.nn.relu),
            ]),
            x_dims=120,
            z_dims=5,
        )

    trainer = DonutTrainer(model=model, model_vs=model_vs, max_epoch=30)
    predictor = DonutPredictor(model, n_z=128)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    # `with tf.Session(...)` installs the session as default AND closes it on
    # exit; the previous `.as_default()` form leaked the session (its context
    # manager only installs the default, it never closes).
    with tf.Session(config=config):
        logger.info('Training model...')
        trainer.fit(train_values, train_labels, train_missing, mean, std)
        print(f'{train_values.shape=}')

        logger.info('Get threshold by self...')
        # The predictor only scores points that have a full sliding window, so
        # its output is shorter than the series; left-pad with zeros to align,
        # then drop the filled-in missing points to match the original series.
        valid_score = np.zeros_like(train_values)
        valid_score_last = predictor.get_score(train_values, train_missing)
        valid_score[-len(valid_score_last):] = valid_score_last
        valid_score = valid_score[train_missing == 0]

        # Pick the threshold that maximizes F-score on the labeled train data
        # (Donut scores are log-likelihoods: lower = more anomalous, hence the
        # sign flip).
        print(f"{valid_score.shape=},{train_df['label'].values.shape}")
        eval_result = analyze_anomaly_nll(-valid_score, train_df['label'].values)
        eval_result.update({
            'test_count': len(test_values),
            'kpi_id': kpi_id
        })
        print(eval_result)

        os.makedirs('train_results', exist_ok=True)
        # Context manager guarantees the handle is flushed and closed (the
        # previous bare `open(...)` leaked it).
        with open(os.path.join('train_results', f'{kpi_id}.json'), 'wt') as f:
            json.dump(eval_result, f)

        logger.info('Detecting...')
        test_score = np.zeros_like(test_values)
        test_score_last = predictor.get_score(test_values, test_missing)
        test_score[-len(test_score_last):] = test_score_last
        test_score = test_score[test_missing == 0]

    print(f'{test_score.shape=}')

    test_label = (test_score < -eval_result['best_threshold']).astype(np.int64)

    valid_label = (valid_score < -eval_result['best_threshold']).astype(np.int64)

    return test_score, test_label, valid_score, valid_label


@click.command()
@click.option('--kpi-id', required=True, type=str)
def process(kpi_id):
    """Run Donut anomaly detection for one KPI and write per-KPI result CSVs.

    Reads data/train.csv and data/test.csv, selects the rows matching
    ``kpi_id``, runs :func:`detect_donut`, and writes predictions to
    results/<kpi_id>.csv (test) and train_results/<kpi_id>.csv (train).
    """
    train_df = pd.read_csv('data/train.csv', engine='c')
    test_df = pd.read_csv('data/test.csv', engine='c')

    # Deep-copy BOTH slices: we assign a new 'predict' column below, and
    # assigning through a view of the original frame raises pandas'
    # SettingWithCopyWarning and may silently fail to write. The train slice
    # previously lacked the copy.
    cur_train_df = train_df[train_df['KPI ID'] == kpi_id].copy(deep=True)
    cur_test_df = test_df[test_df['KPI ID'] == kpi_id].copy(deep=True)

    logger.info(f'Detecting KPI ID: {kpi_id}')
    test_score, test_label, valid_score, valid_label = detect_donut(cur_train_df, cur_test_df, kpi_id)

    cur_test_df['predict'] = test_label
    os.makedirs('results', exist_ok=True)
    cur_test_df.to_csv(os.path.join('results', f'{kpi_id}.csv'), index=False)

    cur_train_df['predict'] = valid_label
    os.makedirs('train_results', exist_ok=True)
    cur_train_df.to_csv(os.path.join('train_results', f'{kpi_id}.csv'), index=False)


if __name__ == '__main__':
    # click parses --kpi-id from the command line and invokes process(kpi_id).
    process()
    
