import torch
import numpy as np
import pandas as pd
import seaborn as sns
import torch.nn as nn
from model.bert import BERT, masked_position
from dataproc_npy_sanwei import TraceSet, load_yaml
from matplotlib import pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from torch.utils.data import DataLoader
from torch.nn.utils.rnn import pack_padded_sequence


def create_model(cfgs) -> BERT:
    """Instantiate a BERT model from the nested configuration dict.

    Reads hyper-parameters from cfgs['BERT'] and sequence settings from
    cfgs['Data'] and forwards them to the BERT constructor.
    """
    bert_cfg = cfgs['BERT']
    data_cfg = cfgs['Data']
    return BERT(
        loc_size=bert_cfg['loc_size'],
        dim_in=bert_cfg['dim_in'],
        dim_out=bert_cfg['dim_out'],
        n_heads=bert_cfg['n_heads'],
        n_layers=bert_cfg['n_layers'],
        dropout=bert_cfg['dropout'],
        max_len=data_cfg['traj_len'],
        scale=bert_cfg['scale'],
        max_targets=data_cfg['max_pair_targets'],
        position_embedding_size=bert_cfg['position_embedding_size'])


def load_model():
    """Build the model from configs.yaml and load the prediction checkpoint.

    Returns:
        (model, config): the BERT model in eval mode and the parsed config dict.
    """
    config = load_yaml('configs.yaml')
    model = create_model(config)
    # map_location='cpu' so a checkpoint saved on GPU still loads on a
    # CPU-only machine; the rest of this script runs on CPU anyway
    # (tensors are converted with .numpy() for plotting).
    state = torch.load(config['Train']['predict_model'], map_location='cpu')
    model.load_state_dict(state['module'])
    model.eval()
    return model, config


def torch_cov(input_vec):
    """Per-item sample covariance for a batch of sequences.

    Args:
        input_vec: tensor of shape (bn, seq, feat).

    Returns:
        Tensor of shape (bn, feat*feat): each batch item's covariance
        matrix (normalized by seq - 1), flattened row-major.
    """
    # Center each sequence around its per-feature mean.
    centered = input_vec - input_vec.mean(dim=1, keepdim=True)
    seq_len = centered.shape[1]
    # (feat, seq) @ (seq, feat) -> (feat, feat), divided by n-1.
    cov = torch.matmul(centered.transpose(1, 2), centered) / (seq_len - 1)
    return cov.flatten(start_dim=1)


def compute_loss(lossfn, losscov, mtm, ppd, target, length_indicator, pairs_target, mask_labels):
    """Compute the three losses: masked-trajectory (MTM), span-boundary (SBO) and covariance.

    Args:
        lossfn: element-wise loss for the MTM and SBO terms (e.g. SmoothL1Loss).
        losscov: loss for the covariance term (e.g. L1Loss).
        mtm: model reconstruction of the masked trajectory, or None.
        ppd: span-boundary (pair) predictions, or None.
        target: ground-truth trajectories, shape (bn, seq, feat).
        length_indicator: per-span lengths; 0 marks padding spans.
        pairs_target: ground-truth values for the masked spans.
        mask_labels: per-position mask labels, passed to masked_position().

    Returns:
        (mtm_loss, pairs_loss, cov_loss) — scalar tensors; a term is
        torch.tensor(0) when its prediction input is None.
    """
    posi = masked_position(mask_labels)

    # MTM loss: reconstruction error against the original trajectory,
    # evaluated on the masked positions only.
    mtm_loss = torch.tensor(0)
    if mtm is not None:
        mtm_loss = lossfn(mtm, target * posi)

    # SBO loss: flatten span targets, drop padding spans, then pack to
    # variable lengths so the loss only sees real time steps.
    mask = length_indicator != 0
    pairs_target = pairs_target.view(-1, pairs_target.size(2), pairs_target.size(3))
    pairs_target = pairs_target[mask]
    length_indicator = length_indicator[mask].cpu()
    pairs_target = pack_padded_sequence(input=pairs_target, lengths=length_indicator,
                                        batch_first=True, enforce_sorted=False)
    pairs_target = pairs_target.data
    pairs_loss = torch.tensor(0)
    if ppd is not None:
        pairs_loss = lossfn(ppd, pairs_target)

    # Covariance loss: compare the upper triangle of the covariance of
    # the original sequence with that of the reconstructed sequence.
    # BUGFIX: guard on mtm — the original dereferenced mtm here even
    # though the MTM branch above allows mtm to be None.
    cov_loss = torch.tensor(0)
    if mtm is not None:
        feat = target.shape[-1]  # was hard-coded to 3; derive from the data
        upper_triangular = torch.triu(torch.ones((feat, feat))).reshape(-1)
        zero = torch.tensor([0.0], dtype=torch.float)
        # Covariance of the original data, upper triangle only.
        cov_seq = torch.where(upper_triangular.bool(), torch_cov(target), zero)
        # Reconstruction: masked positions come from the model, unmasked
        # positions from the ground truth.
        prediction = mtm * posi + target * ~posi
        cov_dec = torch.where(upper_triangular.bool(), torch_cov(prediction), zero)
        # BUGFIX: use the dedicated covariance loss; `losscov` was
        # accepted but never used (lossfn was applied instead).
        cov_loss = losscov(cov_dec, cov_seq)

    return mtm_loss, pairs_loss, cov_loss


def convert_to_dataframe(mtm):
    """Flatten a (bn, seq, feat) tensor into a DataFrame of shape (bn*seq, feat)."""
    arr = mtm.detach().numpy()
    bn, seq, feat = arr.shape
    # Stack all sequences of the batch on top of each other.
    return pd.DataFrame(arr.reshape(bn * seq, feat))


def transform_trajectory_data(data, mtm_df, masked_data_label, len_data):
    """Transform trajectory data for plotting.

    Args:
        data: 2-D array of the ground-truth trajectory (lat, lon, geoaltitude).
        mtm_df: DataFrame of model reconstructions, one row per position.
            NOTE: mutated in place — its columns are renamed here.
        masked_data_label: 1-D tensor of per-position labels
            (assumed 1 = kept, 0 = masked, 2 = padding — TODO confirm).
        len_data: number of non-padding rows to keep.

    Returns:
        (data_df, shou_traj_df, mtm_df_data, combined_df, mtm_df_target)
    """
    cols = ['lat', 'lon', 'geoaltitude']
    mtm_df.columns = cols
    data_df = pd.DataFrame(data, columns=cols)

    labels = masked_data_label.numpy()
    kept_rows = pd.Series(labels == 1)
    masked_rows = pd.Series(labels == 0)

    # Ground-truth rows that were visible to the model.
    shou_traj_df = data_df[kept_rows]
    # Model predictions and their ground-truth counterparts at masked rows.
    mtm_df_data = mtm_df[masked_rows]
    mtm_df_target = data_df[masked_rows]

    # Drop padding rows, then splice predictions into the ground truth.
    data_df = data_df.iloc[:len_data]
    combined_df = pd.DataFrame(data_df.copy(), columns=cols)
    combined_df.loc[shou_traj_df.index] = shou_traj_df
    combined_df.loc[mtm_df_data.index] = mtm_df_data
    combined_df = combined_df.iloc[:len_data]

    return data_df, shou_traj_df, mtm_df_data, combined_df, mtm_df_target


def plot_trajectory(group, show=True, save_path=None):
    """
    Plot a 3D trajectory of a group.

    Parameters:
    group (pandas.DataFrame): The DataFrame containing the trajectory data
        (must have 'lat', 'lon' and 'geoaltitude' columns).
    show (bool): Whether to display the plot. Default is True.
    save_path (str): The path to save the plot. If None, the plot will not be saved. Default is None.

    Returns:
    matplotlib.figure.Figure: The created figure.
    """
    # Extract values from the group
    latitudes = group.lat.values
    longitudes = group.lon.values
    geoAltitudes = group.geoaltitude.values

    # Create a figure with 3d projection.
    # BUGFIX: fig.gca(projection='3d') was deprecated in matplotlib 3.4
    # and removed in 3.6; use add_subplot, consistent with
    # plot_Interpolation_trajectory below.
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_subplot(projection='3d')

    # Plot the 3D scatter plot
    plot = ax.scatter3D(longitudes, latitudes, geoAltitudes, s=2)

    # Add labels for axes
    ax.set_xlabel('Longitude')
    ax.set_ylabel('Latitude')
    ax.set_zlabel('GeoAltitude')

    # fig.colorbar(plot, label='Velocity (knot)', shrink=0.4)

    # Save before showing: with blocking GUI backends the figure can be
    # cleared once the window is closed, producing a blank saved image.
    if save_path is not None:
        fig.savefig(save_path)

    # Show the plot if required
    if show:
        plt.show()

    return fig


def plot_Interpolation_trajectory(group, new_masked_seq, masked_data, show=True, save_path=None):
    """
    Plot a 3D trajectory of the interpolated sequence and the masked points.

    Parameters:
    group (pandas.DataFrame): The DataFrame containing the original trajectory
        data (currently unused — kept for interface compatibility).
    new_masked_seq (pandas.DataFrame): The DataFrame containing the new masked sequence.
    masked_data (pandas.DataFrame): The DataFrame containing the masked data.
    show (bool): Whether to display the plot. Default is True.
    save_path (str): The path to save the plot. If None, the plot will not be saved. Default is None.

    Returns:
    matplotlib.figure.Figure: The created figure.
    """
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_subplot(projection='3d')

    # The masked/interpolated sequence in the default colour.
    ax.scatter3D(new_masked_seq.lon.values,
                 new_masked_seq.lat.values,
                 new_masked_seq.geoaltitude.values,
                 s=2)

    # The masked points overlaid in semi-transparent red.
    ax.scatter3D(masked_data.lon.values,
                 masked_data.lat.values,
                 masked_data.geoaltitude.values,
                 alpha=0.5, s=2, color='red')

    ax.set_xlabel('Longitude')
    ax.set_ylabel('Latitude')
    ax.set_zlabel('GeoAltitude')

    if show:
        plt.show()

    if save_path is not None:
        fig.savefig(save_path)

    return fig


def plotDataAndCov(data, data2):
    """Heatmaps of the covariance of two datasets and their difference.

    Draws three panels side by side: cov(data), cov(data2) on a shared
    colour scale, and their element-wise difference on its own scale.
    """
    ACov = np.cov(data, rowvar=False, bias=True)
    BCov = np.cov(data2, rowvar=False, bias=True)
    diff = ACov - BCov

    # Shared colour range for the two covariance panels.
    vmax = max(np.max(ACov), np.max(BCov))
    vmin = min(np.min(ACov), np.min(BCov))
    center = (vmax + vmin) / 2

    fig, ax = plt.subplots(nrows=1, ncols=3, figsize=(10, 5))

    cool = sns.color_palette("coolwarm", 10)
    warm = sns.color_palette("YlOrRd", 10)
    panels = [
        (ACov, cool, center, vmax, vmin, 'Covariance matrix A'),
        (BCov, cool, center, vmax, vmin, 'Covariance matrix B'),
        (diff, warm, 0, np.max(diff), np.min(diff), 'Difference'),
    ]

    for pos, (matrix, cmap, c, hi, lo, title) in enumerate(panels, start=1):
        axis = plt.subplot(1, 3, pos)
        hm = sns.heatmap(matrix, cmap=cmap, center=c, vmax=hi, vmin=lo,
                         ax=axis, square=True, cbar=False)
        axis.set_title(title)
        # Attach a colourbar the same height as the heatmap.
        cax = make_axes_locatable(axis).append_axes("right", size="5%", pad=0.05)
        plt.colorbar(hm.get_children()[0], cax=cax)

    plt.tight_layout()
    plt.show()

'''
SUPERSEDED: earlier two-panel version of plotDataAndCov, kept for reference only.
def plotDataAndCov(data, data2):
    ACov = np.cov(data, rowvar=False, bias=True)
    BCov = np.cov(data2, rowvar=False, bias=True)
    vmax = np.max([np.max(ACov), np.max(BCov)])
    vmin = np.min([np.min(ACov), np.min(BCov)])
    center = np.mean([np.mean(ACov), np.mean(BCov)])
    # print('Covariance matrix A:\n', ACov)
    # print('Covariance matrix B:\n', BCov)
    fig, ax = plt.subplots(nrows=1, ncols=2)
    fig.set_size_inches(10, 10)
    ax0 = plt.subplot(2, 2, 1)
    # Choosing the colors
    cmap = sns.color_palette("GnBu", 10)
    sns.heatmap(ACov, cmap=cmap, center=center, vmax=vmax, vmin=vmin)
    ax1 = plt.subplot(2, 2, 2)
    sns.heatmap(BCov, cmap=cmap, center=center, vmax=vmax, vmin=vmin)
    plt.show()
'''

def plotDataAndCov2(traj_mask_target_cov, traj_mtm_cov, traj_diff_cov):
    """Grid of heatmaps: target covariances (row 0), predicted covariances
    (row 1) and their differences (row 2), one column per span.

    Inputs are lists of torch tensors; conversion to numpy happens here.
    """
    targets = [t.detach().numpy() for t in traj_mask_target_cov]
    preds = [t.detach().numpy() for t in traj_mtm_cov]
    diffs = [t.detach().numpy() for t in traj_diff_cov]

    n_cols = len(targets)  # one column per span
    fig, axs = plt.subplots(nrows=3, ncols=n_cols, figsize=(10, 10))

    cool = sns.color_palette("coolwarm", 10)
    warm = sns.color_palette("YlOrRd", 10)

    # Shared colour range across targets and predictions so the first two
    # rows are directly comparable.
    vmax = max(np.max(d) for d in targets + preds)
    vmin = min(np.min(d) for d in targets + preds)

    for col, cov in enumerate(targets):
        ax = axs[0, col]
        ax.set_title(f'span_{col+1}')
        sns.heatmap(cov, ax=ax, cmap=cool, center=np.mean(cov), vmax=vmax, vmin=vmin)

    for col, cov in enumerate(preds):
        ax = axs[1, col]
        ax.set_title(f'span_prediction_{col+1}')
        sns.heatmap(cov, ax=ax, cmap=cool, center=np.mean(cov), vmax=vmax, vmin=vmin)

    # Differences use their own per-panel colour range.
    for col, cov in enumerate(diffs):
        ax = axs[2, col]
        ax.set_title(f'differences_{col+1}')
        sns.heatmap(cov, ax=ax, cmap=warm, center=np.mean(cov),
                    vmax=np.max(cov), vmin=np.min(cov))

    plt.tight_layout()
    plt.show()


def group_covariances(cov_mask_target, cov_mtm, cov_diff, pairs_mask_sum):
    """Split three covariance collections into consecutive groups.

    Args:
        cov_mask_target: covariance matrices of the mask targets.
        cov_mtm: covariance matrices of the predictions.
        cov_diff: element-wise differences of the above.
        pairs_mask_sum: 1-D sequence of group sizes; consecutive slices of
            that length are taken from each input in order.

    Returns:
        Three lists of slices, one per group, in the same order as
        pairs_mask_sum.
    """
    grouped_targets = []
    grouped_mtm = []
    grouped_diff = []
    start = 0
    for count in pairs_mask_sum:
        stop = start + count
        grouped_targets.append(cov_mask_target[start:stop])
        grouped_mtm.append(cov_mtm[start:stop])
        grouped_diff.append(cov_diff[start:stop])
        start = stop
    return grouped_targets, grouped_mtm, grouped_diff


if __name__ == "__main__":
    # Which batch of the loader and which trajectory within it to visualise.
    batch_index = 5
    traj_index = 21  # 8-19 is problematic; 8-40 / 9-16 / 11-22 / 13-22 are fairly complex
    model, cfgs = load_model()
    # NOTE(review): configs.yaml is reloaded here, discarding the config
    # returned by load_model(); the path/batch overrides below apply only
    # to this fresh copy — confirm this is intentional.
    cfgs = load_yaml('configs.yaml')
    cfgs['Data']['data_root'] = 'D:/pythonProject/粒子滤波/ADS-B/Tset-npy'
    cfgs['Data']['norm_file_path'] = 'D:/pythonProject/粒子滤波/Bert_NonDec/result_test.csv'
    cfgs['Data']['batch_size'] = 512
    # NOTE(review): redundant — load_model() already calls model.eval().
    model.eval()
    dataset = TraceSet(cfgs, phase='train')
    # NOTE(review): the loader reads cfgs['Train']['batch_size'], not the
    # cfgs['Data']['batch_size'] = 512 set above — verify which is intended.
    train_loader = DataLoader(dataset, shuffle=True, batch_size=cfgs['Train']['batch_size'],
                              num_workers=cfgs['Train']['n_worker'], pin_memory=True, collate_fn=dataset.collate_fn)

    # Grab the batch_index-th batch only.
    # NOTE(review): if batch_index >= len(train_loader) the loop completes
    # without binding these names and the code below raises NameError.
    for i, batch in enumerate(train_loader):
        if i == batch_index:
            masked_sentence, target, pairs, length_indicator, binary_mask, pairs_mask, masked_label, pairs_target = batch
            break  # stop iterating once the n-th batch is found

    # print(target[9])

    # Forward pass: masked-trajectory reconstruction and span-pair predictions.
    mtm, pairs_predictions = model(masked_sentence, pairs, length_indicator, binary_mask, masked_label, pairs_mask)

    MTM_loss, SBO_loss, COV_loss = compute_loss(nn.SmoothL1Loss(), nn.L1Loss(), mtm, pairs_predictions, target,
                                                    length_indicator, pairs_target, masked_label)
    print("MTM_loss:", MTM_loss.item())
    print("SBO_loss:", SBO_loss.item())
    print("COV_loss:", COV_loss.item()*100)
    # Note: batch 13 has a rather large covariance loss.
    pairs_mask_sum = pairs_mask.sum(dim=-1)

    # Select the single trajectory to plot and its per-position mask labels.
    traj_data = target[traj_index].numpy()
    mtm = mtm[traj_index]
    mtm_df = pd.DataFrame(mtm.detach().numpy())
    masked_label = masked_label[traj_index]
    pd.set_option('display.max_columns', None)
    pd.set_option('display.max_rows', None)
    columns = ['lat', 'lon', 'geoaltitude']
    traj_df = pd.DataFrame(traj_data, columns=columns)
    geoaltitude = traj_df['geoaltitude']
    masked_label = masked_label.squeeze()
    # Effective length = configured length minus padding positions
    # (label 2 presumably marks padding — TODO confirm).
    len_traj = cfgs['Data']['traj_len'] - torch.sum(torch.eq(masked_label, 2)).item()
    data_df, shou_traj_df, mtm_df_data, combined_df, mtm_df_target = transform_trajectory_data(traj_data, mtm_df,
                                                                                masked_label, len_traj)

    plot_trajectory(data_df)
    plot_Interpolation_trajectory(data_df, shou_traj_df, mtm_df_data)
    # plotDataAndCov(data_df, combined_df)
    # plotDataAndCov2(traj_mask_target_cov, traj_mtm_cov, traj_diff_cov)