import math
import os
import datetime

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from osgeo import gdal
from sklearn.metrics import mean_squared_error, r2_score
from scipy.stats.stats import pearsonr

# Enable Chinese text in matplotlib figures
plt.rcParams['font.sans-serif'] = ['SimHei']  # font able to render Chinese axis labels/titles
plt.rcParams['axes.unicode_minus'] = False  # render the minus sign correctly with this font


def coord2pixel(geo_transform, x_geo, y_geo):
    """Convert a geographic coordinate to raster (column, row) pixel indices.

    :param geo_transform: GDAL 6-element geotransform
        (origin_x, pixel_width, 0, origin_y, 0, pixel_height); pixel_height
        is negative for north-up rasters
    :param x_geo: longitude / easting of the point
    :param y_geo: latitude / northing of the point
    :return: (x, y) = (column, row) pixel indices

    Uses floor() rather than int(): int() truncates toward zero, so a point
    just outside the raster origin would wrongly land in pixel 0 instead of
    a negative (out-of-range) index.
    """
    x = math.floor((x_geo - geo_transform[0]) / geo_transform[1])
    y = math.floor((y_geo - geo_transform[3]) / geo_transform[5])
    return x, y


# Build the list of dates between two endpoints (inclusive)
def get_date_list(start_date, end_date, format):
    """Return every date from start_date to end_date (inclusive), formatted.

    :param start_date: first date (datetime/date, or a string matching *format*)
    :param end_date: last date (datetime/date, or a string matching *format*)
    :param format: strftime/strptime pattern, e.g. '%Y%m%d'
        (parameter name shadows the builtin ``format`` but is kept for
        interface compatibility with existing callers)
    :return: list of formatted date strings, one per day

    Fix: string inputs were previously parsed with a hardcoded '%Y%m%d'
    regardless of *format*; they are now parsed with *format* itself, which
    is backward compatible (all existing callers pass '%Y%m%d').
    """
    if isinstance(start_date, str) and isinstance(end_date, str):
        start_date = datetime.datetime.strptime(start_date, format)
        end_date = datetime.datetime.strptime(end_date, format)
    date_list = [start_date.strftime(format)]
    while start_date < end_date:
        start_date += datetime.timedelta(days=1)
        date_list.append(start_date.strftime(format))
    return date_list


# Compute Pearson correlation coefficient R
def computed_R(y_true, y_pred):
    """Pearson correlation coefficient R between two sequences.

    :param y_true: observed values (array-like)
    :param y_pred: predicted values (array-like)
    :return: Pearson R in [-1, 1]

    Fix: the previous numerator summed the *product of squared* deviations
    (sum(dt**2 * dp**2)), which is not Pearson's R. The correct numerator is
    the sum of cross-products of the deviations, sum(dt * dp).
    """
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    dev_true = y_true - y_true.mean()
    dev_pred = y_pred - y_pred.mean()
    numerator = np.sum(dev_true * dev_pred)
    denominator = (np.sum(dev_true ** 2) * np.sum(dev_pred ** 2)) ** 0.5
    return numerator / denominator


# Pair in-situ measurements with the co-located downscaled values
def read_data(sm_measure_path, sm_donwscal_dir,
              output_path=r"E:\data\downscaling SM\sm_measure_downscale_mask_filter-1nd.csv"):
    """Join in-situ soil-moisture measurements with the downscaled raster value
    at each station location, one raster per study date, and save as CSV.

    :param sm_measure_path: CSV of in-situ measurements (uses columns
        'station', 'date', 'lon', 'lat', 'end_deep', 'mean_value')
    :param sm_donwscal_dir: directory of daily downscaled GeoTIFFs; the i-th
        file (sorted) must correspond to the i-th study date
    :param output_path: destination CSV; default kept for backward
        compatibility with the previous hardcoded path

    Fixes: the output path is now a parameter instead of a hardcoded
    constant, and the file list is sorted — os.listdir() order is arbitrary,
    which could misalign rasters with dates.
    """
    date_list = get_date_list("20130726", "20130824", '%Y%m%d')
    sm_measure = pd.read_csv(sm_measure_path, sep=",")
    # Sort so the i-th raster lines up with the i-th date deterministically
    sm_downscale_list = sorted(f for f in os.listdir(sm_donwscal_dir) if f.endswith('.tif'))
    sm_measure_downscale = pd.DataFrame(columns=('station', 'date', 'lon', 'lat', 'SM_4cm', 'downscale'))
    # One downscaled raster per study date
    for i, sm_downscale_file in enumerate(sm_downscale_list):
        # Read the downscaled raster and its geotransform with GDAL
        downscale_data = gdal.Open(os.path.join(sm_donwscal_dir, sm_downscale_file))
        downscale_array = downscale_data.ReadAsArray()
        downscale_geot = downscale_data.GetGeoTransform()

        date = date_list[i]
        # Measurements for this date in the 0-0.05 m depth layer
        now_data = sm_measure[(sm_measure['date'] == int(date)) & (sm_measure['end_deep'] == 0.05)]
        # One row per station
        for index, row in now_data.iterrows():
            # Station position and in-situ value
            station = row['station']
            lon = row['lon']
            lat = row['lat']
            mean_value = row['mean_value']
            # Raster pixel containing the station
            x, y = coord2pixel(downscale_geot, lon, lat)
            downscale_value = downscale_array[y, x]

            print(station, date, lon, lat, mean_value, downscale_value)
            sm_measure_downscale.loc[len(sm_measure_downscale)] = [station, date, lon, lat, mean_value, downscale_value]
    sm_measure_downscale.to_csv(output_path, index=False)


# Overall fit analysis across all stations and dates
def fitting_analysis(result_path):
    """Scatter-plot downscaled vs. in-situ soil moisture with R and RMSE.

    :param result_path: CSV with 'measure' and 'downscale' columns

    Fix: mean_squared_error() returns the MSE, but the previous code showed
    that value under the "RMSE" label; the square root is now taken so the
    displayed number really is the RMSE.
    """
    sm_measure_downscale = pd.read_csv(result_path)
    # In-situ measurements
    sm_measure_value = sm_measure_downscale['measure']
    # Downscaled estimates
    sm_downscale_value = sm_measure_downscale['downscale']
    # Pearson correlation coefficient R (with its p-value)
    r, p_value = pearsonr(sm_measure_value, sm_downscale_value)
    # Root-mean-square error (sqrt of sklearn's MSE)
    rmse = math.sqrt(mean_squared_error(sm_measure_value, sm_downscale_value))
    # Scatter plot of measured vs. downscaled values
    plt.scatter(sm_measure_value, sm_downscale_value)
    plt.xlabel('实测值')
    plt.ylabel('降尺度值')
    plt.title('实测值与降尺度值散点图_filter')
    plt.text(x=0.5, y=0.5, s=f'R：{r:.3f}\nRMSE：{rmse:.3f}', fontdict={'size': 15, 'color': 'red'})
    # Identical axis ranges so the 1:1 line is meaningful
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    # 1:1 reference line (dashed)
    plt.plot(np.arange(0, 1, 0.01), np.arange(0, 1, 0.01), color='black', linestyle='--')
    plt.show()


# Per-date fit analysis (one subplot per study date)
def fitting_analysis_each(result_path):
    """Scatter-plot downscaled vs. in-situ soil moisture, one panel per date.

    :param result_path: CSV with 'date', 'measure' and 'downscale' columns

    Fix: mean_squared_error() returns the MSE, but the previous code showed
    that value under the "RMSE" label; the square root is now taken so the
    displayed number really is the RMSE.
    """
    # Study-period dates
    date_list = get_date_list("20130726", "20130824", '%Y%m%d')
    # All station data
    sm_measure_downscale = pd.read_csv(result_path)
    plt.figure(figsize=(20, 25))
    plt.subplots_adjust(hspace=0.25, wspace=0.35)
    for i, date in enumerate(date_list):
        # Rows for this date only
        sm_measure_downscale_date = sm_measure_downscale[sm_measure_downscale['date'] == int(date)]
        # In-situ measurements
        sm_measure_value = sm_measure_downscale_date['measure']
        # Downscaled estimates
        sm_downscale_value = sm_measure_downscale_date['downscale']
        # Pearson correlation coefficient R (with its p-value)
        r, p_value = pearsonr(sm_measure_value, sm_downscale_value)
        # Root-mean-square error (sqrt of sklearn's MSE)
        rmse = math.sqrt(mean_squared_error(sm_measure_value, sm_downscale_value))
        # One panel per date in a 6x5 grid
        plt.subplot(6, 5, i + 1)
        plt.scatter(sm_measure_value, sm_downscale_value)
        plt.xlabel('实测值')
        plt.ylabel('降尺度值')
        plt.title('{}'.format(date))
        plt.text(x=0.5, y=0.5, s=f'R：{r:.3f}\nRMSE：{rmse:.3f}', fontdict={'size': 15, 'color': 'red'})
        # 1:1 reference line (dashed)
        plt.plot(np.arange(0, 1, 0.01), np.arange(0, 1, 0.01), color='black', linestyle='--')
    plt.show()


# Average all station measurements that fall inside the same raster pixel
def station_data_in_pixel(measure_data_path, downscal_result_path, date):
    """Average co-located station measurements per pixel and pair each pixel
    mean with the downscaled value of that pixel.

    :param measure_data_path: Excel file with columns 'date', 'Longitude',
        'Latitude', 'SM_4cm'
    :param downscal_result_path: downscaled GeoTIFF for *date*
    :param date: date string such as '20120726' (compared as int to the
        'date' column)
    :return: (measured pixel means, downscaled pixel values) as numpy arrays,
        in first-seen pixel order

    Fix: duplicates were previously merged with a running pairwise average
    ((old + new) / 2), which over-weights later samples when three or more
    stations share a pixel; values are now collected and averaged once.
    """
    downscale_data = gdal.Open(downscal_result_path)
    downscale_array = downscale_data.ReadAsArray()
    downscale_geot = downscale_data.GetGeoTransform()
    measure_data = pd.read_excel(measure_data_path)
    # (row, col) pixel -> list of in-situ values measured inside that pixel
    pixel_values = {}
    for i, measure in measure_data.iterrows():
        # 'date' column holds ints, so compare against int(date)
        if measure['date'] == int(date):
            lon, lat, measure_value = measure['Longitude'], measure['Latitude'], measure['SM_4cm']
            x, y = coord2pixel(downscale_geot, lon, lat)
            pixel_values.setdefault((y, x), []).append(measure_value)
    # True mean of every value in the pixel (unbiased for any duplicate count)
    measure_pixel_value = [float(np.mean(values)) for values in pixel_values.values()]
    # Downscaled value of each distinct pixel, in the same order
    downscale_pixel_value = [downscale_array[key] for key in pixel_values]
    return np.array(measure_pixel_value), np.array(downscale_pixel_value)


# 2012195-2012229 (day-of-year file ids)
# 20120713-20120816 (calendar dates)
# Correlation analysis between in-situ and downscaled soil moisture
def corr_analysis_new(measure_data_path, downscal_result_dir):
    """Overall and per-day correlation between pixel-averaged in-situ soil
    moisture and the downscaled estimates.

    :param measure_data_path: Excel file of in-situ measurements
        (see station_data_in_pixel)
    :param downscal_result_dir: directory holding
        '<dayofyear>_mask_mask_out.tif' rasters

    Fixes: RMSE is now sqrt(MSE) — the previous code displayed the raw MSE
    under the "RMSE" label (and computed the true RMSE separately as
    "RMSE2"); the subplot column count was `n % 6`, which is 0 (an invalid
    grid) whenever n is a multiple of 6, and is now `min(n, 5)`.
    """
    # Calendar dates of the study period
    date_list = get_date_list("20120713", "20120816", '%Y%m%d')
    # Day-of-year ids used in the raster file names (aligned with date_list)
    day_list = [i for i in range(2012195, 2012230)]
    # Dates for which downscaling results exist
    have_date = ['20120726', '20120802']
    total_measure_data = []
    total_downscale_data = []
    each_day_measure_data = []
    each_day_downscale_data = []
    for i, day in enumerate(day_list):
        if date_list[i] in have_date:
            downscal_result_path = os.path.join(downscal_result_dir, "{}_mask_mask_out.tif".format(day))
            measure_data, downscale_data = station_data_in_pixel(measure_data_path, downscal_result_path, date_list[i])
            each_day_measure_data.append(measure_data)
            each_day_downscale_data.append(downscale_data)
            total_measure_data.extend(measure_data)
            total_downscale_data.extend(downscale_data)
    total_measure_data = np.array(total_measure_data)
    total_downscale_data = np.array(total_downscale_data)

    # Overall correlation across all available days
    plt.figure(1)
    # Pearson correlation coefficient R (with its p-value)
    r, p_value = pearsonr(total_measure_data, total_downscale_data)
    # Root-mean-square error (sqrt of sklearn's MSE)
    rmse = math.sqrt(mean_squared_error(total_measure_data, total_downscale_data))
    plt.scatter(total_measure_data, total_downscale_data)
    plt.xlabel('实测值')
    plt.ylabel('降尺度值')
    plt.title('{}'.format('总体相关性分析'))
    plt.text(x=0.5, y=0.5, s=f'R：{r:.3f}\nRMSE：{rmse:.3f}', fontdict={'size': 15, 'color': 'red'})
    # 1:1 reference line (dashed)
    plt.plot(np.arange(0, 1, 0.01), np.arange(0, 1, 0.01), color='black', linestyle='--')
    plt.show()

    # Per-day correlation, one subplot per available date
    plt.figure(2)
    n_days = len(each_day_measure_data)
    subplot_rows = math.ceil(n_days / 5)
    # `n % 6` would be 0 (invalid) for multiples of 6; cap columns at 5
    subplot_cols = min(n_days, 5)
    for i, date in enumerate(have_date):
        r, p_value = pearsonr(each_day_measure_data[i], each_day_downscale_data[i])
        # Root-mean-square error (sqrt of sklearn's MSE)
        rmse = math.sqrt(mean_squared_error(each_day_measure_data[i], each_day_downscale_data[i]))
        plt.subplot(subplot_rows, subplot_cols, i + 1)
        plt.scatter(each_day_measure_data[i], each_day_downscale_data[i])
        plt.xlabel('实测值')
        plt.ylabel('降尺度值')
        plt.title('{}'.format(date))
        plt.text(x=0.5, y=0.5, s=f'R：{r:.3f}\nRMSE：{rmse:.3f}', fontdict={'size': 15, 'color': 'red'})
        # 1:1 reference line (dashed)
        plt.plot(np.arange(0, 1, 0.01), np.arange(0, 1, 0.01), color='black', linestyle='--')
    plt.show()




if __name__ == '__main__':
    # Correlation analysis of the 2012 campaign: in-situ Excel measurements
    # vs. the downscaled rasters in the result directory.
    measure_data_path = r"G:\test\In_situ_sm.xlsx"
    downscal_result_dir = r"G:\test\test02"
    corr_analysis_new(measure_data_path, downscal_result_dir)

