import sys
import joblib
from matplotlib import pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from torch.utils.data import Dataset
from orbitP.script.timefeatures import time_features, extract_time_features
from orbitP.script import config
from orbitP.script.util import *
from scipy.signal import savgol_filter
from datetime import datetime, timedelta
import numpy as np
import os
from tqdm import tqdm
import pandas as pd
import torch

# Alternate short window kept for quick debugging runs:
# beginDate = datetime.strptime("2025-03-14","%Y-%m-%d")
# endDate = datetime.strptime("2025-03-15","%Y-%m-%d")
# Inclusive [beginDate, endDate] window; every loader below skips daily
# files whose date falls outside this range.
beginDate = datetime.strptime("2023-06-01","%Y-%m-%d")
endDate = datetime.strptime("2025-08-01","%Y-%m-%d")

def get_dis(dataMOEORBDir, dataPOEORBDir):
    """Compute per-day error statistics between MOE and POE orbit products.

    For every daily CSV in ``dataMOEORBDir`` (named ``YYYY-MM-DD.csv``) inside
    the module-level [beginDate, endDate] window, the same-named file in
    ``dataPOEORBDir`` is read and the first ``config.outputSize`` columns are
    differenced (MOE - POE) to form the error series.

    Returns:
        np.ndarray of shape (n_days, 3) with rows [fileDate, disAvg, disNxt]:
        disAvg is the mean absolute first difference of the first error
        column within the day; disNxt is the jump between the previous day's
        last value and the current day's first value (0 for the first day).
    """
    disData = []
    filesOrekitPath = get_file_paths(dataMOEORBDir)
    t_bar = tqdm(filesOrekitPath, total=len(filesOrekitPath))
    disLast = None  # last error value of the previously processed day
    for fileOrekitPath in t_bar:
        fileName = os.path.basename(fileOrekitPath).replace(".csv", "")
        fileDate = datetime.strptime(fileName, "%Y-%m-%d")
        t_bar.set_description(fileName)
        if fileDate < beginDate or fileDate > endDate:
            continue
        filePOEORBPath = dataPOEORBDir + os.path.basename(fileOrekitPath)

        df_Orekit = pd.read_csv(fileOrekitPath)
        df_POEORB = pd.read_csv(filePOEORBPath)
        # NOTE: the original also computed time_features() here, but the
        # result was never used — removed as dead work.

        date_Orbit = df_Orekit[df_Orekit.columns.drop('date')].to_numpy()
        date_POEORB = df_POEORB[df_POEORB.columns.drop('date')].to_numpy()

        # Prepend the MOE-POE error columns to the raw orbit state.
        error = date_Orbit[:, :config.outputSize] - date_POEORB[:, :config.outputSize]
        date_Orbit = np.hstack((error, date_Orbit))

        disAvg = np.mean(np.abs(np.diff(date_Orbit[:, 0])))
        disNxt = 0
        if disLast is not None:
            disNxt = disLast - date_Orbit[0][0]  # day-boundary discontinuity
        disLast = date_Orbit[-1][0]
        disData.append([fileDate, disAvg, disNxt])

    disData = np.vstack(disData)
    return disData

def get_orbitData_Orekit(dataOrekitDir, dataPOEORBDir):
    """Collect Orekit-vs-POEORB error features and time stamps for all days
    inside the module-level [beginDate, endDate] window.

    Returns:
        (orbitData, stampData): row-stacked arrays of [error | raw state]
        features and of minute-resolution time features.
    """
    orbit_rows = []
    stamp_rows = []
    file_paths = get_file_paths(dataOrekitDir)
    progress = tqdm(file_paths, total=len(file_paths))
    for orekit_path in progress:
        day_name = os.path.basename(orekit_path).replace(".csv", "")
        day_date = datetime.strptime(day_name, "%Y-%m-%d")
        progress.set_description(day_name)
        if not (beginDate <= day_date <= endDate):
            continue
        poeorb_path = dataPOEORBDir + os.path.basename(orekit_path)

        df_orekit = pd.read_csv(orekit_path)
        df_poeorb = pd.read_csv(poeorb_path)
        # Time features normalized to [-0.5, 0.5], minute resolution.
        stamps = time_features(pd.to_datetime(df_orekit['date'].values), freq='min')
        stamps = stamps.transpose(1, 0)

        orekit_vals = df_orekit[df_orekit.columns.drop('date')].to_numpy()
        poeorb_vals = df_poeorb[df_poeorb.columns.drop('date')].to_numpy()

        # Prepend the error columns to the raw orbit state.
        err = orekit_vals[:, :config.outputSize] - poeorb_vals[:, :config.outputSize]
        orbit_rows.append(np.hstack((err, orekit_vals)))
        stamp_rows.append(stamps)

    # Merge the per-day 2-D arrays by rows.
    return np.vstack(orbit_rows), np.vstack(stamp_rows)

def loadOrbitData(filesOrbitPath):
    """Index reference-orbit rows by epoch and satellite.

    Each file is named "<YYYY-MM-DD>_<sat_id>.csv"; its first column holds
    epoch strings and the remaining columns the orbit features.  Files are
    kept if they fall within [beginDate - 1 day, endDate + 1 day].

    Returns:
        dict mapping "<epoch>_<sat_id>" -> 1-D feature row (np.ndarray).
    """
    orbit_by_epoch = {}
    lo = beginDate - timedelta(days=1)
    hi = endDate + timedelta(days=1)
    for path in filesOrbitPath:
        stem = os.path.basename(path).replace(".csv", "")
        date_part = stem.split("_")[0]   # file date
        sat_id = stem.split("_")[1]      # satellite identifier
        file_date = datetime.strptime(date_part, "%Y-%m-%d")
        if file_date < lo or file_date > hi:
            continue
        frame = pd.read_csv(path)
        epochs = frame.iloc[:, 0].to_numpy()     # epoch strings
        features = frame.iloc[:, 1:].to_numpy()  # feature columns
        for epoch, row in zip(epochs, features):
            orbit_by_epoch[epoch + "_" + sat_id] = row
    return orbit_by_epoch

def loadSunData(dataFinal):
    """Compute the ECI sun position for every distinct epoch in dataFinal.

    Keys of ``dataFinal`` look like "<YYYY-MM-DD HH:MM:SS>_<sat_id>"; the
    epoch part is parsed once per unique timestamp.

    Returns:
        dict mapping "YYYY-MM-DD HH:MM:SS" -> sun ECI position.
    """
    fmt = "%Y-%m-%d %H:%M:%S"
    unique_epochs = {datetime.strptime(key.split("_")[0], fmt) for key in dataFinal.keys()}
    return {epoch.strftime(fmt): get_sunPosition(epoch) for epoch in unique_epochs}

def loadOMNIData(freq="15min", omniPath=None):
    """Load Kp / F10.7 space-weather indices and resample them onto a grid.

    Args:
        freq: pandas resample frequency for the output grid (default 15 min).
        omniPath: optional path to the whitespace-separated OMNI file with
            columns [Year, DOY, Hour, Kp, F10.7]; defaults to config.omniPath.

    Returns:
        dict mapping "YYYY-MM-DD HH:MM:SS" -> [Kp, F10.7], where F10.7 is
        linearly time-interpolated and Kp is forward-filled (a 3-hourly Kp
        value persists until the next observation).
    """
    if omniPath is None:
        omniPath = config.omniPath
    df_omni = pd.read_csv(
        omniPath,
        sep=r'\s+',  # raw string: '\s' is an invalid escape in a plain literal
        header=None,
        names=["Year", "DOY", "Hour", "Kp", "F10.7"])
    # Year + day-of-year + hour -> datetime.
    df_omni["datetime"] = df_omni.apply(
        lambda row: datetime(int(row["Year"]), 1, 1) +
                    timedelta(days=row["DOY"] - 1, hours=row["Hour"]),
        axis=1
    )
    df_omni = df_omni[["datetime", "Kp", "F10.7"]].sort_values("datetime").reset_index(drop=True)
    df_omni = df_omni.set_index("datetime")

    # F10.7: linear time interpolation onto the resampled grid.
    f107_interp = (
        df_omni["F10.7"]
        .resample(freq)
        .interpolate(method="time")
    )

    # Kp: forward fill onto the same grid.
    kp_ffill = (
        df_omni["Kp"]
        .resample(freq)
        .ffill()
    )

    # Both series share the same resampled index, so positional alignment
    # via .values is safe here.
    df_resampled = pd.DataFrame({
        "datetime": f107_interp.index,
        "Kp": kp_ffill.values,
        "F10.7": f107_interp.values
    })

    omni_dict = {
        row["datetime"].strftime("%Y-%m-%d %H:%M:%S"): [row["Kp"], row["F10.7"]]
        for _, row in df_resampled.iterrows()
    }
    return omni_dict

def dataImprove(orbitDataSet):
    """Average each sample's error series with the 6 h- and 12 h-later
    overlapping samples of the same satellite.

    A sample covers 96 epochs; the sample issued 6 h later overlaps its last
    72 epochs (offset 24) and the one issued 12 h later overlaps its last 48
    epochs (offset 48).  Overlapping regions of the first ``config.axis``
    columns are summed and divided by the number of contributors.

    Args:
        orbitDataSet: dict "<date>_<sat>" -> [obsArr, prdArr, obsStamp, prdStamp].

    Returns:
        New dict of the same shape with the averaged obs arrays.
    """
    newOrbitDateSet = {}
    for fileName in orbitDataSet:
        fileDate = fileName.split("_")[0]
        sat_id = fileName.split("_")[1]
        fileDate = datetime.strptime(fileDate, "%Y-%m-%d %H-%M-%S")
        nxtFileName = (fileDate + timedelta(hours=6)).strftime("%Y-%m-%d %H-%M-%S") + "_" + sat_id
        nnxtFileName = (fileDate + timedelta(hours=12)).strftime("%Y-%m-%d %H-%M-%S") + "_" + sat_id

        # Copy so the caller's arrays are not mutated in place (the original
        # accumulated directly into orbitDataSet's data).
        merged = orbitDataSet[fileName][0].copy()
        # Contributor count per epoch; derive the length from the data
        # instead of hard-coding 96.
        counts = np.ones(merged.shape[0])
        if nxtFileName in orbitDataSet:
            merged[24:, :config.axis] += orbitDataSet[nxtFileName][0][:-24, :config.axis]
            counts[24:] += 1
        if nnxtFileName in orbitDataSet:
            # Bug fix: the original indexed orbitDataSet[nxtFileName] here,
            # re-adding the 6 h sample instead of the 12 h one.
            merged[48:, :config.axis] += orbitDataSet[nnxtFileName][0][:-48, :config.axis]
            counts[48:] += 1
        merged[:, :config.axis] /= counts[:, np.newaxis]
        newOrbitDateSet[fileName] = [merged, orbitDataSet[fileName][1],
                                     orbitDataSet[fileName][2], orbitDataSet[fileName][3]]

    return newOrbitDateSet

def sampleMerge(orbitDataSet, mergeDays=1):
    """Extend each sample's observation segment with the previous days' data.

    A sample is kept only if all ``mergeDays`` preceding daily samples of the
    same satellite exist; their obs arrays (index 0) and obs time features
    (index 2) are prepended in chronological order.

    Args:
        orbitDataSet: dict "<date>_<sat>" -> [obsArr, prdArr, obsStamp, prdStamp].
        mergeDays: number of preceding days required and merged.

    Returns:
        (merged_set, kept_names): the filtered/extended dict and the list of
        surviving sample names.
    """
    merged_set = {}
    kept_names = []
    for name, entry in orbitDataSet.items():
        date_str = name.split("_")[0]
        sat_id = name.split("_")[1]
        base_date = datetime.strptime(date_str, "%Y-%m-%d %H-%M-%S")
        obs_parts = [entry[0]]       # index 0: obs error data
        stamp_parts = [entry[2]]     # index 2: obs time features
        complete = True
        for back in range(1, 1 + mergeDays):
            prev_date = base_date - timedelta(days=back)
            prev_name = prev_date.strftime("%Y-%m-%d %H-%M-%S") + "_" + sat_id
            if prev_name not in orbitDataSet:
                complete = False
                break
            # Prepend so earlier days come first in the merged segment.
            obs_parts.insert(0, orbitDataSet[prev_name][0])
            stamp_parts.insert(0, orbitDataSet[prev_name][2])
        if complete:
            merged_set[name] = [np.concatenate(obs_parts, axis=0), entry[1],
                                np.concatenate(stamp_parts, axis=0), entry[3]]
            kept_names.append(name)
    return merged_set, kept_names

def dataSplit(orbitDataSet):
    """Row-stack each of the four per-sample arrays across all samples.

    Args:
        orbitDataSet: dict whose values are 4-element lists of 2-D arrays.

    Returns:
        Four arrays: the concatenation of element 0, 1, 2 and 3 respectively
        over all samples, in dict iteration order.
    """
    samples = list(orbitDataSet.values())
    return tuple(
        np.concatenate([sample[i] for sample in samples], axis=0)
        for i in range(4)
    )

def loadObsANDPrdData(dataObsDir,dataPrdDir,dataRapid,dataFinal,dataSun,dataOMNI,turnRSW=True,useMerge=True,mergeDays=1,useError="Final"):
    """Build paired observation/prediction samples from daily orbit CSVs.

    For each obs CSV in dataObsDir (named "<date>_<sat>.csv") the matching
    prediction CSV in dataPrdDir is read.  Per-epoch errors are computed
    against dataFinal (or dataRapid, per `useError`), augmented with OMNI
    space-weather values and sun/beta-angle features, optionally rotated
    from ECI to RSW, and tagged with the satellite index.  A whole sample is
    discarded (counted in euclidDelt) if it contains the 999999.999999
    missing-value sentinel, if any epoch is absent from the reference dicts,
    or if any error magnitude exceeds euclidLimit.

    Returns:
        (obsData, prdData, stampObs, stampPrd, timeData, euclidDelt).
        NOTE(review): row layouts depend on config.axis / the column order of
        the input CSVs — confirm against the upstream generator.
    """
    euclidDelt = 0    # number of discarded samples
    euclidLimit = 10  # max allowed per-epoch error magnitude
    obsData = []; prdData = [] # observation segment / prediction segment
    stampObs = []; stampPrd = [] # observation-segment / prediction-segment time features
    timeData = [] # names of the samples that survive filtering
    filesObsPath = get_file_paths(dataObsDir)
    t_bar = tqdm(filesObsPath, total=len(filesObsPath))
    orbitDataSet = {} # full per-sample data keyed by file name
    for fileObsPath in t_bar:
        fileName = os.path.basename(fileObsPath).replace(".csv", "")
        fileDate = fileName.split("_")[0]
        sat_id = fileName.split("_")[1]
        fileDate = datetime.strptime(fileDate, "%Y-%m-%d %H-%M-%S")
        t_bar.set_description(fileDate.strftime("%Y-%m-%d %H:%M:%S"))
        if fileDate < beginDate or fileDate > endDate:
            continue
        obsNow = []; prdNow = [] # rows accumulated for the current sample
        flag = True
        filePrdPath = dataPrdDir + os.path.basename(fileObsPath) # prediction-segment path
        df_Obs = pd.read_csv(fileObsPath)
        dateObs = df_Obs['date'].to_numpy() # observation-segment epochs
        data_Obs = df_Obs[df_Obs.columns.drop('date')].to_numpy() # observation-segment features

        df_Prd = pd.read_csv(filePrdPath)
        datePrd = df_Prd['date'].to_numpy() # prediction-segment epochs
        data_Prd = df_Prd[df_Prd.columns.drop('date')].to_numpy() # prediction-segment features
        if np.any(data_Obs[:, -1] == 999999.999999) or np.any(data_Prd[:, -1] == 999999.999999): # drop samples containing the missing-value sentinel
            euclidDelt += 1
            continue
        for time, obsDataRow in zip(dateObs, data_Obs):
            if not flag: break
            timeDate = time + "_" + sat_id
            if timeDate not in dataFinal or timeDate not in dataRapid:
                flag = False
                break
            if useError == "Final":
                obsErr = obsDataRow[:config.axis] - dataFinal[timeDate][:config.axis]
            else:
                obsErr = obsDataRow[:config.axis] - dataRapid[timeDate][:config.axis]
            if euclidDistance(obsErr) > euclidLimit:
                flag = False
                break
            # get_betaANDshadow returns [margin, np.sin(beta_rad), np.cos(beta_rad)]
            betaANDshadow = get_betaANDshadow(obsDataRow[:config.axis],obsDataRow[config.axis:config.axis*2],dataSun[time])
            omni = dataOMNI[time]
            dataObs = np.hstack((obsErr,obsDataRow[config.axis:],omni,betaANDshadow))
            obsNow.append(dataObs)

        for time, prdDataRow in zip(datePrd, data_Prd):
            if not flag: break
            timeDate = time + "_" + sat_id
            if timeDate not in dataFinal:
                flag = False
                break
            prdErr = prdDataRow[:config.axis] - dataFinal[timeDate][:config.axis] # prediction-segment error vs the final orbit
            if euclidDistance(prdErr) > euclidLimit:
                flag = False
                break
            # betaANDshadow = get_betaANDshadow(prdDataRow[:config.axis],prdDataRow[config.axis:config.axis*2],dataSun[time])
            # omni = dataOMNI[time]
            dataPrd = np.hstack((prdErr,prdDataRow[config.axis:]))
            prdNow.append(dataPrd)

        if flag == False:
            euclidDelt += 1
            continue
        obsNow = np.array(obsNow)
        prdNow = np.array(prdNow)
        if turnRSW == True:
            # Rotate the error columns from the ECI frame into the RSW
            # (radial/along-track/cross-track) frame.
            obsRSW = convert_eciErr_to_rsw(data_Obs[:,:config.axis],data_Obs[:,config.axis:config.axis*2],obsNow[:,:config.axis])
            prdRSW = convert_eciErr_to_rsw(data_Prd[:,:config.axis],data_Prd[:,config.axis:config.axis*2],prdNow[:,:config.axis])
            obsNow = np.concatenate([obsRSW,obsNow[:,config.axis*2:]],axis=-1)
            prdNow = np.concatenate([prdRSW,prdNow[:,config.axis*2:]],axis=-1)

        # GPS satellite label: numeric index parsed from e.g. "G07" -> 7,
        # appended as a constant column to both segments.
        idx = int(sat_id[1:])
        idxMatrix = np.full((obsNow.shape[0], 1), idx)
        obsNow = np.concatenate([obsNow, idxMatrix], axis=-1)
        prdNow = np.concatenate([prdNow, idxMatrix], axis=-1)

        obsData.extend(obsNow)
        prdData.extend(prdNow)
        timeData.append(fileName)
        data_stampObs = extract_time_features(pd.to_datetime(df_Obs['date'].values))
        data_stampPrd = extract_time_features(pd.to_datetime(df_Prd['date'].values))
        stampObs.append(data_stampObs)
        stampPrd.append(data_stampPrd)
        orbitDataSet[fileName] = [obsNow,prdNow,data_stampObs,data_stampPrd]

    if useMerge:
        # Replace the flat lists with multi-day merged samples.
        orbitDataSet, timeData = sampleMerge(orbitDataSet,mergeDays=mergeDays)
        obsData,prdData,stampObs,stampPrd = dataSplit(orbitDataSet)

    obsData = np.array(obsData); prdData = np.array(prdData)
    stampObs = np.array(stampObs); stampPrd = np.array(stampPrd)
    return obsData, prdData, stampObs, stampPrd, timeData, euclidDelt


def get_orbitData_ReSULT_mahalanobis(dataObsDir,dataPrdDir,dataRapidDir,dataFinalDir,turnRSW=True,useMerge=False,mergeDays=1,useError="Final"):
    """Full dataset-building pipeline with Mahalanobis outlier removal.

    Loads rapid/final reference orbits, sun and OMNI auxiliary data, builds
    the obs/prd samples, removes outliers, min-max scales the year feature
    (column 0 of the stamp arrays, fit on obs and applied to prd), and dumps
    the surviving sample names to df_timeData.csv.

    Returns:
        (obsData, prdData, stampObs, stampPrd) as stacked arrays.
    """
    dataRapid = loadOrbitData(get_file_paths(dataRapidDir))  # rapid orbit products
    dataFinal = loadOrbitData(get_file_paths(dataFinalDir))  # final orbit products
    dataSun = loadSunData(dataFinal)
    dataOMNI = loadOMNIData()
    obsData, prdData, stampObs, stampPrd, timeData, euclidDelt = loadObsANDPrdData(
        dataObsDir, dataPrdDir, dataRapid, dataFinal, dataSun, dataOMNI,
        turnRSW=turnRSW, useMerge=useMerge, mergeDays=mergeDays, useError=useError)
    print(f"euclidDelt: {euclidDelt}")
    obsData, prdData, stampObs, stampPrd = (
        np.vstack(obsData), np.vstack(prdData), np.vstack(stampObs), np.vstack(stampPrd))
    timeData = np.array(timeData)
    obsData, prdData, stampObs, stampPrd, timeData = outlierDetection_mahalanobis(
        obsData, prdData, stampObs, stampPrd, timeData)

    # Min-max scale the year column in place: fit on obs, reuse for prd.
    scaler = MinMaxScaler()
    stampObs[:, 0:1] = scaler.fit_transform(stampObs[:, 0:1])
    stampPrd[:, 0:1] = scaler.transform(stampPrd[:, 0:1])

    pd.DataFrame(timeData).to_csv(config.saveDir+'df_timeData.csv', index=False)
    return obsData, prdData, stampObs, stampPrd


if __name__ == "__main__":
    obsData, prdData, stampObs, stampPrd = get_orbitData_ReSULT_mahalanobis(
        config.dataObsDir, config.dataPrdDir, config.dataRapidDir, config.dataFinalDir,
        turnRSW=True, useMerge=False, mergeDays=1, useError="Final")
    # Persist the four arrays as CSVs for downstream training scripts.
    outputs = (
        ('df_obsData.csv', obsData),
        ('df_prdData.csv', prdData),
        ('df_stampObs.csv', stampObs),
        ('df_stampPrd.csv', stampPrd),
    )
    for fname, arr in outputs:
        pd.DataFrame(arr).to_csv(config.dataSetDir + fname, index=False, chunksize=50000)
    print(f"obsData: {obsData.shape}")
    print(f"prdData: {prdData.shape}")
    print(f"stampObs: {stampObs.shape}")
    print(f"stampPrd: {stampPrd.shape}")
    # Sanity check: number of whole samples in each split.
    print(obsData.shape[0]/config.training_length)
    print(prdData.shape[0]/config.predicting_length)
