import os
import sys
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.append(project_root)
import json
import matplotlib
matplotlib.use('Agg')
import threading
import numpy as np
import matplotlib.colors as mcolors

from scipy.interpolate import interp1d
from concurrent.futures import ThreadPoolExecutor
from PIL import Image
from dateutil import parser

from utils import *

# Origin of the local map frame; set from the first SAR frame processed
# (see process_mat_file) and None until then.
lat0 = None
lon0 = None
h0 = None

# Accumulated SAR point cloud (global frame) and per-frame pose records.
# Both are appended to from worker threads and guarded by all_data_lock.
all_data = {'x': [], 'y': [], 'z': [], 's': [], 'dn': []}
utc_to_llhrph = []
all_data_lock = threading.Lock()

def process_mat_file(mat_data, x_range, y_range):
    """Process one SAR frame loaded from a .mat file.

    Flattens the frame's target point cloud, filters it, transforms it into
    the shared global frame anchored at the first frame's position, and
    appends the result to the module-level accumulators ``all_data`` and
    ``utc_to_llhrph``.  All shared-state mutation is done under
    ``all_data_lock``, so this is safe to run from a thread pool.

    Args:
        mat_data: dict loaded from a .mat file; required keys are
            StartFrameLLH, StartFrameRPY, aziLinearlityMap, eleLinearlityMap,
            errorMap, targetX, targetY, targetZ, DN, utcTimeChar.
        x_range, y_range: (min, max) tuples forwarded to ``filter_data``.

    Raises:
        ValueError: if any required key is missing from ``mat_data``.
    """
    global lat0, lon0, h0
    global all_data
    global utc_to_llhrph

    start_frame_llh = mat_data.get('StartFrameLLH', None)
    start_frame_prh = mat_data.get('StartFrameRPY', None)
    azi_map = mat_data.get('aziLinearlityMap', None)
    ele_map = mat_data.get('eleLinearlityMap', None)
    errorMap = mat_data.get('errorMap', None)
    target_x = mat_data.get('targetX', None)
    target_y = mat_data.get('targetY', None)
    target_z = mat_data.get('targetZ', None)
    dn = mat_data.get('DN', None)
    utc_char = mat_data.get('utcTimeChar', None)

    # Validate everything (including utcTimeChar) before any indexing.  The
    # original raised a bare string (a TypeError in Python 3) and subscripted
    # utcTimeChar before checking it existed.
    required = (azi_map, ele_map, errorMap, target_x, target_y, target_z,
                dn, start_frame_llh, start_frame_prh, utc_char)
    if any(v is None for v in required):
        raise ValueError(".mat文件中缺少数据")

    utc = utc_char[0]

    lat = start_frame_llh[0][0]       # latitude
    lon = start_frame_llh[0][1]       # longitude
    h = start_frame_llh[0][2] / 1000  # altitude, divided by 1000 -- presumably mm -> m (or m -> km); TODO confirm unit

    pitch = start_frame_prh[0][0]    # pitch angle (gravity-referenced)
    roll = start_frame_prh[0][1]     # roll angle (gravity-referenced)
    heading = start_frame_prh[0][2]  # angle from north

    # First frame defines the map origin.  Take the lock so two worker
    # threads cannot race and anchor the map at different origins.
    with all_data_lock:
        if lat0 is None:
            lat0, lon0, h0 = lat, lon, h
        origin = (lat0, lon0, h0)

    X = target_x.flatten()
    Y = target_y.flatten()
    Z = target_z.flatten()
    DN = dn.flatten()
    azi = azi_map.flatten()
    ele = ele_map.flatten()
    err = errorMap.flatten()

    # Per-point quality score combining the three calibration maps.
    s = azi * ele * err

    raw_data = {
        'x': X,
        'y': Y,
        'z': Z,
        'dn': DN,
        's': s,
        'aziLinearlityMap': azi,
        'eleLinearlityMap': ele,
        'errorMap': err,
    }

    filtered_data = filter_z(raw_data)
    filtered_data = filter_data(filtered_data, x_range, y_range, filter_noise=True)

    X = filtered_data['x']
    Y = filtered_data['y']
    Z = filtered_data['z']
    DN = filtered_data['dn']
    s = filtered_data['s']

    X, Y, Z = local_to_global_coords(X, Y, Z, lat, lon, h, pitch, roll, heading, *origin)

    with all_data_lock:
        all_data['x'].extend(X)
        all_data['y'].extend(Y)
        all_data['z'].extend(Z)
        all_data['s'].extend(s)
        all_data['dn'].extend(DN)

        utc_to_llhrph.append({
            'utc': utc,
            'lat': lat,
            'lon': lon,
            'h': h,
            'pitch': pitch,
            'roll': roll,
            'heading': heading
        })

def read_bin_file_and_split_packages(filepath):
    """Read a LiDAR .bin file and split its point cloud into 125-point packages.

    File layout (little-endian): int32 packet_psn, two float32 values
    (pack_min, pack_max), int32 points_count, then points_count * 3 float32
    coordinates (x, y, z per point).

    Returns:
        A list of package dicts (keys: packet_psn, pack_min, pack_max,
        points_count, points), or None on any read/format error -- the error
        is printed, matching the original best-effort behavior.
    """
    try:
        with open(filepath, 'rb') as f:
            packet_psn = np.frombuffer(f.read(4), dtype='<i4')[0]
            pack_min, pack_max = np.frombuffer(f.read(8), dtype='<f4')
            points_count = np.frombuffer(f.read(4), dtype='<i4')[0]

            points = np.frombuffer(f.read(points_count * 3 * 4), dtype='<f4')
            points = points.reshape(-1, 3)

            # Validate up front -- the original only checked the remainder
            # after all packages had already been built.
            if points.shape[0] % 125 != 0:
                raise ValueError(f"LiDAR points are not *125.")

            packages = []
            for start_idx in range(0, points.shape[0], 125):
                packages.append({
                    "packet_psn": packet_psn,
                    "pack_min": pack_min,
                    "pack_max": pack_max,
                    "points_count": 125,
                    "points": points[start_idx:start_idx + 125],
                })

            return packages
    except Exception as e:
        print(f"read {filepath} failed: {e}")
        return None
    
def read_txt_file_timestamps(filepath):
    """Read one timestamp string per line from *filepath*.

    Returns:
        The list of stripped lines (in file order), or None when the file
        cannot be read -- the error is printed.
    """
    try:
        with open(filepath, 'r') as f:
            return [line.strip() for line in f]
    except Exception as e:
        print(f"read txt file {filepath} failed: {e}")
        return None

def interpolate_lidar_data():
    """Build time-interpolators from the accumulated SAR pose records.

    Returns:
        (llh_interp, rpy_interp, t_min, t_max) where both interpolators map a
        UTC epoch-second timestamp to [lat, lon, h] and
        [roll, pitch, heading] respectively (linear, extrapolating outside
        the observed span), and t_min/t_max bound the SAR timestamps.
    """
    global utc_to_llhrph

    times = np.array([parser.isoparse(rec['utc']).timestamp() for rec in utc_to_llhrph])
    llh = np.array([[rec['lat'], rec['lon'], rec['h']] for rec in utc_to_llhrph])
    rpy = np.array([[rec['roll'], rec['pitch'], rec['heading']] for rec in utc_to_llhrph])

    # interp1d requires monotonically increasing sample points.
    order = np.argsort(times)
    times = times[order]
    llh = llh[order]
    rpy = rpy[order]

    llh_interp = interp1d(times, llh, axis=0, bounds_error=False, fill_value="extrapolate")
    rpy_interp = interp1d(times, rpy, axis=0, bounds_error=False, fill_value="extrapolate")

    return llh_interp, rpy_interp, times[0], times[-1]

# Accumulated LiDAR points already transformed into the global map frame.
all_LiDAR = {'x': [], 'y': [], 'z': []}

def _index_files_by_prefix(filenames):
    """Map the numeric '<prefix>_' part of each filename to the filename,
    inserting in ascending numeric-prefix order."""
    indexed = {}
    for name in sorted(filenames, key=lambda n: int(n.split('_')[0])):
        indexed[name.split('_')[0]] = name
    return indexed

def process_LiDAR_data(folder_path):
    """Fuse LiDAR packages in *folder_path* into the global map frame.

    Pairs each <prefix>_*.bin file with its <prefix>_*.txt timestamp file,
    interpolates a SAR pose for every package timestamp, transforms the
    points and appends them to the module-level ``all_LiDAR`` accumulator.
    Packages whose timestamp falls outside the SAR time span are skipped.

    Args:
        folder_path: directory containing paired .bin/.txt LiDAR files.
    """
    global lat0, lon0, h0

    files = os.listdir(folder_path)

    bin_files_dict = _index_files_by_prefix([f for f in files if f.endswith('.bin')])
    txt_files_dict = _index_files_by_prefix([f for f in files if f.endswith('.txt')])

    llh_interp_func, rpy_interp_func, min_time, max_time = interpolate_lidar_data()

    for prefix, bin_filename in bin_files_dict.items():
        if prefix not in txt_files_dict:
            print(f"未找到 bin 文件 {bin_filename} 对应的 txt 文件")
            continue

        print(f"正在处理文件: {bin_filename}")
        txt_filepath = os.path.join(folder_path, txt_files_dict[prefix])
        timestamps = read_txt_file_timestamps(txt_filepath)

        bin_filepath = os.path.join(folder_path, bin_filename)
        packages = read_bin_file_and_split_packages(bin_filepath)

        if packages is None or timestamps is None:
            continue

        # Packages and timestamps pair index-wise; tolerate a length mismatch
        # by using the shorter of the two.
        for i in range(min(len(packages), len(timestamps))):
            timestamp_str = timestamps[i]
            try:
                lidar_utc = parser.isoparse(timestamp_str).timestamp()

                # Only fuse packages covered by the SAR trajectory.
                if lidar_utc < min_time or lidar_utc > max_time:
                    continue

                lidar_llh = llh_interp_func(lidar_utc)  # [lat, lon, h]
                lidar_rpy = rpy_interp_func(lidar_utc)  # [roll, pitch, heading]
                points = packages[i]['points']

                # Axis swap plus fixed offsets (0.236, 2.488) -- presumably the
                # sensor-to-body lever arm; TODO confirm against calibration.
                X, Y, Z = local_to_global_coords(-points[:, 1]+0.236, points[:, 0]+2.488, points[:, 2],
                                       lidar_llh[0], lidar_llh[1], lidar_llh[2],
                                       lidar_rpy[1], lidar_rpy[0], lidar_rpy[2],
                                       lat0, lon0, h0)

                # Keep only points inside the height band of interest.
                mask = (Z < 3) & (Z > -1)
                all_LiDAR['x'].extend(X[mask])
                all_LiDAR['y'].extend(Y[mask])
                all_LiDAR['z'].extend(Z[mask])
            except Exception as e:
                print(f"解析时间戳 {timestamp_str} 失败: {e}")
                continue

def _rasterize_mean_rgb(px, py, rgb, width_px, height_px, step=1):
    """Bin per-point RGB colors (values in 0-1) into a (H, W, 3) image of
    per-pixel mean colors scaled to 0-255.

    Args:
        px, py: integer pixel indices per point (already clipped to grid).
        rgb: (N, 3) per-point colors in 0-1.
        width_px, height_px: output grid size.
        step: subsampling stride -- every step-th point contributes (used to
            decimate the dense LiDAR cloud; 1 keeps every point).
    """
    rgb_sum = np.zeros((height_px, width_px, 3), dtype=np.float64)
    count = np.zeros((height_px, width_px), dtype=np.int64)

    for i in range(0, len(px), step):
        rgb_sum[py[i], px[i], :] += rgb[i, :] * 255
        count[py[i], px[i]] += 1

    non_zero_mask = count > 0
    rgb_avg = np.zeros((height_px, width_px, 3), dtype=np.float32)
    rgb_avg[non_zero_mask] = rgb_sum[non_zero_mask] / count[non_zero_mask][:, None]
    return rgb_avg

def _save_rgb_image(rgb_avg, path):
    """Flip to image orientation (row 0 at the top), clip to byte range, save."""
    rgb_image = np.clip(np.flipud(rgb_avg), 0, 255).astype(np.uint8)
    Image.fromarray(rgb_image, 'RGB').save(path)

def plot_combined_map(output_folder, x_range, y_range):
    """Render the accumulated SAR and LiDAR clouds onto one shared pixel grid.

    Writes map_SAR.png, map_LiDAR.png and meta.json (grid geometry plus the
    map origin lat0/lon0/h0) into *output_folder*.

    Args:
        output_folder: destination directory (must already exist).
        x_range, y_range: (min, max) extents whose spans pad the grid bounds.
    """
    global lat0, lon0, h0, all_data, all_LiDAR

    X = np.array(all_data['x'])
    Y = np.array(all_data['y'])

    resolution = 10.0  # pixels per coordinate unit -- presumably metres; TODO confirm

    x_min, x_max = X.min(), X.max()
    y_min, y_max = Y.min(), Y.max()

    # Pad the data bounds by one full range span per side, snapped to the grid.
    x_min_aligned = np.floor((x_min - (x_range[1] - x_range[0])) * resolution) / resolution
    x_max_aligned = np.ceil((x_max + (x_range[1] - x_range[0])) * resolution) / resolution
    y_min_aligned = np.floor((y_min - (y_range[1] - y_range[0])) * resolution) / resolution
    y_max_aligned = np.ceil((y_max + (y_range[1] - y_range[0])) * resolution) / resolution

    width_px = max(1, int((x_max_aligned - x_min_aligned) * resolution))
    height_px = max(1, int((y_max_aligned - y_min_aligned) * resolution))

    def to_pixels(xs, ys):
        # Map map-frame coordinates to integer pixel indices, clamped to grid.
        px = np.clip(np.floor((xs - x_min_aligned) * resolution).astype(np.int32), 0, width_px - 1)
        py = np.clip(np.floor((ys - y_min_aligned) * resolution).astype(np.int32), 0, height_px - 1)
        return px, py

    # SAR layer
    px, py = to_pixels(X, Y)
    rgb = compute_rgb(np.array(all_data['z']), np.array(all_data['s']), np.array(all_data['dn']))
    rgb_avg = _rasterize_mean_rgb(px, py, rgb, width_px, height_px)
    _save_rgb_image(rgb_avg, os.path.join(output_folder, "map_SAR.png"))

    # LiDAR layer -- convert the accumulated lists to arrays explicitly; the
    # original fed plain Python lists into vectorized arithmetic and only
    # worked through NumPy-scalar coercion.
    X = np.array(all_LiDAR['x'])
    Y = np.array(all_LiDAR['y'])
    Z = np.array(all_LiDAR['z'])

    px, py = to_pixels(X, Y)

    # Height-coded hue over [0, 2/3] (red lowest, blue highest); guard the
    # normalization so a flat cloud cannot divide by zero.
    z_span = np.max(Z) - np.min(Z)
    if z_span == 0:
        hue = np.zeros_like(Z, dtype=np.float64)
    else:
        hue = 2/3 * (Z - np.min(Z)) / z_span
    hsv = np.stack((hue, np.ones_like(hue), np.ones_like(hue)), axis=1)
    rgb = mcolors.hsv_to_rgb(hsv)

    rgb_avg = _rasterize_mean_rgb(px, py, rgb, width_px, height_px, step=10)
    _save_rgb_image(rgb_avg, os.path.join(output_folder, "map_LiDAR.png"))

    metadata = {
        "left_bottom": [float(x_min_aligned), float(y_min_aligned)],
        "right_top": [float(x_max_aligned), float(y_max_aligned)],
        "resolution": resolution,
        "width_px": width_px,
        "height_px": height_px,
        "lat0": lat0,
        "lon0": lon0,
        "h0": h0
    }

    with open(os.path.join(output_folder, "meta.json"), 'w') as f:
        json.dump(metadata, f, indent=4)
 
def main():
    """Process all SAR .mat files (in parallel), then the LiDAR data, and
    render the combined maps into the output folder."""
    SAR_folder = "./workspace/data/SAR"
    output_folder = "./workspace/image/SAR"

    x_range = (-45, 45)
    y_range = (0, 90)

    os.makedirs(output_folder, exist_ok=True)

    futures = []
    with ThreadPoolExecutor() as executor:
        for root, _, files in os.walk(SAR_folder):
            files.sort(key=natural_key)
            for file in files:
                if not file.endswith(".mat"):
                    continue
                file_path = os.path.join(root, file)
                print(f"正在处理文件: {file_path}")

                mat_data = read_mat_file(file_path)
                if mat_data:
                    futures.append(executor.submit(
                        process_mat_file,
                        mat_data,
                        x_range=x_range,
                        y_range=y_range
                    ))
        # Retrieve each result so worker exceptions surface -- submit() alone
        # silently discards any exception raised in process_mat_file.
        for future in futures:
            try:
                future.result()
            except Exception as e:
                print(f"process_mat_file failed: {e}")

    LiDAR_folder = r"./workspace/data/LiDAR"
    process_LiDAR_data(LiDAR_folder)
    plot_combined_map(output_folder, x_range, y_range)

# Script entry point: run the full SAR + LiDAR pipeline when executed directly.
if __name__ == "__main__":
    main()