import os
import pickle
import sys
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(project_root)
import matplotlib
matplotlib.use('Agg')

import numpy as np

from dateutil import parser

from utils import *

def read_bin_file_and_split_packages(filepath):
    """Parse one LiDAR ``.bin`` file into fixed-size 125-point packages.

    Expected file layout (all little-endian):
        int32   packet_psn
        float32 pack_min, pack_max
        int32   points_count
        float32 points[points_count * 3]   # (x, y, z) triples

    Returns:
        list[dict] with keys ``packet_psn``, ``pack_min``, ``pack_max``,
        ``points_count`` (always 125) and ``points`` (a (125, 3) float32
        array view), or ``None`` if the file cannot be read/parsed.
    """
    try:
        with open(filepath, 'rb') as f:
            packet_psn = np.frombuffer(f.read(4), dtype='<i4')[0]
            pack_min, pack_max = np.frombuffer(f.read(8), dtype='<f4')
            points_count = np.frombuffer(f.read(4), dtype='<i4')[0]

            points = np.frombuffer(f.read(points_count * 3 * 4), dtype='<f4')
            # Fail fast on truncated files: the original never checked the
            # payload against the header's own points_count, so a short read
            # that was still a multiple of 12 bytes passed silently.
            if points.size != points_count * 3:
                raise ValueError(
                    f"expected {points_count} points, got {points.size // 3}")
            points = points.reshape(-1, 3)

            # Validate *before* building packages; the original raised only
            # after the whole loop had already run.
            if points.shape[0] % 125 != 0:
                raise ValueError(f"LiDAR points are not *125.")

            packages = []
            for start_idx in range(0, points.shape[0], 125):
                packages.append({
                    "packet_psn": packet_psn,
                    "pack_min": pack_min,
                    "pack_max": pack_max,
                    "points_count": 125,
                    "points": points[start_idx:start_idx + 125],
                })

            return packages
    except Exception as e:
        # Best-effort contract: report and signal failure with None.
        print(f"read {filepath} failed: {e}")
        return None
    
def read_txt_file_timestamps(filepath):
    """Return one stripped timestamp string per line of *filepath*.

    Returns ``None`` (after printing a diagnostic) if the file cannot
    be read.
    """
    try:
        with open(filepath, 'r') as f:
            return [line.strip() for line in f]
    except Exception as e:
        print(f"read txt file {filepath} failed: {e}")
        return None

def process_LiDAR_data(folder_path):
    """Pair ``<prefix>_*.bin`` / ``<prefix>_*.txt`` files in *folder_path*,
    merge them into ``(timestamp, package)`` tuples, and pickle the result
    in sorted chunks of up to 12500 entries under ``<folder_path>/cache/``.

    Fixes over the previous revision:
      * data is flushed after the loop, so entries are no longer dropped
        when the last file pushed the buffer past the 25000 threshold or
        when the last bin file had no matching txt file;
      * the final chunk is timestamp-sorted like every other chunk.
    """
    CHUNK = 12500  # entries per cache file; flush when two chunks accumulate

    def _index_by_prefix(names):
        # Map numeric filename prefix -> filename, in ascending prefix order
        # (dicts preserve insertion order, so iteration follows the sort).
        indexed = {}
        for name in sorted(names, key=lambda x: int(x.split('_')[0])):
            indexed[name.split('_')[0]] = name
        return indexed

    def _dump_chunk(data, num):
        # Pickle one sorted chunk as cache/<num>.pkl, creating the dir lazily.
        cache_dir = os.path.join(folder_path, 'cache')
        os.makedirs(cache_dir, exist_ok=True)
        file_name = os.path.join(cache_dir, f'{num}.pkl')
        with open(file_name, 'wb') as f:
            pickle.dump(data, f)
        print(f'Saved cache {file_name}')

    files = os.listdir(folder_path)
    bin_files_dict = _index_by_prefix([f for f in files if f.endswith('.bin')])
    txt_files_dict = _index_by_prefix([f for f in files if f.endswith('.txt')])

    all_data_list = []
    num_cache = 1
    for prefix, bin_filename in bin_files_dict.items():
        if prefix not in txt_files_dict:
            print(f"未找到 bin 文件 {bin_filename} 对应的 txt 文件")
            continue

        timestamps = read_txt_file_timestamps(
            os.path.join(folder_path, txt_files_dict[prefix]))
        packages = read_bin_file_and_split_packages(
            os.path.join(folder_path, bin_filename))
        if packages is None or timestamps is None:
            continue

        # Zip packages with their timestamps; lengths may differ, so only
        # the overlapping prefix is kept. Unparseable timestamps are skipped.
        for i in range(min(len(packages), len(timestamps))):
            timestamp_str = timestamps[i]
            try:
                timestamp = parser.isoparse(timestamp_str)
            except Exception as e:
                print(f"解析时间戳 {timestamp_str} 失败: {e}")
                continue
            all_data_list.append((timestamp, packages[i]))

        # `while`, not `if`: one oversized batch can no longer leave more
        # than a chunk's worth of data sitting in the buffer.
        while len(all_data_list) >= 2 * CHUNK:
            all_data_list.sort(key=lambda x: x[0])
            _dump_chunk(all_data_list[:CHUNK], num_cache)
            num_cache += 1
            del all_data_list[:CHUNK]

    # Final flush: guarantees no accumulated data is ever dropped.
    if all_data_list:
        all_data_list.sort(key=lambda x: x[0])
        _dump_chunk(all_data_list, num_cache)

if __name__ == "__main__":
    # Convert the raw LiDAR dump into pickled [(timestamp, package)] caches.
    lidar_folder = r"./workspace/data/LiDAR"
    process_LiDAR_data(lidar_folder)
    