import os
import sys
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.append(project_root)
import pickle
import torch
import argparse
import matplotlib
matplotlib.use('Agg')

import numpy as np

from matplotlib import pyplot as plt

from utils import *

# Runtime configuration. All of these are None at import time and are
# populated from command-line arguments inside main() before any processing.
NUM_FRAMES = None         # number of frames stacked into each output tensor
X_RANGE = None            # (x_min, x_max) horizontal extent of the voxel grid
Y_RANGE = None            # (y_min, y_max) horizontal extent of the voxel grid
GRID_SIZE = None          # (cells_along_x, cells_along_y) of the voxel grid
NUM_HEIGHT_LAYERS = None  # number of vertical (z) bins per voxel column
NUM_PARAM = None          # features per voxel — presumably 4, matching the
                          # (dn, ele, azi, err) stack built in voxelize(); verify
Z_MIN = None              # lower z bound of the grid
Z_MAX = None              # upper z bound of the grid

def voxelize(frame, x_range, y_range, z_min, z_max, num_height_layers, grid_size):
    """Bin one point-cloud frame into a voxel grid of averaged features.

    Each point carries four features (dn, ele, azi, err). Points are assigned
    to voxels by position (out-of-range points are clipped to the edge voxel),
    and each voxel stores the mean of every feature over the points it holds;
    empty voxels stay 0.

    Args:
        frame: mapping with per-point sequences under keys 'x', 'y', 'z',
            'dn', 'aziLinearlityMap', 'eleLinearlityMap', 'errorMap'.
        x_range: (x_min, x_max) grid extent along x.
        y_range: (y_min, y_max) grid extent along y.
        z_min, z_max: grid extent along z.
        num_height_layers: number of z bins.
        grid_size: (cells_along_x, cells_along_y).

    Returns:
        ndarray of shape (num_height_layers, grid_size[1], grid_size[0], 4)
        with features stacked on the last axis as (dn, ele, azi, err).
    """
    x_min, x_max = x_range
    y_min, y_max = y_range
    # Bin widths along each axis.
    z_bin = (z_max - z_min) / num_height_layers
    x_bin = (x_max - x_min) / grid_size[0]
    y_bin = (y_max - y_min) / grid_size[1]

    x = np.array(frame['x'])
    y = np.array(frame['y'])
    z = np.array(frame['z'])
    features = (
        np.array(frame['dn']),
        np.array(frame['eleLinearlityMap']),
        np.array(frame['aziLinearlityMap']),
        np.array(frame['errorMap']),
    )

    # Voxel index per point, clipped so boundary/outlier points land in the
    # nearest edge voxel instead of indexing out of bounds.
    x_idx = np.clip(((x - x_min) / x_bin).astype(int), 0, grid_size[0] - 1)
    y_idx = np.clip(((y - y_min) / y_bin).astype(int), 0, grid_size[1] - 1)
    z_idx = np.clip(((z - z_min) / z_bin).astype(int), 0, num_height_layers - 1)

    shape = (num_height_layers, grid_size[1], grid_size[0])
    idx = (z_idx, y_idx, x_idx)

    # np.add.at performs an unbuffered scatter-add, so repeated voxel indices
    # accumulate correctly — this replaces a per-point Python loop with a
    # single C-level pass per feature.
    count = np.zeros(shape)
    np.add.at(count, idx, 1)

    averages = []
    for values in features:
        total = np.zeros(shape)
        np.add.at(total, idx, values)
        # Mean per voxel; where=count!=0 leaves empty voxels at 0.
        averages.append(np.divide(total, count, out=np.zeros_like(total), where=count != 0))

    # Stack features last: (dn, ele, azi, err).
    return np.stack(averages, axis=-1)

def plot_tensor_image(input_tensor, output_path):
    """Render a max-projection heatmap of a tensor and save it as an image.

    The tensor is reduced with a max over its last axis, min-max normalized,
    and written to *output_path* via matplotlib's Agg backend.

    Args:
        input_tensor: torch.Tensor or ndarray; reduced over its last axis.
        output_path: destination image path (format inferred from extension).
    """
    if isinstance(input_tensor, torch.Tensor):
        # detach().cpu() so grad-tracking and GPU-resident tensors convert
        # cleanly; plain .numpy() raises for both.
        input_tensor = input_tensor.detach().cpu().numpy()

    input_image = np.max(input_tensor, axis=-1)
    value_range = input_image.max() - input_image.min()
    if value_range > 0:
        input_image = (input_image - input_image.min()) / value_range
    else:
        # Constant image: the original expression would divide by zero and
        # produce NaNs; render an all-zero image instead.
        input_image = np.zeros_like(input_image)

    fig, ax = plt.subplots(figsize=(10, 10))
    im = ax.imshow(input_image, cmap='viridis', origin='lower')
    fig.colorbar(im, ax=ax, label="Normalized Intensity")
    ax.set_title("Tensor Input Image Visualization")
    ax.axis("off")
    fig.savefig(output_path, bbox_inches='tight')

    # Close explicitly so repeated calls don't accumulate open figures.
    plt.close(fig)

def process_pkl_file(file_path, output_folder):
    """Convert one .pkl recording into a stacked frame tensor plus a preview image.

    Loads the frame list from *file_path*, voxelizes each frame (empty frames
    become zero grids), selects/pads to exactly NUM_FRAMES non-empty frames,
    concatenates them along the channel axis, and writes both a .pt tensor and
    a PNG visualization under *output_folder*.
    """
    with open(file_path, 'rb') as handle:
        frames = pickle.load(handle).get('data', [])

    flat_shape = (GRID_SIZE[1], GRID_SIZE[0], NUM_HEIGHT_LAYERS * NUM_PARAM)

    flattened = []
    for frame in frames:
        if frame:
            grid = voxelize(
                frame,
                x_range=X_RANGE,
                y_range=Y_RANGE,
                z_min=Z_MIN,
                z_max=Z_MAX,
                num_height_layers=NUM_HEIGHT_LAYERS,
                grid_size=GRID_SIZE,
            )
        else:
            # Placeholder grid for frames with no points.
            grid = np.zeros((NUM_HEIGHT_LAYERS, GRID_SIZE[1], GRID_SIZE[0], NUM_PARAM))

        # (z, y, x, param) -> (y, x, z*param): fold height layers into channels.
        flattened.append(grid.transpose(1, 2, 0, 3).reshape(flat_shape))

    # Keep only frames that contain at least one non-zero value.
    non_empty = [g for g in flattened if not np.all(g == 0)]
    available = len(non_empty)

    if available >= NUM_FRAMES:
        # Evenly subsample NUM_FRAMES frames across the valid range.
        step = available / NUM_FRAMES
        chosen = [non_empty[int(k * step)] for k in range(NUM_FRAMES)]
    else:
        # Too few frames: left-pad with zero frames up to NUM_FRAMES.
        deficit = NUM_FRAMES - available
        chosen = [np.zeros(flat_shape) for _ in range(deficit)] + list(non_empty)

    stacked = torch.from_numpy(np.concatenate(chosen, axis=-1)).float()
    # (H, W, C) -> (C, H, W) for downstream consumers.
    stacked = stacked.permute(2, 0, 1).contiguous()

    stem = os.path.splitext(os.path.basename(file_path))[0]
    torch.save(stacked, os.path.join(output_folder, f'{stem}.pt'))

    image_dir = os.path.join(output_folder, 'img')
    os.makedirs(image_dir, exist_ok=True)
    plot_tensor_image(stacked, os.path.join(image_dir, f'{stem}.png'))

def main():
    """Parse grid configuration from the CLI and voxelize every input .pkl file.

    Populates the module-level configuration globals from required command-line
    arguments, then processes each .pkl in the fixed input folder (in natural
    filename order) into a tensor in the fixed output folder.
    """
    global NUM_FRAMES, X_RANGE, Y_RANGE, GRID_SIZE, NUM_HEIGHT_LAYERS, NUM_PARAM, Z_MIN, Z_MAX

    # Local import: only main() needs it, and it keeps the top of the file
    # unchanged.
    import ast

    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument("--X_RANGE", type=str, required=True)
    parser.add_argument("--Y_RANGE", type=str, required=True)
    parser.add_argument("--NUM_FRAMES", type=str, required=True)
    parser.add_argument("--GRID_SIZE", type=str, required=True)
    parser.add_argument("--NUM_HEIGHT_LAYERS", type=str, required=True)
    parser.add_argument("--NUM_PARAM", type=str, required=True)
    parser.add_argument("--Z_MIN", type=str, required=True)
    parser.add_argument("--Z_MAX", type=str, required=True)
    args = parser.parse_args()

    # ast.literal_eval only accepts Python literals (tuples, ints, floats),
    # unlike eval(), which would execute arbitrary code from the command line.
    X_RANGE = ast.literal_eval(args.X_RANGE)
    Y_RANGE = ast.literal_eval(args.Y_RANGE)
    NUM_FRAMES = ast.literal_eval(args.NUM_FRAMES)
    GRID_SIZE = ast.literal_eval(args.GRID_SIZE)
    NUM_HEIGHT_LAYERS = ast.literal_eval(args.NUM_HEIGHT_LAYERS)
    NUM_PARAM = ast.literal_eval(args.NUM_PARAM)
    Z_MIN = ast.literal_eval(args.Z_MIN)
    Z_MAX = ast.literal_eval(args.Z_MAX)

    input_folder = "./workspace/image/SAR/input"
    output_folder = "./workspace/image/SAR/tensors"
    os.makedirs(output_folder, exist_ok=True)

    # Collect .pkl files in natural (human) filename order.
    with os.scandir(input_folder) as entries:
        pkl_entries = [e for e in entries if e.is_file() and e.name.endswith(".pkl")]
        pkl_entries.sort(key=lambda entry: natural_key(entry.name))
        all_files = [entry.path for entry in pkl_entries]

    total_files = len(all_files)
    for index, file_path in enumerate(all_files, start=1):
        print(f"正在处理文件 ({index}/{total_files})")

        process_pkl_file(file_path, output_folder)

if __name__ == "__main__":
    main()
