from concurrent.futures import ThreadPoolExecutor
import os
import shutil
import struct
from tqdm import tqdm
import numpy as np
from cuml import DBSCAN
from sklearn.preprocessing import StandardScaler
from typing import TypeGuard

class float3:
    """Simple 3-component float vector (mirrors a C-style ``float3``)."""

    x: float
    y: float
    z: float

    def __init__(self, x: float = 0.0, y: float = 0.0, z: float = 0.0) -> None:
        # Fix: the original declared annotations only, so float3() produced an
        # instance with no x/y/z attributes. Defaults keep float3() callable
        # exactly as before while actually initializing the fields.
        self.x = x
        self.y = y
        self.z = z
class PointCloud:
    """One radar point: a 3-D position plus per-point scalar measurements.

    Field semantics are defined by the producer of the .bin files --
    TODO confirm units/meaning of Strength, Velocity and CohenFactor.
    """

    Strength: float        # return-signal strength
    Pos: "float3"          # 3-D position (quoted: forward/sibling reference)
    Velocity: float        # not used by the filtering code in this file
    CohenFactor: float

    def __init__(self, Strength: float = 0.0, Pos: "float3 | None" = None,
                 Velocity: float = 0.0, CohenFactor: float = 0.0) -> None:
        # Fix: the original had annotations only, so PointCloud() produced an
        # instance with no attributes at all (dataPreprocess reads Pos.x etc.).
        self.Strength = Strength
        # Build a fresh float3 per instance; a shared default would be mutable.
        self.Pos = float3() if Pos is None else Pos
        self.Velocity = Velocity
        self.CohenFactor = CohenFactor

def isClassFormat(pointCloud:list) -> TypeGuard[list[PointCloud]]:
    """Narrow *pointCloud* to ``list[PointCloud]``.

    A non-list is rejected; an empty list counts as class-format.
    """
    if not isinstance(pointCloud, list):
        return False
    # all() on an empty list is True, matching the original's empty-list branch.
    return all(isinstance(entry, PointCloud) for entry in pointCloud)

def isFloatFormat(pointCloud:list) -> TypeGuard[list[list[float]]]:
    if not isinstance(pointCloud, list):
        return False
    if len(pointCloud) <= 0:
        return True
    for point in pointCloud:
        if not isinstance(point, list) or len(point) != 5:
            return False
        for value in point:
            if not isinstance(value, float):
                return False
    return True

def dataPreprocess(pointCloud:list[PointCloud]|list[list[float]])->np.ndarray:
    """Convert the point cloud to an (N, 5) float32 array, soft-clamp the
    three spatial columns, then standardize all columns.

    Rows are laid out as [x, y, z, strength, cohen_factor].

    Args:
        pointCloud: either a list of PointCloud objects or a list of
            5-element float rows.

    Returns:
        The standardized feature matrix (zero mean, unit variance per column).

    Raises:
        TypeError: if the input matches neither supported format.
    """
    if isClassFormat(pointCloud):
        rows: list[list[float]] = [
            [p.Pos.x, p.Pos.y, p.Pos.z, p.Strength, p.CohenFactor]
            for p in pointCloud
        ]
    elif isFloatFormat(pointCloud):
        rows = pointCloud
    else:
        raise TypeError("Unsupported point cloud format. Expected list of PointCloud or list of lists of floats.")
    arr = np.array(rows, dtype=np.float32)
    # Soft-clamp each spatial axis into (-a, a) with
    # f(v) = sign(v) * a * (1 - exp(-2|v|/a)), where a is the axis amplitude.
    for i in range(3):
        # Fix: the original used np.max(column) as the amplitude. If the
        # column max is negative, a < 0 turns the decay into exponential
        # growth; if it is 0, b divides by zero. Use the max magnitude instead.
        a = float(np.max(np.abs(arr[:, i])))
        if a == 0.0:
            continue  # all-zero axis: nothing to clamp, avoid div-by-zero
        b = 2.0 / a   # same as the original 1/0.5/a
        arr[:, i] = np.sign(arr[:, i]) * a * (1.0 - np.exp(-b * np.abs(arr[:, i])))
    return StandardScaler().fit_transform(arr)

def filter(pointCloud:list[PointCloud]|list[list[float]],eps:float,minPoints:int) -> list[bool]:
    """Cluster the point cloud with DBSCAN and flag inliers.

    Args:
        pointCloud: input points in either supported format.
        eps: DBSCAN neighborhood radius.
        minPoints: DBSCAN minimum samples per core point.

    Returns:
        One bool per input point; True means the point belongs to a cluster.
    """
    if not pointCloud:
        return []
    features = dataPreprocess(pointCloud)
    labels = DBSCAN(eps=eps, min_samples=minPoints).fit_predict(features)
    # DBSCAN labels noise points as -1; anything else is part of a cluster.
    return [label != -1 for label in labels]

def loadDataFromBinFile(filePath:str) -> list[list[float]]:
    """Load little-endian 6-float32 records from *filePath*, dropping
    out-of-range points.

    A record is kept only when field 5 >= 0.14, -0.2 <= field 2 <= 8 and
    field 3 >= 6.5. Field meanings are defined by the writer of the .bin
    files -- TODO confirm which field is which physical quantity.

    Args:
        filePath: path to the binary file (24 bytes per record).

    Returns:
        The surviving records as 6-element float lists.

    Raises:
        ValueError: if the file size is not a multiple of the record size.
    """
    record = struct.Struct("<6f")  # pre-compiled: parse the format string once
    with open(filePath, "rb") as f:
        buffer = f.read()
    if len(buffer) % record.size != 0:
        raise ValueError("File size does not match expected struct size.")
    res: list[list[float]] = []
    for data in record.iter_unpack(buffer):
        if data[5] < 0.14 or data[2] > 8 or data[2] < -0.2 or data[3] < 6.5:
            continue  # out-of-range point: skip it
        res.append(list(data))
    return res

def saveDataToBinFile(filePath:str, res:list[list[float]]) -> None:
    """Write *res* to *filePath* as consecutive little-endian 6-float32 records.

    Fix: the original validated rows while writing, so a malformed row in
    the middle of the list left a partially written file behind. All rows
    are now validated before the file is opened.

    Args:
        filePath: destination path (overwritten).
        res: list of 6-element float rows.

    Raises:
        ValueError: if any row does not have exactly 6 values.
    """
    for data in res:
        if len(data) != 6:
            raise ValueError("Each point must have exactly 6 float values.")
    record = struct.Struct("<6f")
    with open(filePath, "wb") as f:
        # Assemble once and write once instead of many tiny writes.
        f.write(b"".join(record.pack(*data) for data in res))

def getArg() -> tuple[str, float, int]:
    """Parse command-line options.

    Returns:
        (bin_path, eps, min_points): the input directory, DBSCAN epsilon
        and DBSCAN minimum-points parameters.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Filter point cloud data.")
    parser.add_argument(
        "--bin-path",
        required=True,
        type=str,
        help="Path to the input binary file containing point cloud data.",
    )
    parser.add_argument(
        "--eps", type=float, default=0.35, help="DBSCAN epsilon parameter."
    )
    parser.add_argument(
        "--min-points", type=int, default=3, help="DBSCAN minimum points parameter."
    )
    parsed = parser.parse_args()
    return (parsed.bin_path, parsed.eps, parsed.min_points)

# Shared per-file log lines, flushed to filter.csv by main().
logList:list[str] = []

def process(binFile:os.DirEntry[str], eps:float, minPoints:int, outputBinPath:str) -> None:
    """Filter one .bin file with DBSCAN and write the survivors plus a log entry.

    Args:
        binFile: directory entry of the input .bin file.
        eps: DBSCAN epsilon, forwarded to filter().
        minPoints: DBSCAN minimum points, forwarded to filter().
        outputBinPath: directory that receives the filtered file.
    """
    global logList
    rawPoints = loadDataFromBinFile(binFile.path)
    # Reorder raw columns into the 5-feature layout filter() expects.
    # The mapping (1,2,3,0,5) comes from the .bin record layout --
    # presumably [strength, x, y, z, velocity, cohen]; TODO confirm.
    features = [[row[1], row[2], row[3], row[0], row[5]] for row in rawPoints]
    keepMask = filter(features, eps, minPoints)
    kept = [row for row, keep in zip(rawPoints, keepMask) if keep]
    saveDataToBinFile(os.path.join(outputBinPath, binFile.name), kept)
    # NOTE(review): list.append is atomic in CPython, so concurrent workers
    # sharing logList should be safe -- confirm if moving off CPython.
    logList.append(f"{binFile.name},{len(rawPoints)},{len(kept)}\n")

def main() -> None:
    """Filter every .bin file under <bin-path>/SARRadar into
    <bin-path>/SARRadarProcessed, copying non-.bin files through unchanged.

    Writes a per-file CSV log (name, points before, points after) to
    <bin-path>/filter.csv.
    """
    binPath, eps, minPoints = getArg()
    inputBinPath = os.path.join(binPath, "SARRadar")
    outputBinPath = os.path.join(binPath, "SARRadarProcessed")
    logPath = os.path.join(binPath, "filter.csv")
    os.makedirs(outputBinPath, exist_ok=True)
    futures = []
    with ThreadPoolExecutor(max_workers=4) as executor:
        for entry in os.scandir(inputBinPath):
            if entry.name.endswith(".bin"):
                futures.append(
                    executor.submit(process, entry, eps, minPoints, outputBinPath)
                )
            else:
                # Pass non-.bin files through untouched.
                shutil.copy(entry.path, outputBinPath)
    # Fix: the original never inspected the submitted futures, so any worker
    # exception was silently swallowed. Re-raise the first failure here.
    for future in futures:
        future.result()
    global logList
    # Fix: the original opened the log without ever closing it; a context
    # manager guarantees the buffer is flushed and the handle released.
    with open(logPath, "w") as logFile:
        logFile.writelines(logList)

if __name__ == "__main__":
    main()