from logging import debug

import numpy
import numpy as np
import math
import os
import queue
import threading
import open3d as o3d
from memory_profiler import profile

#引入线程池
from concurrent.futures import ThreadPoolExecutor

from networkx.classes import neighbors
from sklearn.neighbors import BallTree, NearestNeighbors
#from scipy.spatial import KDTree
from scipy.spatial import Delaunay
from scipy.spatial.distance import pdist, squareform

from sklearn.neighbors import LocalOutlierFactor as LOFDetect
from sklearn.cluster import DBSCAN

np.set_printoptions(precision=10, floatmode='fixed')

# --- module-level configuration and shared state ---------------------------
ptsPoints = []    # not referenced in this file; presumably legacy -- TODO confirm
datasubsets = []  # not referenced in this file; presumably legacy -- TODO confirm
PI = 3.1415926535  # NOTE(review): math.pi would be more precise; unused here
lofThre = 1.2      # LOF threshold; not referenced in this file -- TODO confirm
batchsize = 10     # not referenced in this file -- TODO confirm

# voxel edge length used by voxel_down_sample in __main__
voxelSize = 1.2

# shared queue that worker() threads push radius-query results into
dataqueue = queue.Queue()
# container reused in __main__ to hold the raw point cloud before voxelisation
pointCloud = o3d.geometry.PointCloud()

def worker(q: queue.Queue, tree: "BallTree", pt, r: float):
    """Query *tree* for all points within radius *r* of *pt* and push the result onto *q*.

    :param q: queue that collects per-tree query results
              (original annotated this with the ``queue`` module itself; fixed to ``queue.Queue``)
    :param tree: a BallTree index supporting ``query_radius``
    :param pt: a single query point (wrapped into a (1, d) array for the query)
    :param r: search radius
    :return: the string ``"finished"`` (used only as a completion marker)
    """
    item = np.array([pt])
    # Renamed from `neighbors`, which shadowed the module-level networkx import.
    found = tree.query_radius(X=item, r=r)
    q.put(found)
    return "finished"

def constructKDTree(datas):
    """Build one BallTree spatial index per data subset.

    :param datas: iterable of point arrays, one per subset
    :return: list of BallTree instances, in the same order as *datas*
    """
    return [BallTree(subset, leaf_size=1000) for subset in datas]

def radiusSearch(trees: "list[BallTree]", pt, r: float):
    """Radius search of *pt* against every tree, one worker thread per tree.

    :param trees: BallTree indexes, one per data subset
    :param pt: the query point
    :param r: search radius
    :return: tuple of neighbour-index arrays, one entry per tree
    """
    # Use a local queue instead of the module-level `dataqueue` so results of
    # successive/concurrent calls cannot contaminate each other.
    results = queue.Queue()
    with ThreadPoolExecutor(max_workers=len(trees) + 1) as executor:
        futures = [executor.submit(worker, results, tree, pt, r) for tree in trees]
        # The original discarded the futures (dead `a = 1` loop), silently
        # swallowing worker exceptions; result() re-raises them here.
        for future in futures:
            future.result()
    neighbors = np.array([])
    while not results.empty():
        neighbors = np.append(neighbors, results.get())
    return tuple(neighbors)


def getGaussianParams(ptCollection):
    """Estimate the Gaussian parameters (mean, std) of the point spacing.

    Builds a Delaunay triangulation over the points and measures, for each
    simplex, the Euclidean distance between its first two vertices (one
    sampled edge per simplex, matching the original's one-distance-per-simplex
    accounting).

    :param ptCollection: (n, d) array-like of points, or None
    :return: ``[mean, std]`` of the sampled edge lengths, or ``[]`` when the
             input is empty/None or the triangulation yields no simplices
    """
    if ptCollection is None or len(ptCollection) == 0:
        return []

    pt_array = np.asarray(ptCollection)
    # Build the Delaunay triangulation; iterate one edge per simplex.
    tri = Delaunay(pt_array)

    total_sum = 0.0
    total_sq_sum = 0.0
    count = 0
    for simplex in tri.simplices:
        count += 1
        # BUG FIX: the original computed pt_array[simplex, 0] - pt_array[simplex, 1],
        # i.e. x-coordinates minus y-coordinates of the simplex vertices, which is
        # not an edge length.  Measure the distance between the first two vertices.
        currdis = np.sqrt(np.sum((pt_array[simplex[0]] - pt_array[simplex[1]]) ** 2))
        total_sum += currdis
        total_sq_sum += currdis ** 2

    if count == 0:
        return []
    miu = total_sum / count
    # Clamp tiny negative variance caused by floating-point rounding,
    # otherwise sqrt would return NaN.
    variance = max((total_sq_sum / count) - (miu ** 2), 0.0)
    sigma = np.sqrt(variance)
    return [miu, sigma]

def LOF(ptCollction: np.ndarray, neighborsNum: int) -> list:
    """Run Local Outlier Factor detection over a point collection.

    :param ptCollction: (n, d) array of points
    :param neighborsNum: number of neighbours for the LOF estimator
    :return: per-point labels (1 = inlier, -1 = outlier), or ``[]`` when
             there are fewer than 4 points
    """
    if len(ptCollction) < 4:
        return []
    detector = LOFDetect(n_neighbors=neighborsNum)
    return detector.fit_predict(ptCollction)

def writeFile(filepath, data, topk):
    """Write the retained cluster points to *filepath*, one point per line.

    :param filepath: destination text file path
    :param data: list of ``(label, points)`` pairs, largest cluster first;
                 clusters smaller than 60% of the first cluster are dropped
    :param topk: accepted for interface compatibility but currently unused
    """
    # Guard: the original crashed on data[0][1] for empty input.
    if not data:
        return
    # Size of the largest cluster; the 60% cut-off is measured against it.
    before = len(data[0][1])
    with open(filepath, 'w') as file:
        for item in data:
            points = item[1]
            if not points:
                continue
            if len(points) < before * 0.6:
                continue
            for point in points:
                file.write(str(point).replace('[', '').replace(']', '') + '\n')
    # NOTE: the original called file.close() after the with-block; the context
    # manager already closes the file, so the redundant call was removed.


def DBScanWithLOF(dataset: np.ndarray, eps: float, min_samples: int, tree: [BallTree]):
    """Hand-rolled DBSCAN-style clustering driven by per-subset BallTree radius queries.

    :param dataset: (n, d) array of points to cluster
    :param eps: neighbourhood radius forwarded to radiusSearch
    :param min_samples: NOTE(review): accepted but never used in this body -- confirm intent
    :param tree: list of BallTree indexes, forwarded to radiusSearch
    :return: (per-point labels, array of core-point indices, clusters sorted by size desc)

    NOTE(review): unlike textbook DBSCAN, every not-yet-visited point starts a
    new cluster as a core point (there is no min_samples density test), and the
    visited flag is not checked before re-processing seed-set members --
    confirm this deviation is intended.
    """
    # number of sample points
    n_samples = len(dataset)

    # indices (into dataset) of points promoted to core points
    core_pts = list()

    # visited flag per sample; nothing has been visited yet
    flags = [False for i in range(n_samples)]

    #ptDict = {str(dataset[i]) : i for i in range(n_samples)}

    # cluster label per sample; -1 means not yet assigned to any cluster
    labels = np.full(n_samples, -1)

    # maps cluster id -> list of member points
    # NOTE(review): `dict` shadows the builtin of the same name
    dict = {i: list() for i in range(n_samples)}

    c = -1

    for i in range(n_samples):
        if flags[i] is True:
            continue
        flags[i] = True
        # radiusSearch returns a tuple of index arrays; [0] takes the first one
        neighbors = radiusSearch(trees=tree, pt=dataset[i], r=eps)[0]
        c += 1
        labels[i] = c
        dict[c].append(dataset[i])
        core_pts.append(i)

        # seed set: the neighbourhood minus the core point itself
        seed_set = set(np.delete(neighbors, np.where(neighbors == i)[0]))
        # expand the cluster through every point reachable via the seed set
        while seed_set:
            j = seed_set.pop()
            labels[j] = c
            flags[j] = True
            dict[c].append(dataset[j])
            subptNeighbor = radiusSearch(trees=tree, pt=dataset[j], r=eps)[0]
            # NOTE(review): this appends a whole ndarray of neighbour *indices* as a
            # single element of the cluster's point list, mixing points with index
            # arrays and inflating len() used for sorting below -- confirm intent
            dict[c].append(np.unique(subptNeighbor))
            for s in subptNeighbor:
                seed_set.add(s)
            #seed_set.update(set(neighbors))
    #print(dict)
    sortdict = sorted(dict.items(), key=lambda x: len(x[1]),reverse=True)
    return labels, np.array(core_pts), sortdict

@profile
def DBScanWithLOF1(dataset: np.ndarray, eps: float, min_samples: int):
    """Cluster *dataset* with sklearn's DBSCAN and group the points by label.

    :param dataset: (n, d) array of points
    :param eps: DBSCAN neighbourhood radius
    :param min_samples: DBSCAN core-point density threshold
    :return: ``(unique_labels, points, sorted_clusters)`` where sorted_clusters
             is a list of (label, member points) pairs, largest cluster first;
             noise points (label -1) are excluded from the clusters
    """
    db = DBSCAN(eps=eps, min_samples=min_samples, algorithm='kd_tree')
    labels = db.fit_predict(dataset)
    uniquelabel = np.unique(labels)
    # Copy of the input points, aligned index-for-index with `labels`
    # (fit_predict returns one label per input sample).
    pts = np.array(dataset)
    # Renamed from `dict` (shadowed the builtin); the original also assigned
    # an empty {} that was immediately overwritten -- removed.
    clusters = {label: [] for label in uniquelabel if label != -1}
    for idx, label in enumerate(labels):
        if label == -1:
            continue
        clusters[label].append(dataset[idx])
    sortdict = sorted(clusters.items(), key=lambda x: len(x[1]), reverse=True)
    return uniquelabel, pts, sortdict

def loadPTS(ptsFile):
    """Load a whitespace-delimited .pts point file into a numpy array."""
    data = np.loadtxt(ptsFile)
    return data


if __name__ == '__main__':
    # basePath = 'E:\\项目\\噪点去除\\整理\\'
    # outputPath = 'E:\\项目\\噪点去除\\修复后\\'
    #test1
    #basePath = 'D:\\项目\\噪点去除\\实验\\LOF近邻数量结果比对\\原始数据'
    #outputPath = 'D:\\项目\\噪点去除\\实验\\LOF近邻数量结果比对\\修复后'
    # Hard-coded Windows input/output directories for the current experiment.
    basePath = 'D:\\项目\\噪点去除\\实验\\点云稀疏程度对结果的影响\\原始数据'
    outputPath = 'D:\\项目\\噪点去除\\实验\\点云稀疏程度对结果的影响\\修复后'
    # Process every .pts file found in the input directory.
    for file in os.listdir(basePath):
        fileName = os.path.join(basePath,file)
        outputFileName = os.path.join(outputPath,file)
        print(fileName)
        if os.path.isfile(fileName) and file.endswith('.pts'):
            print("正在处理：" + file)
            srcFile = fileName
            # NOTE(review): this dstFile is overwritten below before it is ever used
            dstFile = outputFileName + "修复后.pts"
            dataset = loadPTS(srcFile)
            print('file loaded')
            # Voxel down-sampling to thin the cloud before clustering.
            pointCloud.points = o3d.utility.Vector3dVector(dataset)
            voxelGrid = o3d.geometry.PointCloud.voxel_down_sample(pointCloud,voxelSize)
            # Points remaining after voxelisation.
            dataset = numpy.array(voxelGrid.points)
            # Voxel visualisation (disabled).
            #o3d.visualization.draw_geometries([pointCloud, voxelGrid])
            # Delaunay triangulation -> Gaussian statistics of point spacing.
            gaussianParam = getGaussianParams(dataset)

            print('gaussianParam compulated')
            # eps = |mu - 1.0 * sigma|; min_samples is fixed at 100.
            eps, min_samples = abs(gaussianParam[0] - 1.0 * (gaussianParam[1])), 100
            result =  DBScanWithLOF1(dataset=dataset,eps=eps,min_samples=min_samples)
            neighNum = -1
            print('DBSCAN FINISHED')
            # Per-cluster LOF pass: remove points the detector flags as outliers (-1).
            for item in result[2]:
                if len(item[1]) > min_samples:
                    # Use a quarter of the cluster size as the LOF neighbour count.
                    neighNum = int(len(item[1]) / 4)
                    lofList = list(LOF(item[1],neighborsNum=neighNum))
                    removeList = []
                    for i in range(len(lofList)):
                        if lofList[i] == -1:
                            removeList.append(i)
                    # Delete from the back so earlier indices remain valid.
                    for j in sorted(removeList, reverse=True):
                        del item[1][j]
            #dstFile = outputFileName + "voxelSize" + str(voxelSize) + "neighborsNum" + str(neighNum) +"修复后.pts"
            #dstFile = outputFileName + "neighborsNum" + str(neighNum) + "test1.pts"
            dstFile = outputFileName  + "test1.pts"
            writeFile(dstFile, result[2], 5)
            print('FILE FLUSHED')
