import os
# import openslide
from tqdm import tqdm
import time
from PIL import Image
import numpy as np
import cv2
import copy
import xml.etree.ElementTree as ET
import shutil
import argparse
import threading
import torch
import torchvision
from concurrent.futures import ProcessPoolExecutor
OPENSLIDE_PATH = r'C:\openslide-win64-20231011\bin'


from torch.utils.data import Dataset
from torchvision import transforms



class TestDataset(Dataset):
    """Dataset that lazily crops fixed-size RGB tiles from an open slide.

    Each item is the ToTensor-converted tile plus its top-left ``[x, y]``
    coordinate in level-0 slide space.
    """

    def __init__(self, image_index_list, slide, window_size):
        # Top-left (x, y) coordinate of every tile to be read.
        self.image_index_list = image_index_list
        # Open slide handle providing read_region(); assumed openslide-like.
        self.slide = slide
        # Side length in pixels of each square tile.
        self.window_size = window_size
        self.transforms = transforms.Compose([transforms.ToTensor()])

    def __getitem__(self, index):
        x0, y0 = self.image_index_list[index]
        tile = self.slide.read_region((x0, y0), 0, (self.window_size, self.window_size))
        # read_region returns RGBA; drop the alpha channel.
        tile = tile.convert('RGB')
        return self.transforms(tile), [x0, y0]

    def __len__(self):
        return len(self.image_index_list)
    
    
def predict_region(model,region,index_x, index_y,points_all,scores):
    """Run the detector on one tile and accumulate results in slide coordinates.

    ``index_x``/``index_y`` are the tile's top-left offset within the slide;
    each detected xywh box is shifted by that offset and stored as
    ``[x1, y1, x2, y2]``. Mutates and returns ``points_all`` and ``scores``.
    """
    detections = model.predict(region,imgsz=640,conf=0.5)[0]
    if len(detections.boxes) > 0:
        for conf in detections.boxes.conf:
            scores.append(copy.deepcopy(conf))
        for box in detections.boxes.xywh:
            # Convert center-based xywh to a top-left corner, then offset
            # into absolute slide coordinates.
            left = int(box[0] - box[2] / 2) + index_x
            top = int(box[1] - box[3] / 2) + index_y
            w, h = int(box[2]), int(box[3])
            points_all.append(copy.deepcopy([left, top, left + w, top + h]))
    return points_all, scores
import os
# On Python 3.8+ Windows, DLL directories are no longer found via PATH, so
# the OpenSlide binary folder must be registered explicitly before import.
if hasattr(os, 'add_dll_directory'):
    # Windows: register the OpenSlide DLL directory just for this import.
    with os.add_dll_directory(OPENSLIDE_PATH):
        import openslide
else:
    # Non-Windows (or older Python): a plain import is sufficient.
    import openslide

if __name__ == '__main__':
    # Usage example:
    # python3 automated_segmentaition_ndpi.py --ndpi_path '/media/alex/FA5EB5A15EB556DB1/uterus/2022.3.15子宫确诊'
    parser = argparse.ArgumentParser(description='Ndpi to xml')
    # NOTE(review): default=False is an odd default for a str option — if the
    # flag is omitted, os.listdir(False) below will raise. Confirm intended.
    parser.add_argument('--ndpi_path', '-f', type=str, default=False, help='Load ndpi from a .ndpi file')
    args = parser.parse_args()

    ndpi_dir = args.ndpi_path

    # Side length (px) of each square tile fed to the detector.
    window_size = 640
    batch_size = 12
    from ultralytics import YOLO

    # Load a model
    # model = YOLO(r'runs/detect/train17/weights/best.pt')  # build from YAML and transfer weights
    model = YOLO(r'/media/doge/柴犬/代码/yolov8/runs/detect/yolov8n/weights/best.pt')  # build from YAML and transfer weights
    # NOTE(review): image_transforms is never used below (TestDataset builds
    # its own transform).
    image_transforms = transforms.Compose([transforms.ToTensor()])
    for ndpi_path in os.listdir(ndpi_dir):
        if (ndpi_path[-4:]=='ndpi'):
            start_pred_time = time.time()
            ndpi_path = os.path.join(ndpi_dir,ndpi_path)

            time1 = time.time()
            slide = openslide.open_slide(ndpi_path)
            print ('opentime :',time.time()-time1)
            width, height = slide.dimensions
            print (width,height)
            index_x = 0
            index_y = 0
            index = 0
            # crop ndpi ,get images
            # Accumulators across all tiles of this slide:
            scores = []      # detection confidences, one per box
            points_all = []  # boxes as [x1, y1, x2, y2] in slide coordinates

            # NOTE(review): num is computed but never used.
            num = int(width/window_size) * int((height/window_size))

            # Enumerate top-left corners of all full tiles (edge tiles that
            # would overrun the slide are skipped).
            image_index_list = []
            for index_x in range (0,width,window_size):
                for index_y in range (0,height,window_size):
                    if index_x + window_size < width and index_y + window_size < height:
                        image_index_list.append([index_x,index_y])


            # NOTE(review): the slide handle is captured inside the dataset
            # with num_workers=12 — confirm openslide handles survive the
            # worker-process fork/pickle on the target platform.
            testdataset = TestDataset(image_index_list,slide=slide,window_size=window_size)
            testloader = torch.utils.data.DataLoader(testdataset, batch_size=batch_size, shuffle=False,num_workers=12)
   
    
            print (f'共计{len(testloader)}个batch')
            for index, data in tqdm(enumerate(testloader)):
                # After default collation, index_xy is a pair of 1-D tensors:
                # all x offsets and all y offsets of the batch.
                test_img,index_xy = data
                
                results = model.predict(test_img,imgsz=window_size,conf=0.5,stream=True)
                
                index_x,index_y = index_xy
                result_index = 0
                for result in results:
                    if len(result.boxes)>0:
                        for score in result.boxes.conf:
                            print (score)
                            scores.append(copy.deepcopy(score))
                        for xywh in result.boxes.xywh:
                            
                            xywh = xywh
                            # Convert center-based xywh to top-left corner and
                            # shift by this tile's offset within the slide.
                            x,y,w,h = int(xywh[0]-xywh[2]/2)+index_x[result_index],int(xywh[1]-xywh[3]/2)+index_y[result_index],int(xywh[2]),int(xywh[3])
                            
                            points_xyxy = [x,y,x+w,y+h]
                            points_all .append(copy.deepcopy(points_xyxy))
                    result_index+=1
            slide.close()




            # from ultralytics.engine.results

            # Load a template annotation XML; one Annotation/Regions/Region/
            # Vertices/Vertex subtree is used as a prototype for deep-copies.
            xml_path = './module.xml'

            tree = ET.parse(xml_path)
            root = tree.getroot()
            annotation =  list(root.iter('Annotation'))[0]

            # draw cells
            regions  = list(annotation.iter('Regions'))[0]
            region  = list(annotation.iter('Region'))[0]
            Vertices = list(region.iter('Vertices'))[0]
            vertex = list(Vertices.iter('Vertex'))[0]

            # Detach the prototype elements; they are re-attached below after
            # being populated with the detected boxes.
            root.remove(annotation)
            annotation.remove(regions)
            regions.remove(region)
            region.remove(Vertices)
            Vertices.remove(vertex)

            ID = 1
                        
            print (f'bbox num before nms : {len(points_all)}')
            # NOTE(review): if points_all is empty, torch.tensor([]) is rank-1
            # and batched_nms will fail — a slide with zero detections likely
            # crashes here. Confirm and guard upstream if needed.
            points_all = torch.tensor(points_all).float()
            scores = torch.tensor(scores).float()
            # All detections share class 0 so NMS runs over a single class.
            classes = torch.zeros((len(points_all))).float()
            points_keep_index = torchvision.ops.batched_nms(points_all,scores, classes,iou_threshold=0.2)
            print (f'bbox num after nms : {len(points_keep_index)}')


            # Emit one Region (a 4-vertex rectangle) per surviving box.
            for points_index in list(points_keep_index):
                point_index = points_index.item()
                points_xyxy = points_all[points_index]
                region_to_save = copy.deepcopy(region)
                Vertices_to_save = copy.deepcopy(Vertices )

                region_to_save.set('Id',str(ID))
                # Text carries the confidence, truncated to 5 characters.
                region_to_save.set('Text',str(scores[points_index].item())[:5])
                region_to_save.set('DisplayId',str(ID))
                x1,y1,x2,y2 = int(points_xyxy[0].item()),int(points_xyxy[1].item()),int(points_xyxy[2].item()),int(points_xyxy[3].item())
                # Rectangle corners in clockwise order.
                points = [
                    [x1,y1],[x2,y1],[x2,y2],[x1,y2]
                ]
                for point in points:
                    vertex_to_save = copy.deepcopy(vertex)
                    vertex_to_save.set('X',str(point[0]))
                    vertex_to_save.set('Y',str(point[1]))
                    Vertices_to_save.append(vertex_to_save)
                region_to_save.append(Vertices_to_save)
                regions.append(region_to_save)
                ID += 1

            annotation.append(regions)
            root.append(annotation)

            # Write next to the input, swapping the '.ndpi' suffix for '.xml'.
            tree.write(ndpi_path[:-5]+'.xml')

            print (f'{ndpi_path} total_time',time.time()-start_pred_time)
