
import faiss
import torch
import logging
import numpy as np
from tqdm import tqdm
from torch.utils.data import DataLoader
from torch.utils.data.dataset import Subset
from model.search.LocFeature2ImgIndex import LocFeature2ImgIndex
import h5py
from os.path import join
from pathlib import Path
import os
from tqdm import tqdm

class LocalFeatureSet:
    """In-memory and on-disk (HDF5) store for per-image local features.

    Features are keyed by integer image id; each entry is a dict with
    "descriptors" and "keypoints" arrays.  Data is persisted to
    ``<feature_dir>/features.h5`` as one HDF5 group per image, with an
    optional "metadata" subgroup holding filename/position attributes.
    """

    def __init__(self, config=None, context=None) -> None:
        # imgid -> {"descriptors": ..., "keypoints": ...}
        self._feature_set = {}
        # When True, features added via add_features are also kept in RAM.
        self._save_in_mem = True
        # Optional dataset object used to look up filenames/positions when
        # writing metadata (taken from context.dataset).
        self._dataset = None
        # imgid -> metadata dict (e.g. {"filename": ..., "xyz": ...})
        self._img_metadata = {}
        self._feature_dir = "features"
        self._imageset_dir = ""

        if config:
            dataset_name = config["data"]["dataset_name"]
            features_dir = config["features"]["features_dir"]
            datasets_folder = config["data"]["datasets_folder"]

            # Fall back to the default "features" subdirectory when the
            # config leaves features_dir empty.
            fdir = features_dir if len(features_dir) > 0 else "features"

            self._imageset_dir = join(datasets_folder, dataset_name, "images", "test")
            self._feature_dir = join(self._imageset_dir, fdir)
        if context:
            self._dataset = context.dataset

    def set_feature_dir(self, dirname):
        """Point the feature store at ``<imageset_dir>/<dirname>``."""
        self._feature_dir = join(self._imageset_dir, dirname)

    # Add feature points for one image.
    def add_features(self, imgid, features):
        """Store features for ``imgid`` (in memory if enabled) and persist
        them to the HDF5 file immediately."""
        if self._save_in_mem:
            self._feature_set[imgid] = features
        self._write_features(imgid, features)

    def get_features(self):
        """Return the full imgid -> features mapping."""
        return self._feature_set

    def get_imgids(self):
        """Return a view of all image ids currently held in memory."""
        return self._feature_set.keys()

    def get_features_by_id(self, imgid):
        """Return the features for ``imgid``; raises KeyError if absent."""
        return self._feature_set[imgid]

    def get_id_by_fname(self, fname):
        """Linear search of the loaded metadata for an image filename.

        Returns the matching image id, or -1 when no image has that name.
        """
        for img_id, md in self._img_metadata.items():
            if md["filename"] == fname:
                return img_id
        return -1

    def get_feature_dir(self):
        """Return the directory holding features.h5."""
        return self._feature_dir

    def read(self, fdir=None):
        """Load all features and metadata from ``<feature_dir>/features.h5``.

        Parameters
        ----------
        fdir : str, optional
            Directory overriding the configured feature directory.

        Returns
        -------
        int
            0 on success, -1 when the HDF5 file does not exist.
        """
        if fdir is not None:
            self._feature_dir = fdir

        feature_path = Path(self._feature_dir) / "features.h5"
        if not feature_path.exists():
            return -1

        logging.debug("START Read Features")
        with h5py.File(str(feature_path), 'r') as fd:
            pbar = tqdm(total=len(fd))
            for imgid_str, imdata in fd.items():
                pbar.update()
                # "imageid_index" is a bookkeeping group, not an image entry.
                if imgid_str == "imageid_index":
                    continue

                # Features: materialize the HDF5 datasets as arrays.
                imgid = int(imgid_str)
                self._feature_set[imgid] = {
                    "descriptors": imdata["descriptors"].__array__(),
                    "keypoints": imdata["keypoints"].__array__(),
                }

                # Copy per-image metadata attributes into a plain dict.
                self._img_metadata[imgid] = dict(imdata["metadata"].attrs.items())

            pbar.close()

        return 0

    def write(self):
        """No-op: features are written incrementally by add_features; the
        index-writing helpers below are invoked externally when needed."""
        pass

    def _write_features(self, imgid, features):
        """Write one image's features (and optional metadata) into
        features.h5, replacing any existing group for ``imgid``."""
        imgidstr = str(imgid)
        dataset = {
            "descriptors": features["descriptors"],
            "keypoints": features["keypoints"],
        }

        # makedirs (not mkdir) so missing parent directories are created too;
        # exist_ok avoids a TOCTOU race with concurrent writers.
        os.makedirs(self._feature_dir, exist_ok=True)

        feature_path = join(self._feature_dir, "features.h5")
        with h5py.File(str(feature_path), 'a') as fd:
            if imgidstr in fd:
                del fd[imgidstr]
            grp = fd.create_group(imgidstr)

            if self._dataset is not None:
                mg = grp.create_group("metadata")
                filename = self._dataset.get_filename_byid(imgid)
                posarr = self._dataset.get_namedata_by_id(imgid)
                mg.attrs["filename"] = str(filename)
                # NOTE(review): posarr[0] is skipped and indices 1..3 are
                # assumed to be x, y, z — confirm against the dataset's
                # name-data layout.
                mg.attrs["xyz"] = [float(posarr[1]), float(posarr[2]), float(posarr[3])]

            for k, v in dataset.items():
                grp.create_dataset(k, data=v)

    def _write_f2i_index(self):
        """Persist the feature->image index arrays under "imageid_index".

        NOTE(review): relies on ``self._imgid_index``, which is never
        assigned inside this class — it must be set externally first.
        """
        dataset = {
            "feature_index": self._imgid_index._featureCountArr,
            "image_index": self._imgid_index._indexArr,
        }

        groupstr = "imageid_index"
        feature_path = join(self._feature_dir, "features.h5")
        with h5py.File(str(feature_path), 'a') as fd:
            if groupstr in fd:
                del fd[groupstr]
            grp = fd.create_group(groupstr)
            for k, v in dataset.items():
                grp.create_dataset(k, data=v)

    def _write_feature_index(self):
        """Serialize the faiss index to ``<feature_dir>/features.index``.

        NOTE(review): relies on ``self._faiss``, which is never assigned
        inside this class — it must be set externally first.
        """
        feature_index_path = join(self._feature_dir, "features.index")
        faiss.write_index(self._faiss, feature_index_path)