from os import name
import argparse
import os,sys
sys.path.append(os.getcwd())

from src.utils import read_txt_embeddings, normalize_embeddings, export_embeddings

# Prefer Faiss for fast nearest-neighbor search; degrade gracefully when the
# library is missing entirely or was built without GPU support.
try:
    import faiss
except ImportError:
    FAISS_AVAILABLE = False
    sys.stderr.write("Impossible to import Faiss library!! "
                     "Switching to standard nearest neighbors search implementation, "
                     "this will be significantly slower.\n\n")
else:
    FAISS_AVAILABLE = True
    # CPU-only builds of Faiss lack the GPU resource class.
    if not hasattr(faiss, 'StandardGpuResources'):
        sys.stderr.write("Impossible to import Faiss-GPU. "
                         "Switching to FAISS-CPU, "
                         "this will be slower.\n\n")

def get_nn_dist_distrib(emb, knn = 6):
    """Print per-neighbor-rank statistics (min/max/mean inner-product score,
    plus a count of scores above 1 - 1e-1) for the `knn` nearest neighbors
    of every embedding, searched against the embeddings themselves.

    Silently does nothing when Faiss is unavailable.
    NOTE(review): assumes `emb` is a torch tensor (it is moved to CPU and
    converted via .numpy()); scores are raw inner products — confirm rows
    are unit-normalized if these are meant as cosine similarities.
    """
    if not FAISS_AVAILABLE:
        return
    vectors = emb.cpu().numpy()
    dim = vectors.shape[1]
    if hasattr(faiss, 'StandardGpuResources'):
        # GPU build: exact flat inner-product index on device 0.
        resources = faiss.StandardGpuResources()
        cfg = faiss.GpuIndexFlatConfig()
        cfg.device = 0
        index = faiss.GpuIndexFlatIP(resources, dim, cfg)
    else:
        # CPU build: exact flat inner-product index.
        index = faiss.IndexFlatIP(dim)
    index.add(vectors)
    # Queries are the indexed vectors themselves, so each point's own match
    # is among the `knn` results.
    distances, _ = index.search(vectors, knn)
    print('\tmin_nn', list(distances.min(axis=0)))
    print('\tmax_nn', list(distances.max(axis=0)))
    print('\tavg_nn', list(distances.mean(axis=0)))
    print('\tthres', list((distances > 1 - 1e-1).sum(axis=0)))

if __name__ == '__main__':
    # Experiment: check that applying 'renorm' then 'center' four times in a
    # loop is equivalent to a single normalize_embeddings call with the
    # composite spec "renorm,center," * 4.

    # Build the parameter bag directly. The previous
    # argparse.ArgumentParser().parse_args() call declared no options, so it
    # would abort the whole script ("unrecognized arguments") if the user
    # passed any command-line argument; Namespace() is the direct equivalent.
    params = argparse.Namespace()
    params.src_lang = 'vi'
    params.tgt_lang = 'zh'
    params.src_emb = 'data/wiki.vi.vec'
    params.tgt_emb = 'data/wiki.zh.vec'
    params.emb_dim = 300
    params.max_vocab = 200000
    params.cuda = True
    params.export = "pth"
    params.exp_path = "dumped/tmp_itrecent_wordvec"

    # Load the source-language embeddings (dictionary + embedding tensor).
    params.src_dico, src_emb = read_txt_embeddings(params, True, False)

    print('Read embedding')

    # Untouched copy for the one-shot composite normalization below.
    src_emb2 = src_emb.clone()
    for i in range(4):
        normalize_embeddings(src_emb, 'renorm')

        src_mean = normalize_embeddings(src_emb, 'center')

        # Squared L2 norm of the shortest embedding after this round, and the
        # norm of the mean removed by centering (should shrink per round).
        src_minlen = (src_emb**2).sum(dim=1).min().item()

        print(i, 'minlen', src_minlen, 'mean_norm', src_mean.norm().item())

        # Optional diagnostics, off by default:
        # get_nn_dist_distrib(src_emb)
        # get_nn_dist_distrib(tgt_emb)
        # print()

    # Same four rounds expressed as one comma-separated spec; both printed
    # values should be ~0 if the two paths agree.
    mean2 = normalize_embeddings(src_emb2, 'renorm,center,'*4)
    print('directly: ', ((src_emb2-src_emb)**2).sum().item(), (mean2**2).sum().item())