import argparse
from typing import Union, Optional, Dict
import logging
from pathlib import Path
import pprint
import collections.abc as collections
from tqdm import tqdm
import h5py
import torch
import json
from . import Model
from .Model.base_module import dynamic_load
from .utilize.parsers import names_to_pair, parse_image_lists
from .utilize.io import list_h5_names
import tqdm
# Matcher configurations, keyed by a short human-readable name.
# Each entry carries the output-file stem ('output') and the settings
# passed to the matching model ('model').
confs = {
    'superglue_outdoor': {
        'output': 'matches-superglue_outdoor',
        'model': {
            'name': 'superglue',
            'weights': 'outdoor',
            'sinkhorn_iterations': 50,
        },
    },
    'superglue_indoor': {
        'output': 'matches-superglue_indoor',
        'model': {
            'name': 'superglue',
            'weights': 'indoor',
            'sinkhorn_iterations': 50,
        },
    },
    'NN-superpoint': {
        'output': 'matches-NN-mutual-dist.7',
        'model': {
            'name': 'nn',
            'do_mutual_check': True,
            'distance_threshold': 0.7,
        },
    },
}


def main(conf: Dict, features: Union[Path, str],
         export_dir: Optional[Path] = None, matches: Optional[Path] = None):
    """Match local features between all image pairs in a feature file.

    Args:
        conf: One of the entries of ``confs`` (must contain an 'output' key).
        features: Either a path to an existing feature HDF5 file, or a
            feature-file stem to be resolved inside ``export_dir``.
        export_dir: Directory containing the feature file; required when
            ``features`` is given as a name rather than a path.
        matches: Output path for the matches HDF5 file; required when
            ``features`` is given as a path, derived from ``conf['output']``
            otherwise.

    Returns:
        Tuple ``(matches, pairs)``: the matches HDF5 path and the text file
        listing the matched image pairs.

    Raises:
        ValueError: If the path/name argument combination is inconsistent.
    """
    if isinstance(features, Path) or Path(features).exists():
        # ``features`` is an existing file: the caller must also say where
        # the matches go.
        features = Path(features)
        if matches is None:
            raise ValueError('Either provide both features and matches as Path'
                             ' or both as names.')
    else:
        # ``features`` is a name: resolve it inside ``export_dir``.
        if export_dir is None:
            raise ValueError('Provide an export_dir if features is not'
                             f' a file path: {features}.')
        features = Path(export_dir, features+'.h5')

    # Derive the pairs list path unconditionally: the original only set it
    # in the name branch, which raised UnboundLocalError for Path inputs.
    pairs = Path(f'{features}_{conf["output"]}.txt')
    if matches is None:
        matches = Path(f'{features}_{conf["output"]}.h5')

    match_from_paths(conf, matches, features, pairs)

    return matches, pairs


@torch.no_grad()
def match_from_paths(conf, matches_path, features, pairs_path):
    """Exhaustively match all image pairs found in a feature HDF5 file.

    Builds every unordered pair of images present in ``features``, runs the
    matcher described by ``conf['model']`` on each pair, writes the pair
    names (one "name0 name1" per line) to ``pairs_path``, and stores
    'matches0' (and 'matching_scores0' when the model produces it) per pair
    in the HDF5 file ``matches_path``.

    Args:
        conf: Matcher configuration (one entry of ``confs``).
        matches_path: Output HDF5 path for the matches. If it already
            exists, matching is skipped entirely.
        features: Path to the input feature HDF5 file (one group per image,
            each containing keypoint datasets plus an 'image_size' dataset).
        pairs_path: Output text file listing the matched pairs.
    """
    # Fall back to CPU when no GPU is present (was hard-coded to 'cuda').
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    logging.info('Matching local features with configuration:'
                 f'\n{pprint.pformat(conf)}')

    model_class = dynamic_load(Model, conf['model']['name'])
    model = model_class(conf['model']).eval().to(device)

    with h5py.File(str(features), 'r') as fd:
        names = list(fd.keys())
        # All unordered pairs (i < j) of images in the feature file.
        pairs = [(names[i], names[j])
                 for i in range(len(names))
                 for j in range(i + 1, len(names))]

        if matches_path.exists():
            # Nothing to do; previously this still truncated pairs_path to
            # an empty file and leaked its handle.
            logging.info('Matches file %s already exists, skipping.',
                         matches_path)
            return

        # Open both output files once, instead of reopening the matches
        # HDF5 file for every pair, and close them deterministically.
        with open(pairs_path, 'w') as match_list, \
                h5py.File(str(matches_path), 'a') as f_match:
            for name0, name1 in tqdm.tqdm(pairs):
                data = {}
                for idx, name in enumerate((name0, name1)):
                    grp = fd[name]
                    for k, v in grp.items():
                        data[k + str(idx)] = torch.from_numpy(
                            v.__array__()).float().to(device)
                    # Matchers only need the image shape, not the pixels:
                    # provide an empty tensor of size (1, W, H reversed).
                    data[f'image{idx}'] = torch.empty(
                        (1,) + tuple(grp['image_size'])[::-1])
                # Add the batch dimension expected by the model.
                data = {k: v[None] for k, v in data.items()}

                pred = model(data)

                match_list.write(f'{name0} {name1}\n')
                grp_match = f_match.create_group(name0 + ' ' + name1)
                grp_match.create_dataset(
                    'matches0',
                    data=pred['matches0'][0].cpu().short().numpy())
                if 'matching_scores0' in pred:
                    grp_match.create_dataset(
                        'matching_scores0',
                        data=pred['matching_scores0'][0].cpu().half().numpy())
                # Release per-pair tensors before the next iteration.
                del pred, data

    logging.info('Finished exporting matches.')
