from copy import deepcopy
from concurrent.futures import ThreadPoolExecutor, wait, as_completed
import os
# Support running both as a flat script (plain imports) and as a package
# module (relative imports). Only ImportError should trigger the fallback;
# a bare `except:` would also hide SyntaxError/KeyboardInterrupt.
try:
    from cvio import cvio
    from checkMask import _labelme_format
    import imagesize
    from running_faster import Runner
except ImportError:
    from .cvio import cvio
    from .checkMask import _labelme_format
    from . import imagesize
    from .running_faster import Runner
from urllib import request, parse
import base64
import json

def encodeImg(imgName):
    """Read the file at *imgName* and return its contents base64-encoded (bytes)."""
    with open(imgName, 'rb') as fh:
        raw = fh.read()
    return base64.b64encode(raw)

def batch_loader(src, batch_size=2):
    """Yield batches of images found under *src*.

    Each batch is a dict with:
      - 'annInfos': one labelme-format annotation skeleton per image, with
        imagePath / imageWidth / imageHeight filled in;
      - 'base64s': the corresponding image file paths (base64 encoding is
        deferred to the worker task).

    The final batch may hold fewer than *batch_size* items.
    """
    imgs = cvio.load_image_list(src)

    itn = len(imgs)
    for i in range(0, itn, batch_size):
        # Clamp the end index so a trailing partial batch does not index
        # past the end of the image list (IndexError in the original).
        j = min(i + batch_size, itn)
        batch = dict(annInfos=[], base64s=[])
        for e in range(i, j):
            img = imgs[e]
            annInfo = deepcopy(_labelme_format)
            annInfo['imagePath'] = os.path.basename(img)
            w, h = imagesize.get(img)
            annInfo['imageWidth'] = w
            annInfo['imageHeight'] = h

            batch['annInfos'].append(annInfo)
            batch['base64s'].append(img)
        yield batch

def save_predict_results(i, annInfos, results, dst, silent=True):
    """Persist the detection results of batch *i* as labelme JSON files in *dst*.

    *results* is the decoded service response, expected to look like
    {'status': <int>, 'results': [<shapes per image>, ...]}.  A status of 0
    signals a server-side failure for the whole batch: every image is
    reported on stdout and nothing is written.
    """
    status = results['status']
    if status == 0:
        for ann in annInfos:
            img = ann['imagePath']
            print('Batch %d %s **detect error** !' % (i, img))
        return
    results = results['results']
    # exist_ok avoids the check-then-create race when several workers
    # reach this point concurrently.
    os.makedirs(dst, exist_ok=True)
    for annInfo, result in zip(annInfos, results):
        img = annInfo['imagePath']
        ann = os.path.join(dst, os.path.splitext(os.path.basename(img))[0] + '.json')

        annInfo['shapes'] = result
        cvio.write_ann(annInfo, ann)
        if not silent:
            print('Batch %d %s dets %d' % (i, img, len(result)))

def detect_local_images(url, model_name, score_thr, batch_size, src, dst='', mode='segm', silent=False,
                        worker='process', num_workers=4):
    """Run one detection model over every image under *src* via the service at *url*.

    Batches are queued on a Runner and then executed with *num_workers*
    workers of kind *worker* (whatever modes Runner supports, e.g. 'process').
    Results are written as labelme JSON files into *dst*, which defaults to
    <src>/<model_name>.
    """
    loader = batch_loader(src, batch_size)
    headers = dict(model_name=model_name, mode=mode)
    if dst in (None, ''):
        dst = os.path.join(src, model_name)

    print('自动打标 %s %s \n%s' % (model_name, url, src))
    # Queue every batch first, then let Runner fan the work out.
    runner = Runner()
    for i, batch in enumerate(loader, 1):
        runner.append(one_detect_task, (i, batch, dst, silent, url, score_thr, headers))
    runner.run(mode=worker, num_workers=num_workers)

def one_detect_task(i, batch, dst, silent, url, score_thr, headers):
    """Encode one batch of images, POST it to the detection service and save results."""
    annInfos = batch['annInfos']
    encoded = [encodeImg(path) for path in batch['base64s']]
    payload = parse.urlencode(dict(images=encoded, score_thr=score_thr)).encode('utf-8')
    req = request.Request(url, headers=headers, data=payload)
    raw = request.urlopen(req).read()
    response = json.loads(bytes.decode(raw))
    save_predict_results(i, annInfos, response, dst, silent)

def batch_detect_runner(url, mode='segm'):
    """Run every configured model over every configured image directory."""
    # cfg: [input_args1, input_args2, ...]
    model_cfg = [
        # dict(url=url, model_name='cascade_mask_x101_fpn_dcn_0.6_bottle', score_thr=0.5, batch_size=1),
        # dict(url=url, model_name='cascade_mask_rcnn_swin_tiny_bottle', score_thr=0.5, batch_size=1),
        # dict(url=url, model_name='maskrcnn-swin-tiny-bottle',  score_thr=0.5, batch_size=1),
        # dict(url=url, model_name='queryinst_swin_tiny_bottle', score_thr=0.5, batch_size=1),
        # dict(url=url, model_name='cascade_mask_x101_fpn_dcn_fl', score_thr=0.4, batch_size=1),
        dict(url=url, model_name='queryinst_swin_tiny_mask_bottle', score_thr=0.4, batch_size=1),
    ]
    image_paths = [
        # r'G:\data\datasets\drink\daiding_101\base_test\bkl',
        # r'G:\data\datasets\drink\daiding_101\base_test\gsblp',
        # r'G:\data\datasets\drink\daiding_101\base_test\aiot',
        # r'G:\data\datasets\drink\daiding_101\base_test\jdb',
        # r'G:\data\datasets\drink\daiding_101\base_test\jt',
        # r'G:\data\datasets\drink\daiding_101\base_test\nfsq',
        # r'G:\data\datasets\drink\daiding_101\base_test\rio',
        # r'G:\data\datasets\drink\daiding_101\base_test\whh',
        # r'G:\data\datasets\drink\daiding_101\base_test\yili_jml_xxw_dly',
        # r'G:\data\datasets\drink\daiding_101\base_test\yy',
        r'G:\data\datasets\drink\daiding_101\base_train\bkl',
        # r'G:\data\datasets\drink\daiding_101\base_train\gsblp',
    ]

    # Cross product: every directory is processed by every enabled model.
    for src in image_paths:
        print(src)
        for cfg in model_cfg:
            cfg.update(dict(src=src, dst='', silent=False, mode=mode))
            detect_local_images(**cfg)
    
if __name__ == '__main__':
    # Detection service endpoint.
    service_url = 'http://192.168.11.25:9998/object_detect'
    batch_detect_runner(service_url, mode='segm')