# -*- coding: utf-8 -*-
from concurrent import futures

from tqdm import tqdm

from db.mysqldb import MysqlDB
from utils.confident import site_checker
from utils.log import get_logger

# Module-level logger; writes to update.log under the fixed Windows log directory.
# NOTE(review): hard-coded absolute path — breaks on any other machine; confirm deploy layout.
log = get_logger(name='update.log', path=r"D:\wuzheng\log\\")

# Shared MySQL connection/helper used by the __main__ query below.
mysql_db = MysqlDB()


def save_es(url, host, site_id, site_name):
    """Check one site with `site_checker` and persist the result (to ES, per the name).

    Thin adapter so the thread pool can submit per-row work with positional args.
    The fixed options (depath/fxck_oo/minconfidence) mirror the site_checker API
    as-is — their exact semantics live in utils.confident.
    """
    fixed_opts = dict(depath=3, fxck_oo=True, save_page_info=True, minconfidence=200)
    site_checker(url, host, site_id=site_id, site_name=site_name, **fixed_opts)


def save_main(exist_item):
    """Fan `save_es` out over a small thread pool, one task per DB row.

    Args:
        exist_item: iterable of (home_page, host, site_id, site_name) tuples,
            as returned by the TAB_VIDEO_SITE_INFO query in __main__.

    Returns None. Worker failures are logged, not raised, so one bad site
    does not abort the whole batch.
    """
    len_urls = len(exist_item)
    # Cap the pool at 4 workers; skip entirely when there is nothing to do.
    p_num = min(len_urls, 4)
    if not p_num:
        return

    tasks = []
    with futures.ThreadPoolExecutor(max_workers=p_num) as executor:
        for home_page, host, site_id, site_name in tqdm(exist_item, total=len_urls, desc='add_task'):
            tasks.append(executor.submit(save_es, home_page, host, site_id, site_name))

        for task in tqdm(futures.as_completed(tasks), total=len_urls, desc='save_main'):
            # BUG FIX: result() was previously commented out, so any exception
            # raised inside save_es was silently discarded. Retrieve it and log.
            try:
                task.result()
            except Exception:
                log.exception('save_es task failed')
    # print(results)


if __name__ == '__main__':
    # Select every active, monitored, not-yet-validated video site
    # (type 3, displayed, checked OK). Rows: (URL, DOMAIN, ID, site_name).
    query = (
        "SELECT URL,DOMAIN,ID,site_name FROM TAB_VIDEO_SITE_INFO "
        "WHERE SITE_TYPE = 3 and DISPLAY_STATUS=1 and CHECK_TYPE=1 and CHECK_STATUS=1 AND MONITOR_STATUS =1 AND IS_VALIDITY =0 "
        # "and ID=1000003509"
    )
    rows = mysql_db.find(query)
    print(rows)

    save_main(rows)
