import os
import random
import shutil
import threading
import time
from datetime import datetime

import requests
from zc_core.client.mongo_client import Mongo
from requests.exceptions import ProxyError
from zc_core.dao.cookie_dao import CookieDao
from zc_core.middlewares.proxies.zhima_pool import ZhimaProxyPool

from esgcc.util.login import SeleniumLogin
from zc_core.util.batch_gen import time_to_batch_no

# Browser-like request headers; Host is pinned to the target site and the
# User-Agent mimics a QQBrowser desktop client to avoid trivial bot filtering.
headers = {
    'Host': 'b.esgcc.com.cn',
    'Connection': 'keep-alive',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3741.400 QQBrowser/10.5.3863.400',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9',
}
cookie_dao = CookieDao()
# cookie = cookie_dao.get_cookie('zc:cookie:esgcc')
# NOTE(review): session cookie is hard-coded below (the dao lookup above is
# commented out), so the SeleniumLogin fallback in the `if not cookie` branch
# is effectively dead code — confirm whether the dao lookup should be restored.
cookie = {"__s_f_c_s_": "08A4F7F4B462071DD0A4CC0F995EF3E4", "__d_s_": "08A4F7F4B462071DD0A4CC0F995EF3E4", "__t_c_k_": "5874b56602a345b1b61a5e5a24d0d668", "JSESSIONID": "2D7CF38F87331A488DE5CE5A650A963E"}
if not cookie:
    # Fall back to a browser-driven login to obtain fresh cookies.
    cookie = SeleniumLogin().get_cookies()
# Serialize the four session cookies into a single Cookie header value.
headers['Cookie'] = '__s_f_c_s_={}; __d_s_={}; __t_c_k_={}; JSESSIONID={}'.format(
    cookie.get('__s_f_c_s_'),
    cookie.get('__d_s_'),
    cookie.get('__t_c_k_'),
    cookie.get('JSESSIONID')
)

# One HTTP session shared by all downloader threads (connection reuse).
session = requests.Session()

# Spawned Downloader threads, kept so __main__ can join them at the end.
thread_pool = list()
# Bounded semaphore throttling the number of in-flight downloads to 1.
thread_max = threading.BoundedSemaphore(1)

# Rotating proxy pool; currently unused by Downloader.run (proxy arg is '').
proxy_pool = ZhimaProxyPool({
    'MIN_PROXY_POOL_SIZE': 2,
    'PROXY_AMOUNT_PRE_LOAD': 1,
})


class Downloader(threading.Thread):
    """Worker thread that downloads one certificate file.

    Saves the response body to ``full_path`` and mirrors it into
    ``pool_path``. Relies on module-level ``session``, ``headers``,
    ``proxy_pool`` and ``thread_max``.
    """

    def __init__(self, url, full_path, pool_path, full_dir, pool_dir):
        threading.Thread.__init__(self)
        self.url = url              # download URL
        self.full_path = full_path  # batch-specific target file path
        self.pool_path = pool_path  # shared "pool" copy of the file
        self.full_dir = full_dir    # directory containing full_path
        self.pool_dir = pool_dir    # directory containing pool_path

    def run(self):
        try:
            # proxy = proxy_pool.get_proxy()
            proxy = ''  # proxying currently disabled; kept for log format
            print("下载 [%s] %s" % (proxy, self.url))
            try:
                rsp = session.get(
                    url=self.url,
                    # proxies={'http': 'http://{}'.format(proxy)},
                    headers=headers,
                    verify=False,
                    stream=True,
                    timeout=(15, 120)
                )
                try:
                    if rsp.status_code != 200:
                        # Bad response: drop the proxy, do NOT persist the
                        # body (previously an error page was saved as the file).
                        proxy_pool.remove_proxy(proxy)
                        print("移除代理1 %s" % proxy)
                    else:
                        os.makedirs(self.full_dir, exist_ok=True)
                        os.makedirs(self.pool_dir, exist_ok=True)
                        with open(self.full_path, 'wb') as f:
                            f.write(rsp.content)
                        # if os.path.exists(self.full_path) and not os.path.exists(self.pool_path):
                        if os.path.exists(self.full_path):
                            shutil.copy(self.full_path, self.pool_path)
                            # Fixed: was the module global `full_path`
                            # (NameError outside __main__), not the instance's.
                            print("复制 %s" % self.full_path)
                finally:
                    # Streamed responses must be closed to release the socket.
                    rsp.close()
            except Exception as ex:
                if isinstance(ex, ProxyError):
                    proxy_pool.remove_proxy(proxy)
                    print("移除代理2 %s" % proxy)
                # Best-effort download: log every failure instead of
                # silently swallowing non-proxy errors.
                print(ex)
        finally:
            # Always free the concurrency slot, even on failure.
            thread_max.release()
        time.sleep(0.3)


if __name__ == '__main__':
    root_dir = 'G:/esgcc/'
    # batch_no = '20210518'
    batch_no = str(time_to_batch_no(datetime.now()))

    # Fetch this batch's certificate records and download them in random order.
    items = Mongo().list('cert_item_{}'.format(batch_no))
    random.shuffle(items)
    for item in items:
        sku_id = item.get('skuId')
        url = item.get('url')
        full_path = root_dir + item.get('path')

        # Already downloaded in a previous run — skip.
        if os.path.exists(full_path):
            print("存在 %s" % full_path)
            continue

        # Derive the batch directory, the shared pool directory and the
        # pool-side copy path for this SKU.
        full_dir = root_dir + '/cert/{}/{}'.format(batch_no, sku_id)
        pool_dir = root_dir + '/cert/pool/{}'.format(sku_id)
        pool_path = full_path.replace(batch_no, 'pool')

        # Block until a concurrency slot is free (released by Downloader.run).
        thread_max.acquire()
        worker = Downloader(url, full_path, pool_path, full_dir, pool_dir)
        thread_pool.append(worker)
        worker.start()

    # Wait for every spawned downloader to finish before declaring done.
    for worker in thread_pool:
        worker.join()

    print('任务完成')
