# -*- coding: utf-8 -*-

import datetime
import os
import random
import threading
import time
import traceback
import urllib
import urllib.parse

import requests
from fake_useragent import UserAgent
from pyvirtualdisplay import Display
from selenium import webdriver

from BashouScrapy.pipelines import MongoBase
from BashouScrapy.spiders.wenshu_list import ListContentHeader
from BashouScrapy.wenshu.captcha import submit_complex_code

# Per-thread scratch storage (requests session, cookie-change counter, etc.).
thread_local_data = threading.local()

class SpiderConf(object):
    """Static configuration for the wenshu.court.gov.cn spider.

    All settings are plain class attributes; the class is used as a
    namespace and is not meant to be instantiated (the previous no-op
    ``__init__`` has been dropped — the default constructor suffices).
    """

    # Maximum number of result rows the site returns for a single query.
    MAX_TOTAL_RESULT = 2000
    # Start of the "upload date" range used in the search Param condition.
    BEGIN_DAY = "2013-01-01"
    # End of the "upload date" range used in the search Param condition.
    END_DAY = "2013-12-31"
    # Year cutoff (default 2014): years before this have little data,
    # so they are crawled a whole year at a time.
    MID_YEAR = 2014
    # Absolute path to the local chromedriver binary.
    chrome_driver_path = r"/Users/bashou/Documents/Dir/chromedriver"
    # True -> run the browser inside a virtual display (headless host).
    no_monitor = False
    # True -> use Chrome; False -> fall back to Firefox.
    is_chrome = True
    # True -> route browser traffic through the proxy pool (currently unused).
    use_proxy = False
    # True -> this host may re-dial its PPPoE connection to rotate its IP.
    is_dial_server = False
    # host:port of the proxy pool service.
    proxy_pool_server = '192.168.0.152:8000'

def post_data_encode(post_data):
    """Serialise *post_data* into the fixed-order query string the list
    API expects: "Param=...&Index=...&Page=...&Order=...&Direction=...&"
    (the trailing '&' is intentional and tolerated by the endpoint).
    """
    field_order = ('Param', 'Index', 'Page', 'Order', 'Direction')
    return ''.join('{0}={1!s}&'.format(name, post_data[name]) for name in field_order)

def get_param_data(data_str):
    """Parse a raw "k=v&k=v" query string into a dict of decoded values.

    Fixes over the previous version:
    - splits each pair on the FIRST '=' only, so values that themselves
      contain '=' survive intact (the old ``split("=")`` truncated them);
    - uses ``unquote_plus`` so '+' decodes to a space while a literal '+'
      encoded as %2B is preserved (the old unquote-then-replace turned
      %2B into a space);
    - skips empty segments (e.g. a trailing or doubled '&') instead of
      raising IndexError.
    """
    result = {}
    for pair in data_str.replace("&&", "&").split("&"):
        if not pair:
            continue  # tolerate leading/trailing or doubled '&'
        key, _, value = pair.partition("=")
        result[str(key)] = urllib.parse.unquote_plus(value)
    return result

def get_cookie_dict_from_str(cookies):
    """Convert a "name=value; name=value;" cookie string (as produced by
    get_cookie_by_selenium) into a plain dict.

    Fixes over the previous version: the trailing ';' separator is
    stripped from each value (it used to leak into the stored value),
    only the first '=' splits name from value so base64-style values
    containing '=' are preserved, and empty tokens are skipped instead
    of raising IndexError.
    """
    cookie_dict = {}
    for token in cookies.split(" "):
        token = token.rstrip(";")
        if not token:
            continue
        name, _, value = token.partition("=")
        cookie_dict[name] = value
    return cookie_dict

def get_cookie_with_retry(cookie_type):
    """Fetch a cookie string via selenium, retrying up to 20 times.

    For ``cookie_type == 1`` a single failed attempt gives up immediately
    (the caller handles retrying at a higher level); for other types each
    failure triggers a PPPoE re-dial to rotate the IP and, if another
    worker was already dialling, a cool-down sleep before the next try.
    Returns '' when no cookie could be obtained.
    """
    for _ in range(20):
        cookie_str = get_cookie_by_selenium(cookie_type)
        if cookie_str != '':
            return cookie_str
        if cookie_type == 1:
            # Content cookies: fail fast rather than re-dialling.
            return ''
        if redial('redial') is True:
            time.sleep(93)
    return ''

def get_cookie_by_selenium(cookie_type, use_global_proxy_conf=True):
    """Open a real browser against wenshu.court.gov.cn and harvest its cookies.

    cookie_type 1 loads a random document-content page; cookie_type 2 loads
    the civil-case search-list page.  Returns the cookies joined as a
    "name=value; name=value;" string, or "" on any failure.
    """
    print ('get_cookie_by_selenium-----------')
    # NOTE: the try...except...finally here is mandatory — if anything raises
    # or times out without it, the browser window is never closed (and we'd
    # need a crontab job to periodically kill the defunct chromium processes).
    driver = None
    display = None
    try:
        display, driver = get_chrome_driver(use_global_proxy_conf)
        if not driver:
            return ""
        # driver.implicitly_wait(60)
        # Known-good document IDs; one is picked at random per visit so the
        # access pattern is less predictable.
        doc_id_list = [
            "4a5c7734-fbb6-447b-a036-02191d3ee2b7", "27241ed4-619d-4d0e-a18c-a74500f0e6ca",
            "d6a12c3c-cdb5-4147-8fc3-a74500f0e6eb",
            "1c83095f-396a-442c-831b-a74500f0e6ae", "5b19caf4-3858-4796-b241-a74500f0e702"
        ]
        # driver.get("http://wenshu.court.gov.cn")
        # cookie_list = driver.get_cookies()
        if cookie_type == 1:
            # driver.set_page_load_timeout(60)
            driver.get("http://wenshu.court.gov.cn/content/content?DocID=" + random.choice(doc_id_list))
        elif cookie_type == 2:
            driver.get(
                "http://wenshu.court.gov.cn/List/List?sorttype=1&conditions=searchWord+2+AJLX++%E6%A1%88%E4%BB%B6%E7%B1%BB%E5%9E%8B:%E6%B0%91%E4%BA%8B%E6%A1%88%E4%BB%B6")
        time.sleep(1)
        # Under load the site serves a captcha interstitial ("traffic is
        # heavy, please enter the verification code"); solve it before
        # reading the cookies.
        if driver.page_source.find("网站当前访问量较大，请输入验证码后继续访问") != -1:
            submit_complex_code(driver)
        cookie_list = driver.get_cookies()
        cookie_str_list = []
        for cookie_dict in cookie_list:
            cookie_str_list.append("{0}={1};".format(cookie_dict["name"], cookie_dict["value"]))
        cookie_str = " ".join(cookie_str_list)
        print("Cookie str:", cookie_str)
    except Exception as e:
        print ("lxw get_cookie_by_selenium Exception: {0}\n{1}\n{2}\n\n".format(e, traceback.format_exc(), "--" * 30))
        return ""
    else:
        return cookie_str
    finally:
        # Always tear down the browser and the virtual display, success or not.
        if driver:
            driver.quit()
        if display:
            display.stop()

def get_chrome_driver(use_global_proxy_conf=True):
    """Build a (display, driver) pair according to SpiderConf.

    Starts a pyvirtualdisplay when SpiderConf.no_monitor is set, launches
    Chrome (or Firefox when is_chrome is False) and applies page-load and
    script timeouts.  Returns (None, None) if anything fails, after tearing
    down whatever was already started.

    :param use_global_proxy_conf: reserved for the (currently commented-out)
        proxy configuration path — presumably gates the --proxy-server flag;
        TODO confirm before re-enabling.
    """
    display = None
    driver = None
    try:
        options = webdriver.ChromeOptions()
        options.add_argument('lang=zh_CN.UTF-8')
        # Swap the User-Agent header (currently disabled):
        # options.add_argument(
        #     'user-agent = Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36')
        # prefs = {"profile.managed_default_content_settings.images": 2}
        # options.add_experimental_option("prefs", prefs)
        options.add_argument("--no-sandbox")
        # if use_global_proxy_conf and SpiderConf.use_proxy:
        #     options.add_argument('--proxy-server=' + thread_local_data.socks_proxy_str)
        if SpiderConf.no_monitor:
            display = Display(visible=0, size=(800, 800))
            display.start()
        if SpiderConf.is_chrome:
            driver = webdriver.Chrome(executable_path=SpiderConf.chrome_driver_path,
                                      chrome_options=options)
        else:
            driver = webdriver.Firefox()
        # Set the timeouts
        driver.set_page_load_timeout(20)
        driver.set_script_timeout(20)  # both settings must be applied for the timeout to take effect
    except Exception as e:
        print ("lxw get_chrome_driver() Exception: {0}\n{1}\n{2}\n\n".format(e, traceback.format_exc(), "--" * 30))
        # Clean up whatever was started before reporting failure.
        if display:
            display.stop()
        if driver:
            driver.quit()
        return None, None
    else:
        return display, driver

def init_cookies_for_content(header):
    """Lazily create this thread's requests session and, if the shared
    content cookie has not been fetched yet, obtain it via selenium and
    install it (plus a random User-Agent) on the session.

    The previous version probed for the session attribute with a bare
    attribute access wrapped in a broad ``except Exception``, which would
    also swallow unrelated errors; ``hasattr`` expresses the intent directly.
    """
    if not hasattr(thread_local_data, 'session'):
        # First use on this thread: build a session with keep-alive disabled.
        requests.adapters.DEFAULT_RETRIES = 5
        thread_local_data.session = requests.Session()
        thread_local_data.session.keep_alive = False
    if ListContentHeader.cookie is None:
        ListContentHeader.cookie = get_cookie_with_retry(1)
        header['User-Agent'] = UserAgent().random
        thread_local_data.session.headers.update(header)
        thread_local_data.session.cookies.update(get_cookie_dict_from_str(ListContentHeader.cookie))

def increase_change_cookie_count():
    """Count cookie changes on this thread; once the counter exceeds 10,
    invalidate the shared cookie and force a PPPoE re-dial for a fresh IP.

    The previous version incremented inside ``try`` and reset to 0 in a
    broad ``except Exception``, which masked any real failure; ``getattr``
    with a default reproduces the same counter sequence (first call -> 0)
    without swallowing errors.
    """
    thread_local_data.change_cookie_count = getattr(thread_local_data, 'change_cookie_count', -1) + 1
    print ('change_cookie_count:' + str(thread_local_data.change_cookie_count))
    if thread_local_data.change_cookie_count > 10:
        thread_local_data.change_cookie_count = 0
        ListContentHeader.cookie = None
        already_redial = redial('redial')
        if already_redial is True:
            # Another worker is mid-dial: back off while the IP changes.
            thread_local_data.change_cookie_count = 0
            time.sleep(93)

def redial(sh_name='reconnect'):
    """Re-dial the PPPoE connection to rotate the public IP.

    A lock file on disk serialises dialling across workers: if it already
    exists, another worker is mid-dial, so return True and let the caller
    back off.  Returns None otherwise (after dialling, or immediately when
    this host is not the dial server), matching the original contract that
    only "someone else is dialling" yields True.

    Fixes: previously the lock file was created even when
    ``SpiderConf.is_dial_server`` was False and never removed in that path,
    so every later redial() permanently hit the "already dialling" branch;
    it was also leaked if the dial commands raised.  The lock is now only
    taken when actually dialling and is always released in ``finally``.
    """
    file_name = '/root/redial_log.txt'
    if os.path.exists(file_name):
        print ("正在重新拨号------")
        return True
    if not SpiderConf.is_dial_server:
        # Not the dial host: nothing to do, and no lock to take.
        return
    # Take the lock without shelling out to `touch`.
    open(file_name, 'a').close()
    try:
        print (str(datetime.datetime.now()) + "开始重新pppoe连接")
        # Close the Mongo connection before the network drops.
        MongoBase.mongo_client.close()
        sh_file = '/usr/bin/bash /root/' + sh_name + '.sh'
        print (sh_file)
        os.system(sh_file)
        time.sleep(20)
    finally:
        # Always release the lock, even if the dial script fails.
        os.remove(file_name)
    print (str(datetime.datetime.now()) + "重新pppoe连接完成")



# if __name__ == '__main__':
#     r = requests.Session().get("http://wenshu.court.gov.cn/List/List?sorttype=1&conditions=searchWord+2+AJLX++%E6%A1%88%E4%BB%B6%E7%B1%BB%E5%9E%8B:%E6%B0%91%E4%BA%8B%E6%A1%88%E4%BB%B6")
#     vjkl5 = r.cookies.get(name='vjkl5')
#     if(vjkl5 is not None):
#         cookies_str = 'vjkl5='+r.cookies.get(name='vjkl5')
#     print (r)
#     pass