import asyncio
import json, re, time
import os, sys
import random
import traceback

from common.proxy_ip import f_list
from pymysql.converters import escape_string

sys.path.append('/root/qvenv')
import redis, pymysql
import string
import requests, urllib3
from lxml import etree
from tools.insert_update import insert_sql
from tools.logout import save_log
from configs.cfg import *
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from tools.ua import user_agent_list
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from concurrent.futures import ProcessPoolExecutor
from multiprocessing import Lock
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


class JapSpider:
    """Spider for the Japanese National Tax Agency corporate-number site.

    Pipeline, driven by run() once per uppercase letter:
      1. get_cookie(wd)  -- selenium search for keyword *wd* to obtain session
                            cookies, then one POST per 100-row result page,
                            staging (cid, name, address) tuples in Redis.
      2. redis2mysql()   -- drain the Redis 'jp_data' set into MySQL in batches.
      3. handle_detail() -- fetch each company's change-history page for its
                            registration time and representative name.
    """

    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    string_temp = string.ascii_uppercase  # one crawl pass per uppercase letter

    # NOTE(review): this connection is created at class-definition time and is
    # implicitly shared with forked ProcessPoolExecutor workers; pymysql
    # connections are not fork-safe. ping() before use mitigates but does not
    # remove the hazard -- confirm workers do not write through it concurrently.
    conn = pymysql.connect(host=mysql_host, user=mysql_name, port=mysql_port, password=mysql_pwd,
                           database='spiderdb', charset='utf8')
    cur = conn.cursor()

    options = webdriver.ChromeOptions()
    options.add_argument('--disable-extensions')
    options.add_argument('--start-maximized')  # maximise the browser window
    # Hide the "Chrome is being controlled by automated test software" banner.
    options.add_experimental_option('useAutomationExtension', False)
    options.add_experimental_option('excludeSwitches', ['enable-automation'])
    options.add_argument('--disable-gpu')  # disable GPU acceleration
    options.add_argument('window-size=1920x3000')  # fixed viewport size
    options.add_argument('--no-sandbox')
    options.add_argument("--disable-setuid-sandbox")
    options.add_argument('--disable-dev-shm-usage')
    # options.add_argument('headless')  # run without a visible window

    base_url = 'https://www.houjin-bangou.nta.go.jp/en/'
    data_url = 'https://www.houjin-bangou.nta.go.jp/en/kensaku-kekka.html'
    referer_url = 'https://www.houjin-bangou.nta.go.jp/en/henkorireki-johoto.html?selHouzinNo={}'
    detail_url = 'https://www.houjin-bangou.nta.go.jp/henkorireki-johoto.html?selHouzinNo={}'
    lock = Lock()

    def get_cookie(self, wd):
        """Search the site for keyword *wd* in a real browser, harvest the
        session cookies, then fan one POST per result page out to a
        process pool (100 rows per page).

        :param wd: search keyword (a single uppercase letter from run()).
        """
        print(f'deal {wd} datas')
        driver = webdriver.Chrome(options=self.options)
        try:
            driver.implicitly_wait(20)
            driver.get(self.base_url)
            search_box = driver.find_element(By.ID, 'corp_name')
            search_box.click()
            search_box.send_keys(wd)
            search_box.send_keys(Keys.ENTER)
            WebDriverWait(driver, 30).until(
                EC.presence_of_element_located((By.CLASS_NAME, 'srhResult')))
            cookie_header = ''.join(
                f'{c["name"]}={c["value"]};' for c in driver.get_cookies())
            page_source = driver.page_source
        finally:
            # Fix: the original never quit the driver, leaking one Chrome
            # process per keyword.
            driver.quit()
        headers = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Cookie': cookie_header,
            'User-Agent': random.choice(user_agent_list)
        }
        emt = etree.HTML(page_source)
        total_num = emt.xpath('//p[@class="srhResult"]/strong/text()')[0]
        tmp_nums = int(total_num.replace(',', ''))
        # Ceiling division by the 100-rows-per-page setting used below.
        total_page = -(-tmp_nums // 100)
        print('total_page:', total_page)
        with ProcessPoolExecutor(max_workers=12) as pool:
            for page in range(1, total_page + 1):
                data = {
                    'screenTransaction': 'true',
                    'search': 'false',
                    'viewPageNo': page,
                    'viewNumAnc': 100,  # rows per result page
                    'kjscr0201510m1Table.specifiedPageNumber': page,
                    'houzinNmShTypeRbtn': 2,
                    'houzinNmTxtf': wd,
                    'prefectureLst': '',
                    'houzinKdRbtn': 0,
                    'orderRbtn': 1,
                }
                pool.submit(self.handle_request, headers, data)

    def handle_request(self, headers, data):
        """POST one result-page query and stage every row into Redis.

        Each table row yields a (cid, name, register_address) tuple stored as
        its str() form in set 'jp_data'; the cid alone goes into 'jp_cid' for
        the later detail pass.
        """
        resp = requests.post(self.data_url, headers=headers, data=data, verify=False)
        emt = etree.HTML(resp.content.decode())
        tr_items = emt.xpath('//table[@class="fixed"]/tbody/tr')
        db = redis.StrictRedis(host=redis_host, port=redis_port, password=redis_pwd, decode_responses=True,
                               db=13, health_check_interval=30, retry_on_timeout=True)
        pipeline = db.pipeline()
        for item in tr_items:
            cid = item.xpath('./th/text()')[0]
            name = item.xpath('./td[1]/text()')[0].strip()
            register_address = ''.join(i.strip() for i in item.xpath('./td[2]/text()'))
            pipeline.sadd('jp_cid', cid)
            pipeline.sadd('jp_data', str((cid, name, register_address)))
        pipeline.execute()

    def redis2mysql(self):
        """Drain the Redis 'jp_data' staging set into MySQL in batches of up
        to 1000 rows; returns when the set is empty.
        """
        # Fix: rows were parsed with eval(), which executes arbitrary code if
        # the Redis contents are ever tampered with; literal_eval only parses
        # Python literals such as the str((cid, name, addr)) tuples stored by
        # handle_request.
        from ast import literal_eval
        db = redis.StrictRedis(host=redis_host, port=redis_port, password=redis_pwd, decode_responses=True,
                               db=13, health_check_interval=30, retry_on_timeout=True)
        try:
            while True:
                remaining = db.scard('jp_data')
                if remaining <= 0:
                    break
                # Fix: the original duplicated this whole insert body across
                # a >=1000 branch and a >0 branch.
                result = db.spop('jp_data', min(remaining, 1000))
                datas = [literal_eval(i) for i in result]
                self.conn.ping()  # reconnect if the long crawl dropped the link
                sql = insert_sql('japanese_company_info',
                                 ('cid', 'name', 'register_address'),
                                 datas)
                self.cur.execute(sql)
                self.conn.commit()
                print('insert success')
        except Exception:
            traceback.print_exc()

    def handle_detail(self):
        """Consume company ids from Redis 'jp_cid' in chunks of up to 50 and
        fetch each detail page in a process pool.

        A throwaway selenium search is performed per chunk solely to refresh
        the session cookies used by the plain-requests detail fetches.
        """
        with ProcessPoolExecutor(max_workers=10) as executor:
            db = redis.StrictRedis(host=redis_host, port=redis_port, password=redis_pwd, decode_responses=True,
                                   db=13, health_check_interval=30, retry_on_timeout=True)
            while True:
                driver = webdriver.Chrome(options=self.options)
                try:
                    driver.implicitly_wait(20)
                    driver.get(self.base_url)
                    box = driver.find_element(By.ID, 'corp_name')
                    box.click()
                    box.send_keys('b')  # any keyword works; only cookies are needed
                    box.send_keys(Keys.ENTER)
                    time.sleep(10)  # let the search land so the session is valid
                    cookie_header = ''.join(
                        f'{c["name"]}={c["value"]};' for c in driver.get_cookies())
                finally:
                    # Fix: the original leaked one Chrome process per loop
                    # iteration.
                    driver.quit()
                headers = {
                    'Content-Type': 'application/x-www-form-urlencoded',
                    'Cookie': cookie_header,
                    'User-Agent': random.choice(user_agent_list)
                }
                remaining = db.scard('jp_cid')
                print(remaining)
                if remaining <= 0:
                    break
                cid_li = db.spop('jp_cid', min(remaining, 50))
                for cid in cid_li:
                    executor.submit(self.get_time, self.detail_url.format(cid), headers, cid)
                if db.scard('jp_cid') <= 0:
                    break
                time.sleep(5)  # pace the cookie-refresh loop

    def get_time(self, url, headers, cid):
        """Fetch one company's change-history page and store its registration
        time and name into MySQL.

        :param url: detail_url already formatted with *cid*.
        :param headers: cookie-bearing headers from handle_detail().
        :param cid: corporate number being fetched.
        """
        res = requests.get(url, headers=headers)
        element = etree.HTML(res.content.decode())
        pname = element.xpath('//div[@class="corpInfo"]/dl/dd[2]/text()')[0].strip()
        register_time = element.xpath(
            '//ol[contains(@class,"corpHistory1")]/li[last()]/dl[@class="corpHistory"]/dd/ul/li/span[2]/text()')[
            0].strip()
        meta = (cid, register_time, pname)
        try:
            self.conn.ping()
            # Fix: the original opened a cursor and never closed it; pymysql
            # cursors support the context-manager protocol.
            with self.conn.cursor() as cur:
                sql = insert_sql('japanese_company_info',
                                 ('cid', 'register_time', 'pname'),
                                 meta)
                cur.execute(sql)
            self.conn.commit()
            print(f'{cid}')
            print('detail info inserted')
        except Exception:  # fix: bare except also caught KeyboardInterrupt/SystemExit
            traceback.print_exc()

    def run(self):
        """Crawl every uppercase letter once, skipping letters already marked
        done in the Redis 'succeeded_wd' set so restarts do not re-crawl.
        """
        db = redis.StrictRedis(host=redis_host, port=redis_port, password=redis_pwd, decode_responses=True,
                               db=13, health_check_interval=30, retry_on_timeout=True)
        for wd in self.string_temp:
            if db.sismember('succeeded_wd', wd):
                print(f'{wd} already requested！')
                continue
            # 1. search-result pages -> Redis staging sets
            self.get_cookie(wd)
            # 2. Redis staging set -> MySQL
            self.redis2mysql()
            # 3. per-company detail pages -> MySQL
            self.handle_detail()
            # Mark the keyword done only after all three stages succeed.
            db.sadd('succeeded_wd', wd)

if __name__ == '__main__':
    # Script entry point: build the spider and run the full crawl pipeline.
    JapSpider().run()
