import asyncio
import json, re, time
import os, sys
import random
import traceback

from common.proxy_ip import f_list

sys.path.append('/root/qvenv')
import redis, pymysql
import string
import requests, urllib3
from lxml import etree
from tools.insert_update import insert_sql
from tools.logout import save_log
from configs.cfg import *
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from tools.ua import user_agent_list
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from concurrent.futures import ProcessPoolExecutor
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


class JapSpider:
    """Scrape corporate records from Japan's National Tax Agency corporate-number site.

    Flow: ``run`` -> ``get_cookie`` (Selenium performs the search and captures
    session cookies) -> ``handle_request`` (one POST per result page, fanned
    out across a process pool; each row triggers a detail-page GET).
    """

    # Silence the warnings that every verify=False request below would raise.
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    # Candidate one-letter search terms A-Z (run() currently uses only 'h').
    string_temp = string.ascii_uppercase
    # NOTE(review): this MySQL connection is opened at import time and is not
    # used anywhere in this file -- presumably consumed by tools.insert_update;
    # confirm before removing.
    conn = pymysql.connect(host=mysql_host, user=mysql_name, port=mysql_port, password=mysql_pwd,
                           database='spiderdb', charset='utf8')
    base_url = 'https://www.houjin-bangou.nta.go.jp/en/'
    data_url = 'https://www.houjin-bangou.nta.go.jp/en/kensaku-kekka.html'
    referer_url = 'https://www.houjin-bangou.nta.go.jp/en/henkorireki-johoto.html?selHouzinNo={}'
    detail_url = 'https://www.houjin-bangou.nta.go.jp/henkorireki-johoto.html?selHouzinNo={}'

    def get_cookie(self, wd):
        """Search for *wd* through a real browser, capture the session cookies,
        then fetch every result page concurrently via ``handle_request``.

        :param wd: search keyword typed into the site's company-name box.
        """
        print(f'deal {wd} datas')
        options = webdriver.ChromeOptions()
        options.add_experimental_option('detach', True)
        options.add_argument('--disable-extensions')
        options.add_argument('--start-maximized')  # maximize the browser window
        # options.add_argument('--headless')  # run without a visible window
        # Hide the "Chrome is being controlled by automated software" banner.
        options.add_experimental_option('useAutomationExtension', False)
        options.add_experimental_option('excludeSwitches', ['enable-automation'])
        options.add_argument('--disable-gpu')
        options.add_argument('window-size=1920x3000')  # fixed viewport size
        options.add_argument('--no-sandbox')
        options.add_argument('--disable-dev-shm-usage')
        driver = webdriver.Chrome(options=options)
        try:
            driver.implicitly_wait(20)
            driver.get(self.base_url)
            search_box = driver.find_element(By.ID, 'corp_name')
            search_box.click()
            search_box.send_keys(wd)
            search_box.send_keys(Keys.ENTER)
            # The results table carries class "fixed"; wait until it renders.
            WebDriverWait(driver, 30).until(
                EC.presence_of_element_located((By.CLASS_NAME, 'fixed')))
            cookies = driver.get_cookies()
            page_source = driver.page_source
        finally:
            # Fix: the driver was never closed before, leaking one Chrome
            # process per call. Grab cookies/page_source above, then quit.
            driver.quit()
        cookie_header = ''.join(f'{ck["name"]}={ck["value"]};' for ck in cookies)
        headers = {
            'Connection': 'close',
            'Content-Type': 'application/x-www-form-urlencoded',
            'Cookie': cookie_header,
            'Referer': 'https://www.houjin-bangou.nta.go.jp/en/kensaku-kekka.html',
            'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="98", "Google Chrome";v="98"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': 'same-origin',
            'Sec-Fetch-User': '?1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': random.choice(user_agent_list)
        }
        emt = etree.HTML(page_source)
        total_num = emt.xpath('//p[@class="srhResult"]/strong/text()')[0]
        tmp_nums = int(total_num.replace(',', ''))
        # Ceiling division: the site shows 100 results per page.
        total_page = -(-tmp_nums // 100)
        print('total_page:', total_page)
        with ProcessPoolExecutor(max_workers=8) as pool:
            for page in range(1, total_page + 1):
                data = {
                    # NOTE(review): hardcoded CSRF-style token -- looks
                    # session-bound; verify it is still accepted or capture it
                    # from the Selenium page instead.
                    'jp.go.nta.houjin_bangou.framework.web.common.CNSFWTokenProcessor.request.token': '30704eb5-ad33-4e54-ac06-0ed7864c83c5',
                    'screenTransaction': 'true',
                    'search': 'false',
                    'viewPageNo': page,
                    'viewNumAnc': 100,
                    'kjscr0201510m1Table.specifiedPageNumber': page,
                    'houzinNmShTypeRbtn': 2,
                    'houzinNmTxtf': wd,
                    'prefectureLst': '',
                    '_kokugaiCkbx': 'on',
                    'houzinKdRbtn': 0,
                    '_historyCkbx': 'on',
                    'orderRbtn': 1,
                }
                pool.submit(self.handle_request, headers, data)

    def handle_request(self, headers, data):
        """POST one results page and request the detail page for each row.

        Runs in a worker process. *headers* carries the Selenium-captured
        cookie; *data* is the form payload selecting one page number.
        """
        headers['User-Agent'] = random.choice(user_agent_list)
        with requests.post(self.data_url, headers=headers, data=data, verify=False) as resp:
            html = resp.content.decode()
        emt = etree.HTML(html)
        tr_items = emt.xpath('//table[@class="fixed"]/tbody/tr')
        # Hoisted out of the row loop: one Redis client per page, not per row.
        db = redis.StrictRedis(host=redis_host, port=redis_port, password=redis_pwd,
                               decode_responses=True, db=13, health_check_interval=30)
        pipeline = db.pipeline()
        for item in tr_items:
            cid = item.xpath('./th/text()')[0]
            # name/register_address are parsed for the (currently disabled)
            # persistence step below.
            name = item.xpath('./td[1]/text()')[0].strip()
            register_address = ''.join([i.strip() for i in item.xpath('./td[2]/text()')])
            headers['User-Agent'] = random.choice(user_agent_list)
            headers['Referer'] = str(self.referer_url.format(cid))
            try:
                # Fix: headers was previously passed positionally, which lands
                # in requests.get's ``params`` slot -- the dict was serialized
                # into the query string and NO headers (cookie/UA/referer) were
                # sent. Pass it by keyword, and bound the call with a timeout
                # so a worker cannot hang forever.
                requests.get(self.detail_url.format(cid), headers=headers,
                             proxies=random.choice(f_list), verify=False, timeout=30)
                # db.lpush("queue:jdata", str(meta))  # persistence still disabled
            except Exception as e:
                traceback.print_exc()
                save_log(e, '../../logs/jap.log')

    def run(self):
        """Entry point: currently crawls the single search term 'h'.

        The intended full crawl iterates ``string_temp`` and skips terms
        already recorded in a Redis set ('succeeded_wd') to avoid duplicate
        requests; that loop is disabled for now.
        """
        self.get_cookie('h')


if __name__ == '__main__':
    # Kick off the crawl when executed as a script.
    JapSpider().run()
