#!/usr/bin/env python
# -*- coding:utf-8 -*-

import time
import json
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait

# Copy the capabilities dict: DesiredCapabilities.FIREFOX is a shared
# class-level attribute, and mutating it in place would leak the change
# into every other driver created in this process.
desired_capabilities = DesiredCapabilities.FIREFOX.copy()
# NOTE(review): 'loggingPrefs' performance logging is a Chromium-style
# capability; geckodriver/Firefox may silently ignore it — confirm it
# actually has an effect here.
desired_capabilities['loggingPrefs'] = {'performance': 'ALL'}


class AutoSpider(object):
    """Selenium-driven scraper for xiaoke.cn.

    Logs in, runs the advanced ("super") search over successive one-year
    registration-date windows, opens each company's detail drawer, dumps
    the raw page source to disk and parses phone numbers plus their
    sources into ``company_info_dict`` (finally written out as JSON).
    """

    def __init__(self):
        # Launching the browser here means merely constructing this
        # object has the side effect of opening a Firefox window.
        self.init_browder()
        # company name -> {'start_year': str, 'phone_count': int,
        #                  'phone_ls': [{'phone', 'source_ls', 'source_href_ls'}]}
        self.company_info_dict = {}

    def init_browder(self):
        """Start a Firefox driver with the automation fingerprint reduced."""
        options = webdriver.FirefoxOptions()
        # This argument removes the webdriver automation traces so the
        # site is less likely to detect the bot.
        options.add_argument("disable-blink-features=AutomationControlled")
        self.browser = webdriver.Firefox(executable_path='./geckodriver.exe',
                                         options=options, desired_capabilities=desired_capabilities)
        self.wai = WebDriverWait(self.browser, 20)  # waits raise TimeoutException after 20s

    def login(self, username="18825159736", pwd='Tg123456'):
        """Fill in the phone/password login form and submit it.

        NOTE(review): real credentials are committed here as defaults;
        consider loading them from the environment instead.
        """
        username_elem = self.wai.until(EC.element_to_be_clickable((By.ID, "phone")))
        pwd_elem = self.wai.until(EC.element_to_be_clickable((By.ID, "password")))
        submit_elem = self.wai.until(EC.element_to_be_clickable(
            (By.XPATH, '//button[contains(@class,"antd-pro-app-src-pages-auth-login-index-loginBtn")]')))
        # Log in with the account/password pair.
        username_elem.send_keys(username)
        pwd_elem.send_keys(pwd)
        submit_elem.click()
        time.sleep(2)  # give the post-login redirect time to happen

    def close_alert(self):
        """Dismiss the pop-ups shown when the search page first loads."""
        first_alter_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH,
                                                                 '//div[@class="antd-pro-app-src-pages-application-search-vistor-components-xkb-active-modal-index-arrow-delete"]')))
        first_alter_elem.click()
        alter_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH,
                                                           '//div[@class="antd-pro-app-src-pages-application-search-vistor-components-favorite-guide-index-close"]')))
        alter_elem.click()
        button_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH, '//button[@class="driver-close-btn"]')))
        button_elem.click()
        time.sleep(1)

    def change_search_param(self, left_times=1, right_times=1):
        """Set the registration-date range filter and trigger the search.

        left_times / right_times: how many times to click the
        "previous year" arrow on the left/right calendar panes.
        When ``left_times`` is falsy the date filter is left untouched
        and only the search button is clicked.
        """
        search_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH,
                                                            '//span[@class="antd-pro-app-src-pages-application-search-vistor-super-search-components-new-type-select-com-index-search"]')))
        if left_times:
            start_time_elem = self.wai.until(
                EC.element_to_be_clickable((By.XPATH, '//span[contains(@class,"ant-calendar-picker-input")]')))
            start_time_elem.click()

            left_last_year_elem = self.wai.until(EC.element_to_be_clickable(
                (By.XPATH, '//div[contains(@class,"ant-calendar-range-left")]//a[@class="ant-calendar-prev-year-btn"]')))
            right_last_year_elem = self.wai.until(EC.element_to_be_clickable(
                (By.XPATH, '//div[contains(@class,"ant-calendar-range-right")]//a[@class="ant-calendar-prev-year-btn"]')))
            # Step each pane back the requested number of years.
            for _ in range(left_times):
                left_last_year_elem.click()
            for _ in range(right_times):
                right_last_year_elem.click()

            # Pick day "1" in both panes to complete the range.
            left_day_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH,
                                                                  '//div[contains(@class,"ant-calendar-range-left")]//td[@class="ant-calendar-cell"]/div[text()=1]')))
            right_day_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH,
                                                                   '//div[contains(@class,"ant-calendar-range-right")]//td[@class="ant-calendar-cell"]/div[text()=1]')))
            left_day_elem.click()
            right_day_elem.click()

        search_elem.click()
        time.sleep(1)

    def wait_company_info_lode(self):
        """Block until the result table has rendered at least one company."""
        self.wai.until(EC.element_to_be_clickable((By.XPATH, '//tbody[@class="ant-table-tbody"]//div[@class="antd-pro-app-src-pages-application-search-vistor-components-table-center-index-company-name"]')))
        time.sleep(1)

    def save_company_info_old(self):
        """Older variant that scrolls each avatar into view before clicking.

        Kept for reference; ``main()`` uses :meth:`save_company_info`.
        """
        company_div_elems = self.browser.find_elements_by_xpath('//tbody[@class="ant-table-tbody"]//div[@class="antd-pro-app-src-pages-application-search-vistor-components-xun-avatar-index-avatar-con avatar-con"]')
        is_end = False
        for elem in company_div_elems:
            time.sleep(1)
            try:
                if not is_end:
                    self.browser.execute_script("arguments[0].scrollIntoView();", elem)
                    ActionChains(self.browser).move_to_element(elem).click().perform()
            # Was a bare `except:` — narrowed so Ctrl-C still interrupts.
            except Exception:
                print('翻到底了，跳过')
                is_end = True
            time.sleep(1)
            # (A dead mid-loop re-query of company_div_elems was removed:
            # rebinding the name never affected the running for-loop.)
            try:
                elem.click()

                try:
                    source_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH, '//span[@class="antd-pro-app-src-pages-application-search-vistor-components-source-form-index-key-name"]')))
                    # Hover so the phone-source tooltip markup is rendered into the DOM.
                    ActionChains(self.browser).move_to_element(source_elem).perform()
                    file_name = '逍客/' + str(time.time())
                    with open(file_name, 'w', encoding='utf-8') as f:
                        f.write(self.browser.page_source)
                    self.parse_source_code(self.browser.page_source)
                except Exception:
                    print('no source')
                try:
                    close_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH, '//div[@class="antd-pro-app-src-pages-application-search-vistor-components-common-drawer-index-closeBtn"]')))
                    close_elem.click()
                except Exception:
                    print('五收起')
            except Exception:
                print('observe')

    def save_company_info(self):
        """Click every company row, dump the detail drawer's page source to
        a timestamped file under 逍客/ and parse it into company_info_dict."""
        company_div_elems = self.browser.find_elements_by_xpath('//tbody[@class="ant-table-tbody"]//div[@class="antd-pro-app-src-pages-application-search-vistor-components-xun-avatar-index-avatar-con avatar-con"]')
        for elem in company_div_elems:
            time.sleep(1)
            try:
                elem.click()
                try:
                    source_elem = self.wai.until(EC.element_to_be_clickable((By.XPATH, '//span[@class="antd-pro-app-src-pages-application-search-vistor-components-source-form-index-key-name"]')))
                    # Hover so the phone-source tooltip markup is rendered into the DOM.
                    ActionChains(self.browser).move_to_element(source_elem).perform()
                # Was a bare `except:` — narrowed so Ctrl-C still interrupts.
                except Exception:
                    print('no source')
                file_name = '逍客/' + str(time.time())
                with open(file_name, 'w', encoding='utf-8') as f:
                    f.write(self.browser.page_source)
                self.parse_source_code(self.browser.page_source)
            except Exception:
                print('error')

    def parse_source_code(self, text):
        """Parse a detail-drawer HTML dump.

        Extracts the company name, founding year and every 11-digit phone
        number together with its listed sources, records them in
        ``company_info_dict`` and appends one CSV row per phone to
        ``jz_info_xk.csv``.
        """
        from lxml import etree  # local import: lxml only needed when parsing
        selector = etree.HTML(text, parser=etree.HTMLParser(encoding='utf-8'))
        company_name = ''.join(selector.xpath('//div[@class="antd-pro-app-src-pages-application-search-vistor-components-details-components-header-index-name"]/text()')).strip()
        start_year = selector.xpath('//div[@class="antd-pro-app-src-pages-application-search-vistor-components-details-components-header-index-value"]/text()')[0].strip()
        div_selectors = selector.xpath('//div[contains(@class,"antd-pro-app-src-pages-application-search-vistor-components-details-components-detils-phone-index-left")]')
        if not self.company_info_dict.get(company_name, {}):
            self.company_info_dict[company_name] = {
                'phone_ls': []
            }
        self.company_info_dict[company_name]['start_year'] = start_year
        phone_count = 0
        save_ls = []
        for div_selector in div_selectors:
            phone = ''.join(div_selector.xpath('./div//span[@class="antd-pro-app-src-pages-application-search-vistor-components-details-components-detils-phone-index-phone-name"]//text()')).strip()
            source = ''.join(div_selector.xpath('./div//span[@class="antd-pro-app-src-pages-application-search-vistor-components-source-form-index-key-name"]/span/text()'))
            source_ls = div_selector.xpath('./div//span[@class="antd-pro-app-src-pages-application-search-vistor-components-source-form-index-key-name"]//div[contains(@class,"antd-pro-app-src-pages-application-search-vistor-components-source-form-index-source-item ")]/text()')
            source_ls.append(source)
            if len(phone) == 11:  # keep only complete mobile numbers
                save_ls.append([source_ls, [], phone])
                self.company_info_dict[company_name]['phone_ls'].append({
                    "phone": phone,
                    'source_ls': source_ls,
                    'source_href_ls': []
                })
                phone_count += 1
                print(company_name, phone, source_ls)
                print('==============================')
        self.company_info_dict[company_name]['phone_count'] = phone_count
        if save_ls:
            # Open the CSV once instead of re-opening it for every row.
            with open('jz_info_xk.csv', 'a', encoding='utf-8') as f:
                start_year = self.company_info_dict[company_name].get('start_year')
                for ls in save_ls:
                    f.write(f'{company_name},{start_year},{phone_count},{ls[2]},{str(ls[0])},{str(ls[1])}\n')

    def main(self):
        """Full scraping run: login, loop over 8 year windows, save JSON."""
        self.browser.maximize_window()
        self.browser.get('https://www.xiaoke.cn/xk/auth/login')
        # Log in.
        self.login()
        # Enter the advanced search page.
        self.browser.get('https://www.xiaoke.cn/xk/searvistor/supersearch?')
        # Close the welcome pop-ups.
        self.close_alert()
        for i in range(8):
            # Change the search filters (i == 0 skips the date filter).
            self.change_search_param(left_times=i, right_times=i)
            # Wait for the company table to load, then harvest it.
            self.wait_company_info_lode()
            self.save_company_info()
            self.browser.get('https://www.xiaoke.cn/xk/searvistor/supersearch?')
            time.sleep(3)
        with open('result_xk.json', 'w', encoding='utf-8') as f:
            json.dump(self.company_info_dict, f, ensure_ascii=False, indent=4)


if __name__ == '__main__':
    # Script entry point: constructing the spider opens the browser,
    # then main() drives the whole scraping run.
    AutoSpider().main()
