# -*- coding: UTF-8 -*-
# Project : aiqicha
# File : index.py
# IDE : PyCharm
# Author : 博科（鑫贝西）田聪
# Date : 2021/10/26 15:11
import json
import random
import re
import time

import pymysql
import requests
from retry import retry
from urllib.parse import quote
from region import reg
from tools.settings import *


class AiQiCha:
    """Crawl company contact details from aiqicha.baidu.com into MySQL.

    Workflow: read every company name from the `en_information` table,
    search each name on aiqicha for rows still missing a phone number,
    open the exact-match detail page, and write the phone, address,
    region and legal-person fields back to the same table.
    """

    def __init__(self):
        # MySQL connection settings; HOST/USER/PASSWORD come from tools.settings.
        self.config = {
            "host": HOST,
            "user": USER,
            "password": PASSWORD,
            "database": 'agency',
            'cursorclass': pymysql.cursors.DictCursor,
        }

        # Load every company name and phone from the database up front.
        # try/finally guarantees the connection is closed even if the
        # query raises (the original leaked it on failure).
        db = pymysql.connect(**self.config)
        try:
            cursor = db.cursor()
            try:
                cursor.execute('SELECT entName,phone FROM `en_information` ')
                # e.g. [{'entName': '山东康瑞尔医疗科技有限公司', 'phone': ''}, ...]
                self.en_name_list = cursor.fetchall()
            finally:
                cursor.close()
        finally:
            db.close()

        self.session = requests.session()
        self.start_request()

    def start_request(self):
        """Prime the session with cookies, then crawl companies lacking a phone."""
        # One GET against the landing page so the shared session picks up
        # the cookies aiqicha expects on later AJAX calls.
        self.web_spider(
            url='https://aiqicha.baidu.com/',
            headers={
                'Host': 'aiqicha.baidu.com',
                'Cache-Control': 'max-age=0',
                'sec-ch-ua': '"Microsoft Edge";v="95", "Chromium";v="95", ";Not A Brand";v="99"',
                'sec-ch-ua-platform': '"Windows"',
                'Upgrade-Insecure-Requests': '1',
                'User-Agent': random.choice(USERLIST),
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
                'Accept-Encoding': 'gzip, deflate',
                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
            }
        )

        # self.en_name_list looks like [{'entName': '...', 'phone': ''}, ...]
        for en_name_dict in self.en_name_list:
            en_name = en_name_dict.get('entName')  # company name
            phone = en_name_dict.get('phone')  # phone already on record
            if not phone:  # crawl only rows whose phone is '' or None
                time.sleep(10)  # throttle between searches to avoid bans
                # The Referer must carry the URL-encoded company name.
                referer = f'https://aiqicha.baidu.com/s?q={quote(en_name)}&t=0'

                # Search endpoint; the literal {} is an empty advanced filter.
                en_search_link = f'https://aiqicha.baidu.com/s/advanceFilterAjax?q={en_name}&t=&p=1&s=10&o=0&f={{}}'

                try:
                    self.parse(en_search_link=en_search_link, referer=referer)
                except Exception as error:
                    # Log and continue with the next company instead of dying.
                    print('#' * 100)
                    print(error)
                    print(en_name_dict)
                    print('#' * 100)

    def parse(self, en_search_link, referer):
        """Fetch page one of the search results and open the exact-name match."""
        headers = {
            'Host': 'aiqicha.baidu.com',
            'Connection': 'keep-alive',
            'sec-ch-ua': '"Microsoft Edge";v="95", "Chromium";v="95", ";Not A Brand";v="99"',
            'Accept': 'application/json, text/plain, */*',
            'X-Requested-With': 'XMLHttpRequest',
            'sec-ch-ua-mobile': '?0',
            'User-Agent': random.choice(USERLIST),
            'Referer': referer,
            'Accept-Encoding': 'gzip, deflate, br',
        }
        response = self.web_spider(
            url=en_search_link,
            headers=headers
        )

        response_json = json.loads(response.text)
        # NOTE(review): the original comment said a NON-empty `msg` means
        # "skip", yet the code skips only when `msg` is absent entirely.
        # Check kept as-is — confirm against a live API response before
        # changing it.
        msg = response_json.get('msg')

        if msg is None:
            return

        json_data = response_json.get('data')  # type:dict
        query_str = json_data.get('queryStr')  # the company name we searched

        # First page of search hits.
        result_list = json_data.get('resultList')

        for result in result_list:

            item = {}
            # Strip the <em>…</em> highlight tags the API embeds in names.
            entName = re.sub('<.*?>', '', result.get('entName'))
            legalPerson = result.get('legalPerson')

            item['entName'] = entName
            item['legalPerson'] = re.sub('<.*?>', '', legalPerson)
            pid = result.get('pid')  # id used to build the detail-page URL
            ent_info_link = 'https://aiqicha.baidu.com/company_detail_{}'.format(pid)
            # Follow only the hit whose name equals the query exactly.
            if entName == query_str:
                self.parse_content(ent_info_link, item, referer)
                break

    def to_mysql(self, item):
        """Write one scraped record back to `en_information`, keyed by entName."""
        if item['phone'] == '':
            item['phone'] = '暂无电话'
        # Parameterized query: the scraped values are untrusted input, so
        # they must never be interpolated into the SQL text (the original
        # f-string version was injectable and broke on names containing ').
        sql = ("UPDATE en_information SET phone=%s,enhref=%s,legalPerson=%s,"
               "area=%s,addr=%s WHERE entName = %s")
        params = (item['phone'], item['enhref'], item['legalPerson'],
                  item['area'], item['addr'], item['entName'])
        print(sql)
        db = pymysql.connect(**self.config)
        cursor = db.cursor()
        try:

            cursor.execute(sql, params)
            db.commit()
            print(f"{item['entName']}#########写入成功")

        except Exception as error:
            print(sql)
            print(error)
        finally:
            cursor.close()
            db.close()

    def parse_content(self, ent_info_link, item, referer):
        """Extract phone/address fields from a company detail page into `item`."""
        headers = {
            'Host': 'aiqicha.baidu.com',
            'Connection': 'keep-alive',
            'sec-ch-ua': '"Microsoft Edge";v="95", "Chromium";v="95", ";Not A Brand";v="99"',
            'User-Agent': random.choice(USERLIST),
            'Accept': 'application/json, text/plain, */*',
            'X-Requested-With': 'XMLHttpRequest',
            'Referer': referer,
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'zh-CN,zh;q=0.9',
        }
        response = self.web_spider(
            url=ent_info_link,
            headers=headers

        )
        try:
            # Company data is embedded in the page as `window.pageData = {...};`.
            page_data = re.findall('window.pageData =(.+);', response.text)[0]
            page_data_json = json.loads(page_data)
            result = page_data_json.get('result')
            addr = result.get('addr')  # registered address (original comment mislabeled this as legal person)
            item['area'] = reg(addr)  # map the address to a region name
            item['enhref'] = ent_info_link
            item['addr'] = addr
            phoneinfo = result.get('phoneinfo')  # type:list
            # Join all listed numbers with a full-width comma.
            item['phone'] = '，'.join(entry.get('phone') for entry in phoneinfo)
            self.to_mysql(item)
        except IndexError as e:
            # pageData was not found — most likely a captcha or ban page.
            print('#' * 100)
            print(e)
            print(response.text)
            print(ent_info_link)
            print('#' * 100)

    @retry(tries=10, delay=1, backoff=2, max_delay=10)
    def web_spider(self, url, headers: dict):
        """GET `url` with the shared session.

        Raises on an unexpected status so @retry backs off (up to 10 tries,
        exponential delay capped at 10s) and tries again.  A timeout is set
        so a hung socket cannot stall the retry loop forever.
        """
        response = self.session.get(
            url,
            headers=headers,
            timeout=30
        )
        response.encoding = response.apparent_encoding
        if response.status_code not in [302, 200]:
            raise Exception('没有获取到数据')
        return response


if __name__ == '__main__':
    # Run the crawler forever: one full pass per AiQiCha() construction,
    # swallow-and-print any crash, then pause 200s before the next pass.
    while True:
        try:
            AiQiCha()
        except Exception as e:
            print(e)
        print("############    休眠200s    ##############")
        time.sleep(200)
