import pymysql
from requests_html import HTMLSession
import time
import random
import math


class Load:
    """Small HTTP helper that fetches one URL and extracts HTML or JSON.

    Sends a desktop-Chrome User-Agent so aiqicha.baidu.com serves the
    normal page / JSON payloads instead of an anti-bot response.
    """

    def __init__(self, url):
        self.url = url
        self.headers = {"User-Agent": (
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36")}

    def get_response(self):
        """Issue a GET for self.url and return the requests_html response."""
        session = HTMLSession()
        return session.get(self.url, headers=self.headers)

    def filter_html(self, filter):
        """Fetch the page and return elements matching CSS selector `filter`.

        NOTE: the parameter name shadows the builtin `filter`; kept for
        backward compatibility with keyword callers.
        """
        html_data = self.get_response()
        return html_data.html.find(filter)

    def filter_json_wait(self, wait_time):
        """Sleep `wait_time` seconds, then retry filter_json().

        On another failure, back off by an extra 3-5 random seconds and
        recurse.  Returns None explicitly when wait_time is falsy (the
        original fell off the end of the function in that case).
        """
        if not wait_time:
            return None
        time.sleep(wait_time)
        try:
            return self.filter_json()
        except Exception:  # was a bare except; still best-effort retry
            wait_time += random.randint(3, 5)
            print("【%s】获取json出现异常，等待%s秒" % (self.url, wait_time))
            return self.filter_json_wait(wait_time)

    def filter_json(self):
        """Fetch self.url as JSON and return its "data" field.

        Retries with a random 3-5s backoff on any request/decode error.
        Returns [] when the response's "status" is non-zero.
        """
        try:
            result = self.get_response().json()
        except Exception:  # was a bare except (also caught KeyboardInterrupt)
            wait_time = random.randint(3, 5)
            print("【%s】获取json出现异常，等待%s秒" % (self.url, wait_time))
            return self.filter_json_wait(wait_time)
        if result["status"] == 0:
            return result["data"]
        print("结果相应不正确相应数据为：%s" % (str(result)))
        return []


# 查询企业列表
# 查询企业列表
def load_enterprise_list(area_list, history, is_find=None):
    """Recursively walk the district structure and crawl each area name.

    `area_list` may be a single area-name string, a list of strings, or a
    list of dicts holding nested "city"/"province" sub-lists (the shape
    returned by districtListAjax — TODO confirm against a live response).
    `history` is the (area_name, count) resume list from load_history().
    `is_find` is accepted for backward compatibility but unused.
    Returns the total number of rows persisted.
    """
    count = 0
    if isinstance(area_list, str):
        page_no = find_start_page(history, area_list)
        if page_no > 0:
            count += load_enterprise_page(area_list, page_no)

    elif isinstance(area_list, list):
        for area in area_list:
            if isinstance(area, str):
                page_no = find_start_page(history, area)
                if page_no > 0:
                    count += load_enterprise_page(area, page_no)
            elif isinstance(area, dict):
                # .get() tolerates a missing key (original raised KeyError
                # when "city"/"province" was absent from the dict).
                if area.get("city"):
                    count += load_enterprise_list(area["city"], history, is_find)
                if area.get("province"):
                    count += load_enterprise_list(area["province"], history, is_find)
    return count


# 寻找上次爬取的位置
def find_start_page(history_list, area):
    for history in history_list:
        if history[0] == area:
            return -1 if math.floor(history[1] / 10) >= 10 else math.floor(history[1] / 10)
    return 1


def load_enterprise_page(q, page_no):
    """Crawl the paginated company search for keyword `q` from `page_no`.

    Fetches successive result pages until an empty page or an error
    response, persisting each company via load_detail().  Returns the
    total number of rows saved.
    """
    count = 0
    if not q:
        return count
    while True:
        print("开始爬取 q=%s,page_no=%d" % (q, page_no))
        load = Load("https://aiqicha.baidu.com/s/l?q=" + q + "&t=&p=" + str(page_no) + "&s=10&o=0&f={}")
        query_result = load.filter_json()
        if not isinstance(query_result, dict):
            # filter_json() returned [] (request error or bad status).
            # The original left result_list unchanged here and re-requested
            # the same page forever; stop instead.
            break
        result_list = query_result["resultList"]
        if not result_list:
            break
        if not isinstance(result_list, list):
            # Original did `raise Exception(print(...))`, which prints and
            # then raises Exception(None); attach the message properly.
            raise Exception("企业列表查询结果数据结构发生了变化")
        for enterprise_info in result_list:
            count += load_detail(enterprise_info, q)
        page_no += 1
    return count


# 加载企业详情数据
def load_detail(enterprise_info, areaName):
    url_list = ["detail/basicAllDataAjax",
                "yuqing/hotpotNewsAjax",
                "yuqing/latestLyricalAjax?type=aside",
                "relations/finalBenefitajax",
                "relations/doubtControllerAjax",
                "cpc/getCpcDataAjax?type=2",
                "stockchart/stockchartAjax?drill=0"]
    base_url = "https://aiqicha.baidu.com"

    enterprise_data = []
    for url in url_list:
        query_url = base_url + "/" + url
        interface_name = url
        if query_url.__contains__("?"):
            query_url += "&"
            interface_name = interface_name[0:interface_name.index("?")]
        else:
            query_url += "?"
        query_url += "pid=" + enterprise_info["pid"]
        detail_data = Load(query_url).filter_json()

        enterprise = [query_url, interface_name, enterprise_info["entName"], int(enterprise_info["pid"]),
                      str(detail_data), areaName]
        enterprise_data.append(enterprise)
    print(enterprise_info["entName"])
    return save_data(enterprise_data)


def save_data(data_list):
    """Insert the rows of `data_list` into enterprise_info.

    Each row is (interface_url, interface_name, enterprise_name, pid,
    detail_data, area_name).  Returns the number of rows inserted.

    The parameterized executemany() lets pymysql escape values itself.
    The original additionally ran connect.escape() over every string
    field, which double-escaped them: escape() returns a *quoted* SQL
    literal, so values were stored wrapped in quote characters (the
    reason load_history()'s SQL strips quotes from area_name).
    """
    connect = pymysql.connect(host="127.0.0.1", port=3306, user="root", password="root", database="spider",
                              charset="utf8mb4")
    result = 0
    try:
        with connect.cursor() as cursor:
            sql = "insert into enterprise_info(interface_url, interface_name, enterprise_name, pid, detail_data,area_name) values (%s,%s,%s,%s,%s,%s) "
            result += cursor.executemany(sql, data_list)
        connect.commit()
    except Exception:
        connect.rollback()
        # Re-raise the original exception; `raise Exception` discarded it.
        raise
    finally:
        connect.close()
    return result


def load_history():
    """Load crawl progress: (area_name, distinct-pid count) per area.

    area_name values are stored quote-wrapped by historical runs of
    save_data(), hence the replace() in the SQL.  Ordered by count
    ascending.  Returns the cursor's fetchall() result (tuple of rows).
    """
    connect = pymysql.connect(host="127.0.0.1", port=3306, user="root", password="root", database="spider",
                              charset="utf8mb4")
    result = None
    try:
        with connect.cursor() as cursor:
            sql = "select  replace(area_name,'''','') area_name,count(pid) c from ( select area_name,pid from enterprise_info GROUP BY area_name,pid) t group by area_name order by count(pid)"
            cursor.execute(sql)
            result = cursor.fetchall()
    finally:
        # Read-only query: no rollback needed; just release the connection.
        connect.close()
    return result


def run():
    """Entry point: fetch the district list, resume from DB history, crawl."""
    url = 'https://aiqicha.baidu.com/index/districtListAjax'
    main = Load(url)
    area_list = main.filter_json()
    history = load_history()
    count = load_enterprise_list(area_list, history)
    print("总共爬取到%d条数据" % count)


# Guard so importing this module does not kick off a full crawl.
if __name__ == "__main__":
    run()
