#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
@UserFor     :   ***    ***
@File        :   crawl_qcc_list.py    
@Email       :   18574945291@163.com
@Modify Time :   2020/12/12 14:08
@Author      :   code_wizard (wll) 
"""
import requests
import pymysql
import redis
import sys
import os
import datetime
import json
from urllib import parse
from bs4 import BeautifulSoup
from lxml import etree

sys.path.append(os.getcwd())
import local_settings
import cookie_pool
import execute_sql
import push_search_keywords
_NOW = datetime.datetime.now()


class CrawlQccList(object):
    """Crawl the qcc.com web-search result list for a keyword and persist
    each company's base info (plus its tags) into MySQL.

    Collaborators (project-local, assumed present at runtime):
      - local_settings.mysql_dev : pymysql.connect(**kwargs) for the dev DB
      - local_settings.PROXIES   : proxies mapping passed to requests
      - execute_sql              : prepared SQL strings (exist-check, insert)
    """

    # Header TEMPLATE for the search request. The per-request values
    # (cookie / user-agent / path) are filled in on a *copy* inside
    # get_html_doc so this class-level dict is never mutated — mutating it
    # in place (as before) leaked state between requests and instances.
    HEADERS = {
        'authority': 'www.qcc.com',
        'method': 'GET',
        'path': None,
        'scheme': 'https',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'zh,en;q=0.9,zh-CN;q=0.8',
        'cache-control': 'max-age=0',
        'cookie': None,
        'upgrade-insecure-requests': '1',
        'user-agent': None
    }

    #: Seconds before a hung request is abandoned; without a timeout a
    #: stalled response blocks the forever-running crawl loop.
    REQUEST_TIMEOUT = 30

    def __init__(self):
        # DictCursor so fetched rows come back as dicts, matching the
        # key-based access used elsewhere in the project.
        self.mysql_con = pymysql.connect(**local_settings.mysql_dev)
        self.cursor = self.mysql_con.cursor(cursor=pymysql.cursors.DictCursor)

    def close(self):
        """Commit any pending work and close the MySQL connection."""
        try:
            self.mysql_con.commit()
        finally:
            self.mysql_con.close()

    def __del__(self):
        # Best-effort cleanup: __del__ may run during interpreter teardown
        # when the connection is already unusable, so never let it raise.
        try:
            self.close()
        except Exception:
            pass

    def get_html_doc(self, keywords, **kwargs):
        """Fetch the qcc search page for *keywords*.

        kwargs: expects ``cookie`` and ``agent`` (from the cookie pool).
        Returns the embedded JSON state dict on HTTP 200, else None.
        """
        quoted_key = parse.quote(keywords)
        search_link = "https://www.qcc.com/web/search?key={key_words}".format(key_words=quoted_key)
        # Fill request-specific headers on a shallow copy of the template.
        headers = dict(self.HEADERS)
        headers["cookie"] = kwargs.get("cookie")
        headers["user-agent"] = kwargs.get("agent")
        headers["path"] = '/web/search?key={key}'.format(key=quoted_key)
        response = requests.get(url=search_link, headers=headers,
                                proxies=local_settings.PROXIES,
                                timeout=self.REQUEST_TIMEOUT)
        if response.status_code == 200:
            return self.get_content(response.text)
        return None

    @staticmethod
    def get_content(doc):
        """Extract the page's embedded state JSON.

        The search results are inlined in a <script> tag as
        ``window.__INITIAL_STATE__={...};(function()...``; slice out the
        JSON object and parse it. Returns None when no such script exists.
        """
        soup = BeautifulSoup(doc, "html.parser")
        for script in soup.select('script'):
            if "window.__INITIAL_STATE__={" in script.text:
                raw = script.text.split("window.__INITIAL_STATE__=")[1].split(";(function()")[0]
                return json.loads(raw)
        return None

    def insert_into_base_info(self, **item):
        """Insert one company record (and its tags) unless it already exists.

        Keyed on ``qcc_id``; commits after a successful insert.
        """
        # Skip companies already stored (exist-check by qcc_id).
        self.cursor.execute(execute_sql.base_exist_sql, (item.get("qcc_id"),))
        exists = self.cursor.fetchone()
        if exists:
            print("数据库已经存在该数据")
            return
        # Timestamp taken per insert: the module-level _NOW constant was
        # frozen at import time, which is wrong for a long-running crawler.
        now = datetime.datetime.now()
        self.cursor.execute(execute_sql.insert_base_info, (
            item.get("qcc_id", None), item.get("company_name", None), item.get("gs_code", None),
            item.get("credit_code", None), item.get("open_status", None),
            item.get("start_time_unix", None), item.get("cp_addr", None), item.get("oper_name", None),
            item.get("reg_capi_str", None), item.get("contact_number", None),
            item.get("emial", None), item.get("image_url", None), item.get("tel_list_string", None),
            item.get("oper_info", None), item.get("tag", None),
            item.get("area_code", None), item.get("web_site", None), item.get("short_status", None),
            item.get("oper_type", None),
            "https://www.qcc.com/firm/{qcc_id}.html".format(qcc_id=item.get("qcc_id", None)),
            item.get("reg_cap_type", None), now, now
        ))
        # Each tag row is linked back to the company by credit_code.
        tags_info = item.get("tags_info", None)
        if tags_info is not None:
            for tag in tags_info:
                self.cursor.execute(
                    "insert into qcc_cp_tag (t,n,s,d,credit_code,create_time,update_time)values (%s,%s,%s,%s,%s,%s,%s)",
                    (tag.get("t", None), tag.get("n", None), tag.get("s", None), tag.get("d", None),
                     item.get("credit_code", None), now, now))
        print("数据插入成功")
        self.mysql_con.commit()


if __name__ == '__main__':
    qcc = cookie_pool.QccCookie()
    push = push_search_keywords.PushKey()
    crawl = CrawlQccList()

    # Declarative mapping: DB column name -> key in the qcc search JSON.
    # (Replaces 20 repeated item[...] = i.get(...) lines; keys unchanged,
    # including the historical 'emial' spelling the insert SQL relies on.
    # 'oper_info' is intentionally absent, as in the original code.)
    FIELD_MAP = (
        ("qcc_id", "KeyNo"),              # 详情页id
        ("company_name", "name"),         # 公司名字
        ("gs_code", "No"),                # 工商注册号
        ("credit_code", "CreditCode"),
        ("open_status", "Status"),
        ("start_time_unix", "StartDate"),
        ("cp_addr", "Address"),
        ("oper_name", "OperName"),
        ("reg_capi_str", "RegistCapi"),
        ("contact_number", "ContactNumber"),
        ("emial", "Email"),
        ("image_url", "ImageUrl"),
        ("tel_list_string", "TelList"),
        ("tag", "Tag"),
        ("area_code", "AreaCode"),
        ("web_site", "Gwlink"),
        ("short_status", "ShortStatus"),
        ("oper_type", "OperType"),
        ("reg_cap_type", "RegistCapiType"),
        ("tags_info", "TagsInfo"),
    )

    while True:
        key_words = push.pop_key()
        # Renamed from `dict`, which shadowed the builtin.
        cookie_info = qcc.get_cookie_pool()
        # Stop when either the keyword queue or the cookie pool is exhausted.
        if key_words is None or cookie_info is None:
            break
        try:
            print("当前抓取key", key_words)
            json_data = crawl.get_html_doc(key_words, **cookie_info)
            if json_data is None:
                continue
            # Guard every level: the old chained
            # .get("search").get("searchRes") raised AttributeError whenever
            # an intermediate key was missing.
            search_res = (json_data.get("search") or {}).get("searchRes") or {}
            range_data = search_res.get("Result")
            if range_data is None:
                continue
            for entry in range_data:
                item = {column: entry.get(source) for column, source in FIELD_MAP}
                crawl.insert_into_base_info(**item)
        except Exception as e:
            # Top-level boundary: report and move on to the next keyword
            # rather than killing the long-running crawl loop.
            print(e)
