#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
@UserFor     :   ***    ***
@File        :   qcc_detail_spider.py    
@Email       :   18574945291@163.com
@Modify Time :   2020/12/14 15:28
@Author      :   code_wizard (wll) 
"""
import sys, os ,requests
import datetime
import pymysql
import redis
import umsgpack
import hashlib
from lxml import etree
from fake_useragent import UserAgent
sys.path.append(os.getcwd())
import local_settings
import cookie_pool

class QccDetailSpider(object):
    HEADERS = {
        'authority': 'www.qcc.com',
        'method': 'GET',
        'scheme': 'https',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'zh,en;q=0.9,zh-CN;q=0.8',
        'cache-control': 'max-age=0',
        'cookie': None,
        'upgrade-insecure-requests': '1',
        'user-agent': None
    }

    def __init__(self):
        self.mysql_connect = pymysql.connect(**local_settings.mysql_dev)
        self.cursor = self.mysql_connect.cursor(cursor=pymysql.cursors.DictCursor)
        self.redis_con = redis.Redis(**local_settings.redis_dev)


    def parse_base_yyzz(self,element):
        """ 抓取基本营业执照信息 """
        if element is None:
            return None
        father = element.xpath('//section[@id="Cominfo"]/table[@class="ntable"]')[0]
        item = dict()
        item['legal_per'] = father.xpath('./tr[1]/td[2]//h2/text()')
        item['open_status'] = father.xpath('./tr[1]/td[4]/text()')
        item['reg_date'] = father.xpath('./tr[1]/td[6]/text()')
        item['reg_cap'] = father.xpath('./tr[2]/td[2]/text()')
        item['act_cap'] = father.xpath('./tr[2]/td[4]/text()')
        item['check_cap'] = father.xpath('./tr[2]/td[6]/text()')
        item['social_code'] = father.xpath('./tr[3]/td[2]/text()')
        item['org_code'] = father.xpath('./tr[3]/td[4]/text()')
        item['gs_code'] = father.xpath('./tr[3]/td[6]/text()')
        item['nsr_code'] = father.xpath('./tr[4]/td[2]/text()')
        item['io_code'] = father.xpath('./tr[4]/td[4]/text()')
        item['industry'] = father.xpath('./tr[4]/td[6]/text()')
        item['qc_type'] = father.xpath('./tr[5]/td[2]/text()')
        item['js_date'] = father.xpath('./tr[5]/td[4]/text()')
        item['reg_org'] = father.xpath('./tr[5]/td[6]/text()')
        item['size'] = father.xpath('./tr[6]/td[2]/text()')
        item['join_baoxina'] = father.xpath('./tr[6]/td[4]/text()')
        item['area_pro'] = father.xpath('./tr[6]/td[6]/text()')
        item['pass_name'] = father.xpath('./tr[7]/td[2]/text()')
        item['english_pro'] = father.xpath('./tr[7]/td[4]/text()')
        item['addr'] = father.xpath('./tr[8]/td[2]/text()')
        item['scope'] = father.xpath('./tr[9]/td[2]/text()')

        print(item)


    def get_hmtl_doc(self, id, url):
        id = "22"
        url = 'https://www.qcc.com/firm/fe870495ff55716f0e5029f23088bb33.html'
        qcc = cookie_pool.QccCookie()
        dicts = qcc.get_cookie_pool()
        print(dicts)
        self.HEADERS["cookie"] = dicts.get("cookie")  # 设置cookie
        self.HEADERS["user-agent"] = dicts.get("agent")  # 设置user-agent
        response = requests.get(url=url,headers=self.HEADERS,proxies=local_settings.PROXIES)
        if response.status_code == 200:
            response_doc = response.text
            if len(response_doc)>500:
                print("抓取成功")
                self.save_into_file(self.md5(url),response_doc)
                element = etree.HTML(response_doc)
                self.parse_base_yyzz(element)
                # 解析 压缩保存
            else:
                print("抓取失败 ")
                hash_temp = dict()
                hash_temp["id"] = id
                hash_temp["url"] = url
                self.redis_con.lpush(local_settings.DETAIL_LINK, umsgpack.packb(hash_temp))

    def save_into_file(self, hash_code, doc):
        with open(hash_code+".html",'w',encoding="utf-8") as f:
            f.write(doc)

    def spider_detail(self):
        spider_dict = self.pop_link_pool()
        if spider_dict is None:
            print("link pool is empty ! ")
            return None

    def push_pool(self, id, url):
        if id is None or url is None:
            return None
        temp_dict = dict()
        temp_dict["id"] = id
        temp_dict["url"] = url
        self.redis_con.lpush(local_settings.DETAIL_LINK,umsgpack.packb(temp_dict))
        # self.cursor.execute("select * from qcc_base_info where credit_code is not null")
        # result = self.cursor.fetchall()
        # for index in result:
        #     temp_dict = dict()
        #     temp_dict["id"] = index.get("id", None)
        #     temp_dict["url"] = index.get("detail_url", None)
        #     self.redis_con.lpush(local_settings.DETAIL_LINK,umsgpack.packb(temp_dict))

    def pop_link_pool(self):
        res = self.redis_con.rpop(local_settings.DETAIL_LINK)
        if res is not None:
            return umsgpack.unpackb(res)
        return None

    @staticmethod
    def md5(string):
        m = hashlib.md5()
        m.update(string.encode("utf8"))
        return m.hexdigest()

    def __del__(self):
        self.mysql_connect.commit()
        self.mysql_connect.close()
        self.redis_con.close()


if __name__ == '__main__':
    # Manual smoke test: crawl the built-in default detail page.
    spider = QccDetailSpider()
    spider.get_hmtl_doc(None, None)
    # print(spider.pop_link_pool())