#!/usr/bin/env python 
# coding:utf-8
# @Time :9/26/18 12:01

import json
import os
import sys
import time
import hashlib

from lxml import html

sys.path.append("..")
sys.path.append("../..")
sys.path.append("../../..")
from proxy.proxy_type import ProxyType

from common.mongo import MongDb
from ext.task_base import TaskBase
from logger import AppLogger
from sites.stock.www_cninfo_com_cn.compnay_list.company_list_crawl import CompanyListCrawl
from config.mongo_conf import AppDataMongoConf


# Module-level logger; written to key_person_crawl.log and also handed to
# CompanyListCrawl when the cached company list has to be rebuilt.
logger = AppLogger('key_person_crawl.log').get_logger()


class KeyPersonCrawl(TaskBase):
    """Crawl listed-company key-person (management) pages from cninfo.com.cn
    and batch-insert the parsed records into MongoDB.
    """

    # Page URL template; type_code is the exchange/market prefix and
    # stock_code the ticker (both come from company_list.json).
    __START_URL = "http://www.cninfo.com.cn/information/management/{type_code}{stock_code}.html"

    __HOST = "www.cninfo.com.cn"

    # Target MongoDB collection.
    __MONGO_TABLE = "juchao_info"

    # Flush the in-memory result buffer to mongo once it reaches this size.
    __MAX_CACHE_COUNT = 50

    def __init__(self, log):
        """Initialize the task: mongo connection plus default HTTP headers.

        :param log: logger instance shared with the base task.
        """
        super(KeyPersonCrawl, self).__init__(log)
        self._app_data_mongo = MongDb(AppDataMongoConf.HOST, AppDataMongoConf.PORT, AppDataMongoConf.DB,
                                      AppDataMongoConf.USER, AppDataMongoConf.PASS, log=log)
        self.__set_headers()

    def __set_headers(self):
        """Install browser-like default headers on the proxied session."""
        headers = {
            "Host": self.__HOST,
            "Connection": "keep-alive",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7",
        }
        self._proxy_requests.set_header(headers)

    def __grab_key_person(self, **company_params):
        """Fetch and parse the key-person page for one company.

        :param company_params: expects keys ``type_code``, ``stock_name``
            and ``stock_code`` (presumably strings, as loaded from
            company_list.json -- confirm against CompanyListCrawl).
        :return: dict shaped for insertion into the juchao_info collection.
        """
        type_code = company_params.get("type_code")
        stock_name = company_params.get("stock_name")
        stock_code = company_params.get("stock_code")
        self.log.info("__grab_key_person 当前正在采集 {} 公司， 股票代码是 {} ...".format(stock_name, stock_code))
        url = self.__START_URL.format(type_code=type_code, stock_code=stock_code)
        response = self._proxy_requests.get(url, proxy_type=ProxyType.KUNPENG_DYNAMIC)
        # The site serves GB-encoded pages (apparent_encoding was GB2312);
        # gb18030 is a superset of GB2312, so decode with it explicitly.
        html_response = html.fromstring(response.content.decode('gb18030'))
        # [1:] skips the table's header row.
        info_extract = html_response.xpath('//div[@class="zx_left"]/div[@class="clear"]//tr')[1:]

        key_person = []
        for per_info_extract in info_extract:
            cells = [cell.strip() for cell in per_info_extract.xpath('./td/text()')]
            key_person.append(dict(zip(['name', 'position', 'year', 'sex', 'diploma'], cells)))

        # 'id' renamed to avoid shadowing the builtin.
        record_id = self.__generate_md5(stock_code)
        now = time.strftime("%Y-%m-%d %H:%M:%S")
        item = {
            "_id": record_id,
            "_record_id": record_id,
            "_in_time": now,
            "_utime": now,
            "stock_code": stock_code,
            "stock_name": stock_name,
            "_src": [
                {
                    "url": "http://" + self.__HOST,
                    "download_time": int(time.time()),
                    "site": self.__HOST,
                }
            ],
            "key_person": key_person
        }
        return item

    def __load_company_params(self):
        """Load the cached company list, crawling it first if it is missing.

        :return: list of per-company parameter dicts.
        """
        self.log.info("开始加载公司参数...")
        if not os.path.exists("../company_list.json"):
            # Rebuild the cache file via the company-list crawler.
            CompanyListCrawl(logger, save_json=True)()

        with open("../company_list.json", 'r') as f:
            return json.load(f)

    def __generate_md5(self, data):
        """Return the lowercase hex md5 digest of *data*.

        Bug fix: text input is encoded as UTF-8 first -- under Python 3,
        ``md5().update`` raises TypeError on ``str``.
        """
        if not isinstance(data, bytes):
            data = data.encode('utf-8')
        # hexdigest() is already lowercase.
        return hashlib.md5(data).hexdigest()

    def __put_data(self, result_list):
        """Batch-insert buffered items; failures are logged, not raised,
        so one bad batch does not abort the whole crawl.
        """
        try:
            self.log.info("当前mongo正在批量插入数据...")
            self._app_data_mongo.insert_batch_data(self.__MONGO_TABLE, result_list)
        # Narrowed from BaseException: never swallow KeyboardInterrupt/SystemExit.
        except Exception as e:
            self.log.error("__put_data 发生错误：")
            self.log.exception(e)

    def start(self, *args, **kwargs):
        """Crawl every company and persist results in batches.

        Per-company errors are logged and skipped so a single bad page does
        not stop the run.
        """
        company_params = self.__load_company_params()
        result_list = []
        for per_company_params in company_params:
            try:
                item = self.__grab_key_person(**per_company_params)
                if isinstance(item, dict):
                    result_list.append(item)
                # >= instead of >: flush exactly at the cache limit rather
                # than one item late.
                if len(result_list) >= self.__MAX_CACHE_COUNT:
                    self.__put_data(result_list)
                    del result_list[:]

            # Narrowed from BaseException so Ctrl-C still stops the crawl.
            except Exception as e:
                self.log.error("start 爬取公司信息时发生错误：")
                self.log.exception(e)

        # Bug fix: flush the final partial batch -- the original dropped up
        # to __MAX_CACHE_COUNT items left in the buffer when the loop ended.
        if result_list:
            self.__put_data(result_list)

        self.log.info("成功退出采集程序...")


def main():
    """Script entry point: run the key-person crawl, logging any fatal error."""
    try:
        crawler = KeyPersonCrawl(logger)
        crawler()
    except Exception as err:
        logger.error("采集异常退出: ")
        logger.exception(err)


# Allow the module to be executed directly as a crawl script.
if __name__ == '__main__':
    main()
