#!/usr/bin/env python
# encoding: utf-8
"""
@author: youfeng
@email: youfeng243@163.com
@license: Apache Licence
@file: zhejiang_crawl.py
@time: 2018/1/2 14:30
"""
import json
import sys

import click
import gevent.pool
import requests
from gevent import monkey

monkey.patch_all()

sys.path.append('..')
sys.path.append('../..')

from common.mongo import MongDb
from logger import Logger

# Source MongoDB connection settings (seed keywords live in this database).
MONGO_DB_SOURCE = {
    'host': '172.16.215.2',
    'port': 40042,
    'db': 'company_data',
    'username': 'work',
    'password': 'haizhi'
}

# Logging module: all crawl progress and errors go to this file-backed logger.
log = Logger('zhejiang_company_crawl.log').get_logger()

# Shared MongoDB handle used by the crawl loop below.
source_db = MongDb(MONGO_DB_SOURCE['host'], MONGO_DB_SOURCE['port'], MONGO_DB_SOURCE['db'],
                   MONGO_DB_SOURCE['username'],
                   MONGO_DB_SOURCE['password'], log=log)

# Search-list URL template for the Zhejiang GSXT site; {} is filled with the
# encrypted search keyword produced by the encryption service.
SEARCH_URL_TEMPLATE = 'http://gsxt.zjaic.gov.cn/client/entsearch/list?isOpanomaly=&pubType=1&searchKeyWord={}'

# Collection of seed company names (company name stored as _id).
SEED_TABLE_NAME = 'zhejiang_keyword'
# NOTE(review): not referenced anywhere in this file — presumably the target
# collection for search-list results; confirm against the rest of the project.
PARAM_TABLE_NAME = 'new_zhejiang_search_list'

# Zhejiang encryption service: remote endpoint that runs the site's JS
# encryption script and returns the encrypted value.
encry_zj_conf = {
    'url': 'http://cs4.sz-internal.haizhi.com:4000/api/run_script/gs_zj'
}


# Get the encrypted pripid from the remote encryption service.
def get_encry_pripid(encry_url, script):
    """POST *script* to the encryption service at *encry_url*.

    :param encry_url: URL of the script-execution service.
    :param script: JS expression for the service to evaluate.
    :return: raw response text on HTTP 200, otherwise None
             (request errors are logged, never raised).
    """
    post_data = {"script": script}

    try:
        # Context-manage the session so the underlying connection pool is
        # always released — the original created it and never closed it.
        with requests.session() as session:
            session.headers['Content-Type'] = 'application/json'
            r = session.post(encry_url, json=post_data)
            if r.status_code == 200:
                return r.text
    except Exception as e:
        # Broad catch is deliberate: any transport failure means "no result".
        log.exception(e)

    return None


def json_loads(text):
    """Parse *text* as JSON, returning None instead of raising on bad input."""
    try:
        return json.loads(text)
    except (ValueError, TypeError):
        # ValueError covers malformed JSON (json.JSONDecodeError subclasses it);
        # TypeError covers non-string input such as None. Narrower than the
        # original bare `except Exception`, so real bugs still surface.
        return None


# Decode the encryption-service response and extract its 'result' payload.
def get_encry_pripid_detail(encry_url, script):
    """Run *script* through the encryption service and return the decoded
    'result' field, or None on any transport, parse, or service error."""
    raw = get_encry_pripid(encry_url, script)
    if raw is None:
        return None

    payload = json_loads(raw)
    if payload is None:
        return None

    # The service signals success with an explicit null 'error' field;
    # a missing key (default 'fail') or any non-null value means failure.
    if payload.get('error', 'fail') is not None:
        return None

    return payload.get('result', None)


def get_search_key_word(company):
    """Encrypt *company* into the searchKeyWord value the site expects.

    Returns the encrypted keyword string, or None if the service fails.
    """
    return get_encry_pripid_detail(
        encry_zj_conf['url'],
        "strEnc('{}','a','b','c')".format(company))


# Crawl the search-result list page for an (encrypted) keyword.
def crawl_list_page(search_key_word):
    # NOTE(review): this function looks unfinished — `crawl_fisrt_page` is not
    # defined anywhere in this file (likely a typo for "first"), so this call
    # raises NameError at runtime; crawl_process() catches it and records the
    # crawl as failed (status = 0). Confirm whether the implementation was lost.
    url = SEARCH_URL_TEMPLATE.format(search_key_word)



    # Crawl the first page of results.
    # NOTE(review): total_page/total_num are never used, and the function
    # implicitly returns None even though the caller stores the return value
    # as `status` — verify the intended contract before completing this.
    total_page, total_num = crawl_fisrt_page(url)

# Top-level crawl routine for a single company seed.
def crawl_process(company):
    """Encrypt *company* into a search keyword, crawl its list page, and log
    the outcome; any exception is logged and recorded as status 0."""
    log.info("当前抓取企业名单: company = {}".format(company))

    status = 0
    try:
        status = crawl_list_page(get_search_key_word(company))
    except Exception as e:
        log.error("抓取列表页失败: company = {}".format(company))
        log.exception(e)

    log.info("当前抓取状态: company = {} status = {}".format(company, status))


@click.command()
@click.option('--thread',
              default=10,
              help='线程数目')
def main(thread):
    """Crawl loop: repeatedly scan the seed table for companies whose
    search_status is missing or 0 and crawl each one via a gevent pool.

    :param thread: size of the gevent greenlet pool.
    """
    pool = gevent.pool.Pool(thread)
    log.info('启用协程...')
    log.info('当前开启协程数目: thread_num = {}'.format(thread))

    times = 0
    while True:

        times += 1
        result_list = []
        # Only seeds not yet crawled successfully (status missing or 0).
        for item in source_db.traverse_batch_field(SEED_TABLE_NAME,
                                                   {'$or': [{'search_status': None},
                                                            {'search_status': 0}]}, ['_id']):
            company = item.get('_id')
            result_list.append(pool.apply_async(crawl_process, args=(company,)))
            # Drain pending results periodically so the list stays bounded.
            if len(result_list) >= 10000:
                for result in result_list:
                    result.get()
                del result_list[:]

        # Drain whatever remains from the last partial batch.
        if len(result_list) > 0:
            for result in result_list:
                result.get()

        # Wait for all greenlets in the pool to finish.
        pool.join()

        log.info("种子信息遍历完成: times = {} 休眠5s".format(times))
        # Bug fix: the log line above claims a 5s pause, but the original
        # never slept — it re-queried the seed table in a tight loop.
        # gevent.sleep also yields so other greenlets can run.
        gevent.sleep(5)


if __name__ == '__main__':
    # Script entry point: click parses --thread from the command line.
    main()
