#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
from multiprocessing.dummy import Pool as ThreadPool

import pymongo
import requests
from bs4 import BeautifulSoup
from scpy.logger import get_logger
from scpy.xawesome_codechecker import timeit
from scpy.xawesome_time import now
from xtls.util import sha1

from util import get_mongo_conf

logger = get_logger(__file__)
# Python 2 hack: re-expose setdefaultencoding and force UTF-8 so implicit
# str<->unicode conversions of the Chinese text handled below don't raise
# UnicodeDecodeError.  (No-op concept in Python 3.)
reload(sys)
sys.setdefaultencoding("utf-8")

# MongoDB connection settings; indexed as MONGO_CONF[0] (host) and
# MONGO_CONF[1] (port) by ChinataxCreditsCrawler.MONGO_CONN below.
MONGO_CONF = get_mongo_conf()


class ChinataxCreditsCrawler(object):
    """Crawler for the taxpayer credit listings published at
    hd.chinatax.gov.cn, saving one MongoDB document per result row.

    NOTE(review): MONGO_CONN is created at class-definition time and shared
    by all instances/threads; pymongo's MongoClient is designed to be shared,
    so this is intentional.
    """
    MONGO_CONN = pymongo.MongoClient(MONGO_CONF[0], MONGO_CONF[1])

    URL = 'http://hd.chinatax.gov.cn/fagui/action/InitCredit.do'

    # Maximum number of attempts for a single HTTP request before giving up
    # and returning None (was a hard-coded 3 in get()/post()).
    MAX_RETRIES = 3

    # Province name -> administrative tax-region code used by the search form.
    PROVINCES = {
        u"北京": "110000", u"天津": "120000", u"河北": "130000", u"山西": "140000", u"内蒙古": "150000",
        u"辽宁": "210000", u"大连": "210200", u"吉林": "220000", u"上海": "310000", u"江苏": "320000",
        u"浙江": "330000", u"宁波": "330200", u"安徽": "340000", u"福建": "350000", u"厦门": "350200",
        u"江西": "360000", u"山东": "370000", u"青岛": "370200", u"河南": "410000", u"湖北": "420000",
        u"湖南": "430000", u"广东": "440000", u"深圳": "440300", u"广西": "450000", u"海南": "460000",
        u"重庆": "500000", u"四川": "510000", u"贵州": "520000", u"云南": "530000", u"西藏": "540000",
        u"陕西": "610000", u"甘肃": "620000", u"青海": "630000", u"宁夏": "640000", u"新疆": "650000",
        u"黑龙江": "230000",
    }

    def __init__(self, province, page=1):
        """Set up an HTTP session and fetch the initial search-form tokens.

        :param province: one of the PROVINCES keys (unicode province name).
        :param page: 1-based page number to start crawling from.
        :raises ValueError: if `province` is not a known province name.
        """
        if province not in self.PROVINCES:
            raise ValueError(u'Province error.')
        self.province = province
        self.tax_code = self.PROVINCES[province]
        self.page = page
        # One session per crawler so cookies from the initial GET are echoed
        # back on every search POST.
        self.request = requests.Session()
        self.request.headers[
            'User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36'
        self.request.headers['Accept-Language'] = 'en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4'
        self.request.headers['Connection'] = 'keep-alive'
        self.post_data = self.build_post_data(BeautifulSoup(self.get(self.URL), 'html5lib'))
        logger.info('init new instance done.')

    def get(self, url, deep=0):
        """GET `url`; retry up to MAX_RETRIES times, returning the raw body
        bytes or None if every attempt failed."""
        if deep == self.MAX_RETRIES:
            return None
        try:
            return self.request.get(url).content
        except Exception as e:
            # Transient network error: log with traceback and retry.
            logger.exception(e)
            return self.get(url, deep + 1)

    def post(self, url, data, deep=0):
        """POST `data` to `url`; retry up to MAX_RETRIES times, returning the
        raw body bytes or None if every attempt failed."""
        if deep == self.MAX_RETRIES:
            return None
        try:
            return self.request.post(url, data=data).content
        except Exception as e:
            # Transient network error: log with traceback and retry.
            logger.exception(e)
            return self.post(url, data, deep + 1)

    def build_post_data(self, soup):
        """Extract the hidden search-form fields from a parsed result page.

        The server embeds per-session tokens (randCode, flag, article fields)
        as hidden <input>s that must be echoed back on the next POST.

        :param soup: BeautifulSoup of a page containing form#searchForm.
        :returns: dict of POST parameters for the next page request.
        """
        form = soup.find('form', attrs={'id': 'searchForm'})
        find_value = lambda _id: form.find('input', attrs={'name': _id})['value']
        post_data = {
            'articleField01': find_value('articleField01'),
            'articleField02': find_value('articleField02'),
            'articleField06': find_value('articleField06'),
            'taxCode': self.tax_code, 'cPage': self.page,
            'randCode': find_value('randCode'),
            'flag': find_value('flag'),
        }
        return post_data

    def save(self, data):
        """Upsert one record, keyed by a SHA-1 of its values so re-crawling
        the same row only refreshes updateTime.

        NOTE(review): hashing data.values() depends on dict iteration order;
        stable within one CPython 2 build, but hashing sorted items would be
        safer.  Left as-is because re-keying would duplicate existing
        documents in the collection.
        """
        data['_id'] = sha1('\n'.join(repr(x) for x in data.values()))
        data['updateTime'] = now()
        coll = self.MONGO_CONN['crawler_company_all']['chinataxCredit']
        coll.find_one_and_update(
            filter={'_id': data['_id']},
            update={'$set': data},
            upsert=True
        )

    def run(self):
        """Crawl every result page for this province, saving all rows.

        Follows the "next page" link until it disappears (last page) or a
        request exhausts its retries.
        """
        while True:
            logger.info('task at [%s-%s]' % (self.province, self.page))
            content = self.post(self.URL, self.post_data)
            if content is None:
                # Bug fix: retries exhausted -- stop cleanly instead of
                # handing None to BeautifulSoup and crashing the worker.
                logger.info('request failed at [%s-%s], stop' % (self.province, self.page))
                break
            # Use the same parser as __init__ for consistent parse trees.
            soup = BeautifulSoup(content, 'html5lib')
            items = [td.getText() for td in soup.find_all('td', attrs={'bgcolor': "#F0F0F0"})]

            # Result cells come as flat triples: (taxpayer id, name, year).
            for uid, name, year in zip(items[::3], items[1::3], items[2::3]):
                data = {
                    'taxpayerId': uid,
                    'taxpayerName': name,
                    'year': year,
                    'province': self.province,
                    'taxcode': self.tax_code,
                }
                self.save(data)
            try:
                # onclick ends with "('<page>');" -- strip the trailing ");"
                # then take everything after the last quote.
                next_page = soup.find('a', attrs={'title': u"下一页"})['onclick'][:-2]
                self.page = next_page[next_page.rfind("'") + 1:]
                self.post_data = self.build_post_data(soup)
            except (TypeError, KeyError, AttributeError):
                # No "next page" anchor (find() returned None) or missing
                # onclick: last page reached.  Was a bare except, which also
                # hid programming errors.
                break


def task(prov):
    """Thread-pool worker: crawl the full credit listing for one province."""
    crawler = ChinataxCreditsCrawler(prov)
    crawler.run()


@timeit
def main():
    """Fan the per-province crawl tasks out over a 4-thread pool and wait."""
    workers = ThreadPool(4)
    workers.map(task, ChinataxCreditsCrawler.PROVINCES.keys())
    workers.close()
    workers.join()

# Script entry point: crawl all provinces when run directly.
if __name__ == '__main__':
    main()

