#!/usr/bin/env python
# -*- coding:utf-8 -*-
import copy
import json
import re
import requests
import sys
from bs4 import BeautifulSoup
import pymongo
from scpy.logger import get_logger
from scpy.xawesome_codechecker import timeit
from util import get_mongo_conf
from scpy.xawesome_time import now

logger = get_logger(__file__)
# Python 2 hack: re-expose setdefaultencoding and force UTF-8 as the
# process-wide default so implicit str/unicode conversions of the Chinese
# text scraped below do not raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding("utf-8")
MONGO_CONF = get_mongo_conf()  # assumed (host, port) — indexed as [0]/[1] by the crawler below

# Area name -> numeric area id expected by hd.chinatax.gov.cn's list endpoint.
PLACES = {
    u"北京": "743", u"天津": "744", u"河北": "745", u"山西": "746", u"辽宁": "748", u"吉林": "750", u"深圳": "766",
    u"上海": "752", u"江苏": "753", u"浙江": "754", u"安徽": "756", u"福建": "757", u"江西": "759", u"青岛": "761",
    u"山东": "760", u"河南": "762", u"湖北": "763", u"湖南": "764", u"广东": "765", u"广西": "767", u"海南": "768",
    u"重庆": "769", u"四川": "770", u"贵州": "771", u"云南": "772", u"西藏": "773", u"陕西": "774", u"甘肃": "775",
    u"青海": "776", u"宁夏": "777", u"新疆": "778", u"大连": "749", u"宁波": "755", u"厦门": "758", u"黑龙江": "751",
    u"内蒙古": "747",
}


class TaxIllegalCrawler(object):
    MONGO_CONN = pymongo.MongoClient(MONGO_CONF[0], MONGO_CONF[1])
    PATTERN_SPACE = re.compile(ur'\t|\n|\r| ')
    LIST_URL = 'http://hd.chinatax.gov.cn/xxk/action/ListXinxikucomXml.do?dotype=area&id='
    DETAIL_URL = 'http://hd.chinatax.gov.cn/xxk/action/GetArticleView1.do?op=xxkweb&id='

    FORMAT = {
        'taxpayerName': '', 'taxpayerId': '', 'organizationCode': '', 'regAddress': '',
        'legalPerson': '', 'financePerson': '', 'agency': '', 'caseNature': '',
        'detail': '', '_id': '', 'area': ''
    }

    KV_MAP = {
        u'纳税人名称': 'taxpayerName', u'纳税人识别号': 'taxpayerId', u'组织机构代码': 'organizationCode',
        u'注册地址': 'regAddress', u'法定代表人或者负责人姓名、性别、证件名称及号码': 'legalPerson',
        u'负有直接责任的财务负责人姓名、性别、证件名称及号码': 'financePerson',
        u'负有直接责任的中介机构信息及其从业人员信息': 'agency',
        u'案件性质': 'caseNature', u'主要违法事实相关法律依据及税务处理处罚情况': 'detail',
    }

    @timeit(logger)
    def __init__(self, area):
        if area not in PLACES:
            raise Exception("area error!")
        self.area = area
        self.areaid = PLACES[area]
        self.request = requests.Session()
        self.request.headers['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36'
        self.request.headers['Accept-Language'] = 'en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4'
        self.request.headers['Connection'] = 'keep-alive'
        self.list_url = self.LIST_URL + self.areaid
        if area == u'黑龙江':
            self.list_url = 'http://hd.chinatax.gov.cn/xxk/action/ListXinxikucomXml.do?dotype=seriesarea&id=751,%E9%BB%91%E9%BE%99%E6%B1%9F5.15%E6%A1%88%E4%BB%B6'
        logger.info('init new instance done.')

    @timeit(logger)
    def get(self, url, deep=0):
        if deep == 3:
            return None
        try:
            return self.request.get(url).content
        except Exception, e:
            print e
            return self.get(url, deep + 1)

    def get_detail(self, uid, name):
        result = copy.deepcopy(self.FORMAT)
        result['area'] = self.area
        result['_id'] = uid
        content = self.get(self.DETAIL_URL + uid)
        soup = BeautifulSoup(content, 'html5lib')
        for tr in soup.find_all('tr'):
            tds = tr.find_all('td')
            if len(tds) != 2:
                continue
            key, value = self.PATTERN_SPACE.sub('', tds[0].getText()), tds[1].getText().strip()
            if key in self.KV_MAP:
                result[self.KV_MAP[key]] = value
        return result

    def save(self, data):
        try:
            data['updateTime'] = now()
            coll = self.MONGO_CONN['crawler_company_all']['chinataxIllegal']
            coll.update_one({'_id': data['_id']}, {'$set': data}, True)
        except Exception, e:
            logger.exception(e)

    @timeit(logger)
    def run(self):
        content = self.get(self.list_url)
        soup = BeautifulSoup(content)
        for item in soup.find_all('item'):
            try:
                name, uid = item['name'], item['id']
                self.save(self.get_detail(uid, name))
                logger.info("%s, %s" % (name, uid))
            except Exception, e:
                logger.exception(e)


@timeit(logger)
def main():
    """Run one crawl pass over every configured area, in sequence."""
    for area in PLACES:
        logger.info('start %s' % area)
        crawler = TaxIllegalCrawler(area)
        crawler.run()
        logger.info('end  %s' % area)


# Script entry point: crawl all areas when executed directly.
if __name__ == '__main__':
    main()
