#!/usr/bin/env python
# encoding=utf-8

"""
上海证券交易所-监管动态
http://www.sse.com.cn/disclosure/credibility/dynamic/
"""
import sys

from pymongo import MongoClient
from scpy.logger import get_logger
from xtls.basecrawler import BaseCrawler
from xtls.codehelper import no_exception
from xtls.timeparser import parse_time, now
from xtls.util import BeautifulSoup, sha1

from util import get_mongo_conf

# Python 2 hack: re-expose setdefaultencoding and force UTF-8 so mixed
# str/unicode operations on Chinese page content do not raise UnicodeError.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)
# Landing page of the SSE "regulatory dynamics" disclosure list.
LIST_URL = 'http://www.sse.com.cn/disclosure/credibility/dynamic/'
# Stop a run once more than this many duplicates or errors have been seen.
THRESHOLD = 10
HOST, PORT = get_mongo_conf()
CONN = MongoClient(HOST, PORT)


class SseCrawler(BaseCrawler):
    """Crawls the SSE regulatory-dynamics list page into MongoDB."""

    def __init__(self):
        # status counters: index 0 = newly inserted, 1 = duplicate, 2 = error
        super(SseCrawler, self).__init__(status=[0, 0, 0])
        self._request.headers.update({'Host': 'www.sse.com.cn'})
        logger.info('init Sse Crawler done.')

    @no_exception(on_exception=None)
    def parse(self, item):
        """Extract one announcement from a list <li>.

        Returns a record dict, or None when anything raises
        (via the no_exception decorator).
        """
        anchor = item.find('a')
        detail_url = 'http://www.sse.com.cn' + anchor['href']
        detail_soup = BeautifulSoup(self.get(detail_url))
        body_text = detail_soup.find('div', attrs={'class': 'block_l1'}).getText().strip()
        record = {
            'content': body_text,
            'title': anchor.getText().strip(),
            'url': detail_url,
            'sourceStockExchange': 'sse',
            'time': parse_time(item.find('span').getText()),
        }
        return record

    @no_exception(on_exception=2, logger=logger)
    def save(self, data):
        """Upsert one record into Mongo.

        Returns 1 when a document with the same _id already existed,
        0 when it was newly inserted (2 on exception, via the decorator).
        """
        # NOTE(review): the id hashes the repr of dict values, so it depends
        # on the value set staying stable — verify before changing record keys.
        data['_id'] = sha1(str(data.values()))
        logger.info('save item : %s' % data['_id'])
        data['updateTime'] = now()
        collection = CONN['crawler_company_all']['regulatoryDynamic']
        previous = collection.find_one_and_update(
            filter={'_id': data['_id']},
            update={'$set': data}, upsert=True)
        return 1 if previous else 0

    def run(self):
        """Fetch the list page, save each entry; bail out once too many
        duplicates or errors accumulate (incremental-crawl cutoff)."""
        page = BeautifulSoup(self.get(LIST_URL))
        entries = page.find('div', attrs={'id': 'content_ab'}).find_all('li')
        for entry in entries:
            parsed = self.parse(entry)
            if not parsed:
                continue
            outcome = self.save(parsed)
            self.status[outcome] += 1
            if self.status[1] > THRESHOLD or self.status[2] > THRESHOLD:
                return


def main():
    """Entry point: run one crawl of the SSE regulatory-dynamics list."""
    crawler = SseCrawler()
    crawler.run()


if __name__ == '__main__':
    main()
