#!/usr/bin/env python
# -*- coding:utf-8 -*-
import datetime
import json
import sys
import time
# try:
#     from cStringIO import StringIO
# except ImportError:
#     from StringIO import StringIO

from pymongo import MongoClient
from scpy.logger import get_logger
from scpy.qiniu import Qiniu
from scpy.xawesome_crawler import BaseCrawler
from xtls.timeparser import now
from xtls.codehelper import forever
# from util import convert

# Python 2 only: re-expose setdefaultencoding so implicit str<->unicode
# conversions throughout this script default to UTF-8.
reload(sys)
sys.setdefaultencoding("utf-8")
logger = get_logger(__file__)

# Target Mongo deployment and the collection announcements are written to.
MONGO = MongoClient('10.132.23.104', 27017)
DB_NAME, COLL_NAME = 'stockProj', 'shenhuStockAnno'
# Object store used for mirroring announcement PDF attachments.
qiniu = Qiniu(bucket='sc-crawler', host='10.132.23.104')

# cninfo announcement search endpoint; queried with a form-encoded POST.
QUERY_URL = 'http://www.cninfo.com.cn/cninfo-new/announcement/query'
POST_HEADERS = {
    'X-Requested-With': 'XMLHttpRequest',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
}
# Maps the site's category id to its human-readable (Chinese) label;
# the label is what gets stored in each document's 'type' field.
CATEGORY = {
    "category_ndbg_szsh": u"年度报告",
    "category_bndbg_szsh": u"半年度报告",
    "category_yjdbg_szsh": u"一季度报告",
    "category_sjdbg_szsh": u"三季度报告",
    "category_scgkfx_szsh": u"首次公开发行及上市",
    "category_pg_szsh": u"配股",
    "category_zf_szsh": u"增发",
    "category_kzhz_szsh": u"可转换债券",
    "category_qzxg_szsh": u"权证相关公告",
    "category_qtrz_szsh": u"其他融资",
    "category_qyfpxzcs_szsh": u"权益及限制出售股份",
    "category_gqbd_szsh": u"股权变动",
    "category_jy_szsh": u"交易",
    "category_gddh_szsh": u"股东大会",
    "category_cqfxyj_szsh": u"澄清风险业绩预告",
    "category_tbclts_szsh": u"特别处理和退市",
    "category_bcgz_szsh": u"补充及更正",
    "category_zjjg_szsh": u"中介机构报告",
    "category_ssgszd_szsh": u"上市公司制度",
    "category_zqgg_szsh": u"债券公告",
    "category_qtzdsx_szsh": u"其它重大事项",
    "category_tzzgx_szsh": u"投资者关系信息",
    # added 2016-07-08
    "category_dshgg_szsh": u"董事会公告",
    "category_jshgg_szsh": u"监事会公告",
}


def parse_time_stamp(time_stamp):
    """Render a Unix timestamp (seconds) as a local-time 'YYYY-MM-DD HH:MM:SS' string."""
    tm = time.localtime(time_stamp)
    return '%04d-%02d-%02d %02d:%02d:%02d' % (
        tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec)


class StockCrawler(BaseCrawler):
    """Crawl cninfo.com.cn announcements for a single CATEGORY key.

    Each result page (50 items) is parsed into a flat dict and upserted
    into Mongo; a first-seen PDF attachment is mirrored to qiniu.
    """

    def __init__(self, category, page=1):
        """
        :param category: a key of CATEGORY (e.g. 'category_ndbg_szsh')
        :param page: 1-based page number to start crawling from
        """
        super(StockCrawler, self).__init__(page=page, category=category)
        # Form body expected by QUERY_URL; 'pageNum' is overwritten with
        # the current page inside run() before every request.
        self.post_data = {
            'stock': '',
            'searchkey': '',
            'plate': '',
            'category': '%s;' % self.category,
            'trade': '',
            'column': 'szse',
            'columnTitle': u'历史公告查询',
            'pageNum': 0,
            'pageSize': 50,
            'tabName': 'fulltext',
            'sortName': '',
            'sortType': '',
            'limit': '',
            'showTitle': '',
            'seDate': self.build_date(),
        }

    @classmethod
    def build_date(cls):
        """Return the search date range: from 1949-10-01 through today."""
        return '1949-10-01 ~ ' + datetime.datetime.now().strftime('%Y-%m-%d')

    def parse(self, item):
        """Map one raw announcement dict from the API into our Mongo schema.

        :param item: one element of the response's 'announcements' list
        :return: dict keyed for Mongo, with '_id' = announcementId
        """
        result = {
            '_id': item['announcementId'],
            'stockCode': item['secCode'],
            'companyShortName': item['secName'],
            'title': item['announcementTitle'],
            'type': CATEGORY[self.category],
            # API reports milliseconds; convert to seconds before formatting.
            'announcementTime': parse_time_stamp(item['announcementTime'] / 1000),
            'announcementId': item['announcementId'],
            'filetype': str(item['adjunctType']).lower(),
            'downloadUrl': 'http://www.cninfo.com.cn/' + item['adjunctUrl'],
            'section': item['pageColumn'],  # SME board, ChiNext, ...
            'orgId': item['orgId'],
            'columnId': item['columnId'],
            'associateAnnouncement': item['associateAnnouncement'],
        }
        return result

    def save(self, data):
        """Insert or update one parsed announcement.

        :param data: dict produced by parse()
        :return: 0 if a new document was inserted, 1 if a new category was
                 appended to an existing document, -1 if this exact
                 document/category pair was already stored (no-op)
        """
        logger.info('save item %s' % data['_id'])
        data['updateTime'] = now()
        old_data = MONGO[DB_NAME][COLL_NAME].find_one({'_id': data['_id']})
        if not old_data:
            try:
                # Best effort: mirror the PDF into qiniu; the announcement
                # document is stored even if the download/upload fails.
                if data['downloadUrl'].lower().endswith('pdf'):
                    content = self.get(data['downloadUrl'])
                    upload_rst = qiniu.upload(content, suffix='pdf')
                    data['fileKey'] = upload_rst['_id']
            except Exception as e:  # was py2-only 'except Exception, e' syntax
                logger.exception(e)

            # 'type' is stored as a list so later categories can be appended.
            data['type'] = [data['type']]
            MONGO[DB_NAME][COLL_NAME].insert_one(data)
            rst = 0
        elif data['type'] not in old_data['type']:
            data['type'] = old_data['type'] + [data['type']]
            MONGO[DB_NAME][COLL_NAME].find_one_and_update(
                {'_id': data['_id']},
                {'$set': data}
            )
            rst = 1
        else:
            rst = -1
        return rst

    def run(self):
        """Walk result pages until exhausted, capped, or 20 already-seen items."""
        saved = 0
        while True:
            logger.info('now page : %s' % self.page)
            self.post_data['pageNum'] = self.page
            data = self.post(QUERY_URL, data=self.post_data, headers=POST_HEADERS)
            annos = json.loads(data)['announcements']
            if not annos:
                break
            for index, item in enumerate(annos, start=1):
                anno = self.parse(item)
                rst = self.save(anno)
                if rst == -1:
                    # Already stored: after 20 duplicates assume the rest of
                    # the listing was crawled previously and stop early.
                    saved += 1
                    if saved > 20:
                        return
                if rst == 1:
                    logger.info('onemore %s' % anno['_id'])
            if self.page == 1000:  # hard safety cap on pagination
                break
            if len(annos) != 50:  # short page means last page
                return
            self.page += 1


def main():
    """Sweep every announcement category forever, at most once per hour."""
    for loop_count in forever(1):
        logger.info('now loop_count %s' % loop_count)
        before = time.time()
        for key, label in CATEGORY.items():
            logger.info('crawling %s-%s' % (key, label))
            StockCrawler(key).run()
        end = time.time()

        # Finished in under an hour: sleep out the remainder of the hour.
        if end - before < 3600:
            logger.info('task done, sleeping... %s' % (3600 - end + before))
            time.sleep(3600 - end + before)


if __name__ == '__main__':
    # NOTE(review): the real entry point main() is commented out and a
    # one-off upload of this script itself runs instead — looks like
    # leftover debugging; confirm before deploying.
    # qiniu.upload_file('trans.py')
    qiniu.upload('trans.py')
    # main()
