#!/usr/bin/env python
# -*- coding:utf-8 -*-

"""
__author__ : xyh
__date__: 2016-09-14
A-share market announcement crawler (source: cninfo / juchao), started manually.
The start/end dates of the announcements to fetch can be configured via the
``shenhu_date_range`` key under ``[input]`` in ``config.cfg`` in the parent folder.
"""

import sys
import os
import time
from ConfigParser import ConfigParser

from scpy.logger import get_logger
from base_crawler import StockCrawler


# Python 2 hack: reload(sys) re-exposes sys.setdefaultencoding (the interpreter
# deletes it at start-up) so implicit str<->unicode conversions default to UTF-8.
reload(sys)
sys.setdefaultencoding("utf-8")
# Module-level logger named after this file (provided by the scpy helper).
logger = get_logger(__file__)

# Mapping of cninfo announcement-category request codes to their human-readable
# Chinese labels. The code (key) is sent in the POST payload; the label (value)
# is stored in each parsed document's 'type' field.
CATEGORY = {
    "category_ndbg_szsh": u"年度报告",
    "category_bndbg_szsh": u"半年度报告",
    "category_yjdbg_szsh": u"一季度报告",
    "category_sjdbg_szsh": u"三季度报告",
    "category_scgkfx_szsh": u"首次公开发行及上市",
    "category_pg_szsh": u"配股",
    "category_zf_szsh": u"增发",
    "category_kzhz_szsh": u"可转换债券",
    "category_qzxg_szsh": u"权证相关公告",
    "category_qtrz_szsh": u"其他融资",
    "category_qyfpxzcs_szsh": u"权益及限制出售股份",
    "category_gqbd_szsh": u"股权变动",
    "category_jy_szsh": u"交易",
    "category_gddh_szsh": u"股东大会",
    "category_cqfxyj_szsh": u"澄清风险业绩预告",
    "category_tbclts_szsh": u"特别处理和退市",
    "category_bcgz_szsh": u"补充及更正",
    "category_zjjg_szsh": u"中介机构报告",
    "category_ssgszd_szsh": u"上市公司制度",
    "category_zqgg_szsh": u"债券公告",
    "category_qtzdsx_szsh": u"其它重大事项",
    # "category_tzzgx_szsh": u"投资者关系信息",  # excluded: its POST payload differs from the others
    # Added 2016-07-08:
    "category_dshgg_szsh": u"董事会公告",
    "category_jshgg_szsh": u"监事会公告",
}


class ShenhuCrawler(StockCrawler):
    """Crawler for Shanghai/Shenzhen A-share announcements on cninfo.com.cn.

    One instance crawls a single announcement category (a key of CATEGORY),
    optionally restricted to a date range. Network fetching, persistence
    (``save``) and the crawl driver (``run``) are inherited from StockCrawler;
    this subclass only supplies the query payload and the record parser.
    """

    def __init__(self, category, date_range=None, page=1):
        """Configure the crawler for one announcement *category*.

        :param category: a CATEGORY key, sent verbatim in the search payload
        :param date_range: optional publish-date range (handled by the base class)
        :param page: starting page number (handled by the base class)
        """
        # Query payload for cninfo's announcement-search endpoint; most fields
        # are intentionally left blank so the server applies no extra filter.
        query_payload = {
            'stock': '',
            'searchkey': '',
            'plate': '',
            'category': '%s' % category,
            'trade': '',
            'column': 'szse',
            'columnTitle': u'历史公告查询',
            'pageNum': 0,
            'pageSize': 50,
            'tabName': 'fulltext',
            'sortName': '',
            'sortType': '',
            'limit': '',
            'showTitle': '',
            'seDate': '',
        }
        super(ShenhuCrawler, self).__init__(post_data=query_payload,
                                            date_range=date_range,
                                            page=page)
        # Target MongoDB database/collection for the parsed documents.
        self.db = 'stockProj'
        self.collection = 'shenhuStockAnno'
        # NOTE(review): 'qurey_url' looks like a typo for 'query_url', but the
        # base class presumably reads this exact attribute name — keep as-is.
        self.qurey_url = 'http://www.cninfo.com.cn/cninfo-new/announcement/query'
        self.name = 'shenhu'

    # ------------------- parse: overrides the base class -------------------
    def parse(self, item):
        """Map one raw announcement record from cninfo into our document schema.

        :param item: dict as returned by the cninfo query API
        :return: dict ready for MongoDB insertion (keyed by announcementId)
        """
        announcement_id = item['announcementId']
        doc = {}
        doc['_id'] = announcement_id
        doc['stockCode'] = item['secCode']
        doc['companyShortName'] = item['secName']
        doc['title'] = item['announcementTitle']
        # Human-readable label of the category this crawler instance queries.
        doc['type'] = [CATEGORY[self.post_data.get('category', None)]]
        # cninfo reports the publish time in milliseconds since the epoch.
        doc['announcementTime'] = self.parse_time_stamp(item['announcementTime'] / 1000)
        doc['announcementId'] = announcement_id
        doc['filetype'] = str(item['adjunctType']).lower()
        doc['downloadUrl'] = 'http://www.cninfo.com.cn/' + item['adjunctUrl']
        doc['section'] = item['pageColumn']  # market board, e.g. SME board, ChiNext, ...
        doc['orgId'] = item['orgId']
        doc['columnId'] = item['columnId']
        doc['associateAnnouncement'] = item['associateAnnouncement']
        return doc

    def parse_time_stamp(self, time_stamp):
        """Format a Unix timestamp (seconds) as 'YYYY-MM-DD HH:MM:SS' local time."""
        local_time = time.localtime(time_stamp)
        return time.strftime('%Y-%m-%d %H:%M:%S', local_time)

# ------------------- save: inherited from the base class -------------------
# ------------------- run:  inherited from the base class -------------------
# ------------------- END: class ShenhuCrawler ------------------------------


if __name__ == '__main__':
    # Read the crawl date range from ../config.cfg, section [input].
    config = ConfigParser()
    config_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'config.cfg'))
    with open(config_path, 'r') as cfgfile:
        config.readfp(cfgfile)

    date_range = config.get('input', 'shenhu_date_range')
    # Crawl every announcement category in turn. Iterating items() avoids a
    # second dict lookup per category, and passing %-args to logger.info lets
    # the logging module format the message lazily.
    for category, label in CATEGORY.items():
        logger.info('crawling %s-%s', category, label)
        ShenhuCrawler(category, date_range).run()