#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
__author__ : xyh
__date__: 2016-09-14
A股市场预披露公告爬虫（源：巨潮）,手动启动,
可以通过父文件夹中的config.cfg文件中的[input]下的pre_date_range设置公告发布的起始时间
"""
import json
import sys
import os
import time

from ConfigParser import ConfigParser
from scpy.logger import get_logger

from base_crawler import StockCrawler


# Python 2 only: site.py deletes sys.setdefaultencoding at startup, so the
# module must be reloaded to restore it. Forcing the default codec to UTF-8
# makes implicit str<->unicode conversions work for the Chinese payloads
# handled below (e.g. columnTitle). Do NOT port this pattern to Python 3.
reload(sys)
sys.setdefaultencoding("utf-8")
# Module-level logger keyed on this file's path.
logger = get_logger(__file__)


class PreAShareCrawler(StockCrawler):
    """Crawler for A-share pre-disclosure announcements (source: cninfo).

    Started manually; the announcement date window comes from the
    ``pre_date_range`` key under ``[input]`` in the parent folder's
    ``config.cfg`` (see :func:`main`).
    """

    def __init__(self, date_range=None, page=1):
        # Form payload for cninfo's announcement query endpoint.  The
        # 'column' / 'columnTitle' pair selects the pre-disclosure section;
        # everything else is left blank and (presumably) filled in or paged
        # by the base crawler -- TODO confirm against StockCrawler.
        form = dict(
            stock='',
            searchkey='',
            plate='',
            category='',
            trade='',
            column='pre_disclosure',
            columnTitle=u'历史公告查询',
            pageNum=0,
            pageSize=50,
            tabName='fulltext',
            sortName='',
            sortType='',
            limit='',
            showTitle='',
            seDate='',
        )

        super(PreAShareCrawler, self).__init__(
            post_data=form, date_range=date_range, page=page)

        # Storage / endpoint configuration read by the inherited save()/run().
        self.db = 'stockProj'
        self.collection = 'preDisclosure'
        # NOTE(review): 'qurey_url' is misspelled, but the base class
        # presumably reads this exact attribute name -- verify before fixing.
        self.qurey_url = 'http://www.cninfo.com.cn/cninfo-new/announcement/query'
        self.name = 'pre_shenhu'

# --------------------------------parse----------------------------------------
    def parse(self, item):
        """Map one raw cninfo announcement record onto our mongo document.

        :param item: dict decoded from the query response; keys used here
            are announcementId, secCode, secName, announcementTitle,
            announcementType, announcementTime (epoch milliseconds),
            adjunctType, adjunctUrl, pageColumn, orgId, columnId and
            associateAnnouncement.
        :return: flat dict keyed for the ``preDisclosure`` collection.
        """
        announcement_id = item['announcementId']
        # announcementTime arrives in milliseconds; scale to seconds first.
        posted_at = self.parse_time_stamp(item['announcementTime'] / 1000)
        return dict(
            _id=announcement_id,
            stockCode=item['secCode'],
            companyShortName=item['secName'],
            title=item['announcementTitle'],
            # Stored as a one-element list (schema allows several types).
            type=[item['announcementType']],
            announcementTime=posted_at,
            announcementId=announcement_id,
            filetype=str(item['adjunctType']).lower(),
            downloadUrl='http://www.cninfo.com.cn/' + item['adjunctUrl'],
            section=item['pageColumn'],
            orgId=item['orgId'],
            columnId=item['columnId'],
            associateAnnouncement=item['associateAnnouncement'],
            # Hard-coded market tag -- presumably 'sh' covers this feed;
            # verify for Shenzhen-listed announcements.
            stockMarket='sh',
        )

    def parse_time_stamp(self, time_stamp):
        """Format a unix timestamp (seconds) as 'YYYY-mm-dd HH:MM:SS' local time."""
        local = time.localtime(time_stamp)
        return time.strftime('%Y-%m-%d %H:%M:%S', local)

# -------------------------save 继承基类-------------------------------------
# -------------------------run, 继承基类-------------------------------------
# -------------------------END: class StockCrawler----------------------


def main():
    """Manual entry point: load the date window from ../config.cfg and crawl.

    Reads ``pre_date_range`` from the ``[input]`` section of the config file
    sitting in this file's parent directory, then runs the crawler once.
    Raises IOError if the config file is missing.
    """
    here = os.path.dirname(__file__)
    config_path = os.path.abspath(os.path.join(here, os.path.pardir, 'config.cfg'))

    config = ConfigParser()
    # Open explicitly (rather than config.read) so a missing file raises.
    with open(config_path, 'r') as cfg:
        config.readfp(cfg)

    date_range = config.get('input', 'pre_date_range')
    PreAShareCrawler(date_range).run()


# Script is launched by hand (see module docstring); no CLI arguments.
if __name__ == '__main__':
    main()
