#!/usr/bin/env python
# encoding=utf-8
# debug at 2016/08/09 for information addition

import sys
from copy import deepcopy

from pymongo import MongoClient
from scpy.logger import get_logger
from xtls.basecrawler import BaseCrawler
from xtls.timeparser import now
from xtls.util import BeautifulSoup

# Python 2 idiom: re-expose sys.setdefaultencoding so the Chinese literals
# below can be mixed with byte strings without explicit encode/decode calls.
reload(sys)
sys.setdefaultencoding('utf-8')
logger = get_logger(__file__)
# Crawl results are stored in a local MongoDB (db 'stockProj', collection 'stockInfo').
MONGO = MongoClient('127.0.0.1', 27017)

# Listing-board id -> [Chinese board name, URL path segment on cninfo.com.cn].
# Ids 1-4 are mainland boards, 5-6 are Hong Kong boards (see CompanyCrawler.run).
# NOTE(review): the name misspells 'TYPE' as 'TPYE'; kept as-is because other
# modules may already import it under this name.
TPYE_MAPPING = {
    '1': [u'深市主板', 'szmb'],  # Shenzhen main board
    '2': [u'中小企业板', 'szsme'],  # SME board
    '3': [u'创业板', 'szcn'],  # ChiNext
    '4': [u'沪市主板', 'shmb'],  # Shanghai main board
    '5': [u'香港主板', 'mb'],  # Hong Kong main board
    '6': [u'香港创业板', 'gem'],  # Hong Kong GEM
}

# Profile-page section title -> short English tag. Not referenced in this file;
# presumably used by a sibling module. NOTE(review): 'lastest' looks like a typo
# for 'latest', but it may already be a stored data key — confirm before fixing.
INFO_TYPE_MAPPING = {
    u'最新资料': 'lastest',
    u'发行筹资': 'issue',
    u'分红配股': 'dividend',
    u'高管人员': 'management',
    u'股本结构': 'stockstructure',
    u'财务指标': 'financialreport'

}

# Brief-page URL templates; filled with (TPYE_MAPPING[tp][1], stockCode).
COMPANY_INFO_URL = 'http://www.cninfo.com.cn/information/brief/%s%s.html'
HK_COMPANY_INFO_URL = 'http://www.cninfo.com.cn/information/hk/%s/brief%s.html'

# Skeleton Mongo document for a mainland listing; deep-copied per company so
# every record carries the full field set even when the site omits a value.
COMPANY_FORMAT = {
    '_id': '',
    'stockCode': '',
    'companyName': '',
    'companyShortName': '',
    'companyEnglishName': '',
    'regAddress': '',
    'stockType': '',
    'legalPerson': '',
    'secretary': '',
    'regCap': '',
    'industry': '',
    'zipcode': '',
    'phone': '',
    'fax': '',
    'website': '',
    'prospectusTime': '',
    'PERatio': '',
    'releaseMode': '',
    'mainUnderwriter': '',
    'listingRecommended': '',
    'sponsorInstitution': '',
}
# Skeleton Mongo document for a Hong Kong listing (fewer guaranteed fields).
HK_COMPANY_FORMAT = {
    '_id': '',
    'stockCode': '',
    'companyName': '',
    'companyShortName': '',
    'regAddress': '',
    'stockType': '',
    'industry': '',
}
# Hong Kong brief page: Chinese row label -> English document field.
HK_DETAIL_MAPPING = {
    u'股份名称': 'companyName',
    u'公司地址': 'address',
    u'主要业务': 'principalActivities',
    u'主席': 'chairman',
    u'行业分类': 'industry',
    u'注册地点': 'regAddress',
    u'过户处': 'registrar',
    u'买卖单位': 'boardLot',
    u'发行股数': 'issuedShares',
    u'市值货币': 'capitalisationCurrency',
    u'市值': 'capitalisation',
    u'每股盈利货币': 'EPSCurrency',
    u'每股盈利': 'EPS',
    u'每股盈利经调整指示': 'EPSAdjustedIndicator',
    u'交易货币': 'tradingCurrency',
    u'上市日期': 'listingDate',
    u'纯利货币': 'netProfitCurrency',
    u'纯利': 'netProfit',
    u'资产净值货币': 'netAssetValueCurrency',
    u'资产净值': 'netAssetValue',
    u'指示': 'indicator',
}
# Mainland brief page: Chinese row label (with trailing full-width colon) ->
# English document field.
DETAIL_MAPPING = {
    u'公司全称：': 'companyName',
    u'英文名称：': 'companyEnglishName',
    u'注册地址：': 'regAddress',
    u'法定代表人：': 'legalPerson',
    u'公司董秘：': 'secretary',
    u'注册资本(万元)：': 'regCap',
    u'行业种类：': 'industry',
    u'邮政编码：': 'zipcode',
    u'公司电话：': 'phone',
    u'公司传真：': 'fax',
    u'公司网址：': 'website',
    u'上市时间：': 'listingDate',
    u'招股时间：': 'prospectusTime',
    u'发行数量（万股）：': 'issueShares',
    u'发行价格（元）：': 'issuePrice',
    u'发行市盈率（倍）：': 'PERatio',
    u'发行方式：': 'releaseMode',
    u'主承销商：': 'mainUnderwriter',
    u'上市推荐人：': 'listingRecommended',
    u'保荐机构：': 'sponsorInstitution',
}


class CompanyCrawler(BaseCrawler):
    def __init__(self):
        super(CompanyCrawler, self).__init__(total=0)

    def find_company_detail(self, url):
        result = {}
        soup = BeautifulSoup(self.get(url))

        for tr in soup.find_all('tr'):
            tds = tr.find_all('td')
            if len(tds) != 2:
                continue
            title = tds[0].getText().strip()
            value = tds[1].getText().strip()

            key = DETAIL_MAPPING.get(title, None)
            if key:
                result[key] = value
            else:
                logger.info(u'undealed: %s' % title)
        return result

    def save(self, data):
        data['updateTime'] = now()
        MONGO['stockProj']['stockInfo'].find_one_and_update({'_id': data['_id']}, {'$set': data}, upsert=True)

    def deal(self, tp, soup):
        for index, a in enumerate(soup.find_all('a')):
            self.total += 1
            company = deepcopy(COMPANY_FORMAT)
            text = a.getText().strip().split(' ', 1)
            company['_id'] = company['stockCode'] = text[0]
            company['companyShortName'] = text[1].strip()
            company['stockType'] = TPYE_MAPPING[str(tp)][0]

            if MONGO['stockProj']['stockInfo'].find_one({'_id': text[0]}):
                continue
            company.update(self.find_company_detail(COMPANY_INFO_URL % (TPYE_MAPPING[str(tp)][1], text[0])))
            logger.info('%s-%s' % (index, company['companyShortName']))
            self.save(company)

    def find_hk_company_detail(self, url):
        result = {}
        soup = BeautifulSoup(self.get(url))

        for tr in soup.find_all('tr'):
            tds = tr.find_all('td')
            if len(tds) != 4:
                continue
            title = tds[2].getText().strip()
            value = tds[3].getText().strip()
            key = HK_DETAIL_MAPPING.get(title, None)
            if key:
                result[key] = value
            else:
                logger.info(u'undealed: %s' % title)
        return result

    def deal_hk(self, tp, soup):
        for index, a in enumerate(soup.find_all('a')):
            self.total += 1
            company = deepcopy(HK_COMPANY_FORMAT)
            text = a.getText().strip().split(' ', 1)
            company['_id'] = company['stockCode'] = text[0]
            company['companyShortName'] = text[1].strip()
            company['stockType'] = TPYE_MAPPING[str(tp)][0]

            if MONGO['stockProj']['stockInfo'].find_one({'_id': text[0]}):
                continue
            company.update(self.find_hk_company_detail(HK_COMPANY_INFO_URL % (TPYE_MAPPING[str(tp)][1], text[0])))
            # print json.dumps(company, ensure_ascii=False, sort_keys=True, indent=4)
            # raw_input('go onm')
            logger.info('%s-%s' % (index, company['companyShortName']))
            self.save(company)

    def run(self):
        html = self.get('http://www.cninfo.com.cn/cninfo-new/information/companylist')
        soup = BeautifulSoup(html)
        for tp in xrange(0, 7):
            if tp <= 4:
                # self.deal(tp, soup.find('div', id='con-a-' + str(tp)))
                self.deal(tp, soup.find('ul',attrs={'class':'company-list'}))
            else:
                # self.deal_hk(tp, soup.find('div', id='con-a-' + str(tp)))
                self.deal_hk(tp, soup.find('ul',attrs={'class':'company-list'}))
        print self.total


def main():
    """Build a crawler and run a full crawl of all listing boards."""
    crawler = CompanyCrawler()
    crawler.run()


if __name__ == '__main__':
    main()
