#!/usr/bin/env python
# encoding=utf-8
'''
数据源：巨潮 -> 市场资讯 -> 上市公司 -> 公司资讯 -> 公司概况
爬取类目：公司概况
股票类型不包括港股（港股没有最新资料）
url_sample: http://www.cninfo.com.cn/information/brief/szmb000001.html
'''

import os
import sys

# Make the project root importable BEFORE pulling in project-local modules.
# The original appended ROOT_PATH to sys.path only AFTER importing
# xtls/company_crawler, which only worked when the launch directory already
# had them on sys.path.
ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
if ROOT_PATH not in sys.path:
    sys.path.append(ROOT_PATH)

import json
from ConfigParser import ConfigParser

from bs4 import BeautifulSoup

from xtls.logger import get_logger
from company_crawler import CompanyCrawler

# Python 2 only: make implicit str<->unicode conversions use UTF-8 instead of
# the default ASCII codec (page content and config values are Chinese text).
reload(sys)
sys.setdefaultencoding('utf-8')


'''
{
    'stockId': '',
    'stockCode': '',
    'companyFullName': '',
    'companyEnglishName': '',
    'regAddress': '',
    'province': '',
    'legalPerson': '',
    'secretary': '',
    'regCap': '',
    'industry': '',
    'zipcode': '',
    'phone': '',
    'fax': '',
    'website': '',
    'listingDate': '',
    'prospectusDate': '',
    'issuedShares': '',
    'issuedPrice': '',
    'pe': '',
    'releaseMode': '',
    'mainUnderwriter': '',
    'listingRecommender': '',
    'sponsorInstitution': '',
}
'''

def get_detail_mapping():
    """Return the mapping from Chinese field labels on the brief page
    to the internal (camelCase) field names used for storage."""
    label_field_pairs = [
        (u'公司全称：', 'companyFullName'),
        (u'公司简称：', 'companyName'),
        (u'英文名称：', 'companyEnglishName'),
        (u'注册地址：', 'regAddress'),
        (u'法定代表人：', 'legalPerson'),
        (u'公司董秘：', 'secretary'),
        (u'注册资本(万元)：', 'regCap'),
        (u'行业种类：', 'industry'),
        (u'邮政编码：', 'zipcode'),
        (u'公司电话：', 'phone'),
        (u'公司传真：', 'fax'),
        (u'公司网址：', 'website'),
        (u'上市时间：', 'listingDate'),
        (u'招股时间：', 'prospectusDate'),
        (u'发行数量（万股）：', 'issuedShares'),
        (u'发行价格（元）：', 'issuedPrice'),
        (u'发行市盈率（倍）：', 'pe'),
        (u'发行方式：', 'releaseMode'),
        (u'主承销商：', 'mainUnderwriter'),
        (u'上市推荐人：', 'listingRecommender'),
        (u'保荐机构：', 'sponsorInstitution'),
    ]
    return dict(label_field_pairs)


# Load the project-level configuration once at import time.
CONFIG = ConfigParser()

CONFIG_PATH = ROOT_PATH + '/config.cfg'

with open(CONFIG_PATH, 'r') as cfg_file:
    CONFIG.readfp(cfg_file)

# Name of the config section describing the database currently in use.
WHICH_DB = CONFIG.get('using_db', 'db_name')


def get_api_ip():
    """Return the profile-API host IP configured for the active DB section."""
    api_ip = CONFIG.get(WHICH_DB, 'apiIp')
    return api_ip


class CompanyBrief(CompanyCrawler):
    """Crawler for the cninfo company-brief page (公司概况).

    Parses the two-column detail table into a dict keyed by the internal
    field names from get_detail_mapping(), cleans it, and stores a new
    version row in the ``brief`` table.
    """

    def __init__(self, stock_list):
        """:param stock_list: stocks to crawl, forwarded to CompanyCrawler."""
        logger = get_logger(__file__)
        super(CompanyBrief, self).__init__(stock_list=stock_list, logger=logger)
        self.category = 'brief'
        self.tab_name = 'brief'
        self.detail_mapping = get_detail_mapping()

    def find_company_info_from(self, url):
        """Fetch ``url`` and parse the brief table.

        Returns ``{'brief': {...}}``; the inner dict is empty when the page
        could not be fetched or the company does not exist.
        """
        result = {'brief': {}}
        html = self.get(url)

        if not html:
            return result

        soup = BeautifulSoup(html, 'html5lib')
        # cninfo serves a placeholder page containing /error.jpg for
        # unknown stock codes.
        if soup.find('img', src='/error.jpg'):
            self.logger.info(u'undealed: not find the company %s' % url)
            return result

        detail = {}
        for tr in soup.find_all('tr'):
            tds = tr.find_all('td')
            # Only label/value rows (exactly two cells) are relevant.
            if len(tds) != 2:
                continue
            title = tds[0].getText().strip()
            value = tds[1].getText().strip()

            key = self.detail_mapping.get(title, None)
            if key:
                detail[key] = value
            else:
                # Unknown label: log it so the mapping can be extended later.
                self.logger.info(u'undealed: %s' % title)

        result['brief'] = detail
        return result

    def clean_data(self, dirty_data):
        """Normalize a raw brief dict: numeric fields, foreign-key ids,
        primary-industry id and province. Returns the cleaned dict."""
        self.cleaner.set_dirty_data(dirty_data)
        self.cleaner.clean_ten_thousand_to_int(['regCap', 'issuedShares'])
        self.cleaner.clean_get_id_single(
            ['industry', 'mainUnderwriter', 'listingRecommender', 'sponsorInstitution'],
            conn=self.conn)
        self.cleaner.clean_get_id_list(['releaseMode'], conn=self.conn)
        single_clean = self.cleaner.complete()
        single_clean['primaryIndustryId'] = self.get_primary_industry_id(dirty_data.get('industry'))
        single_clean['province'] = self._find_province(single_clean.get('companyFullName'))

        return single_clean

    def _find_province(self, company_full_name):
        """Resolve the company's province via the internal profile API.

        Returns the (normalized) province name, or None when the name is
        empty or the API gave no usable answer.
        """
        if not company_full_name:
            return None
        # NOTE(review): company_full_name is interpolated into the query
        # string unescaped; a name containing '&' or '#' would corrupt the
        # request. Consider URL-quoting it — confirm how self.get() encodes
        # URLs before changing.
        url = u'http://{}:9030/company/profile/feature?feaType=location&companyName={}&residentMapping=y'
        url = url.format(get_api_ip(), company_full_name)
        content = self.get(url)

        if not content:
            return None

        json_content = json.loads(content)
        province = json_content.get('result', dict()).get('province', None)

        return self._clean_province(province)

    def _clean_province(self, province):
        """Shorten autonomous-region names (e.g. 内蒙古 -> 内蒙); other
        provinces pass through unchanged. None/empty input yields None."""
        if not province:
            return None
        for short_name in (u'内蒙', u'广西', u'新疆', u'宁夏', u'西藏'):
            if short_name in province:
                return short_name
        return province

    def get_primary_industry_id(self, industry):
        """Look up the primary-industry id for ``industry``.

        Returns None when ``industry`` is empty or has no mapping row.
        """
        if not industry:
            return None
        sql = 'SELECT primary_industry_id FROM company_relation_industry_primary_industry WHERE industry_name=%s'
        self.cur.execute(sql, (industry, ))
        row = self.cur.fetchone()
        # BUG FIX: fetchone() returns None for an unmapped industry; the
        # old code subscripted it unconditionally and raised TypeError.
        return row[0] if row else None

    def save_update(self, data):
        """Clean the crawled brief and insert it as a new version row."""
        single = data.get('brief', None)
        if not single:
            self.logger.warn('brief has no information %s-%s' % (data.get('stock_id'), data.get('stock_code')))
            return
        if not data.get('stock_id'):
            return

        single_clean = self.clean_data(single)
        single_clean['stockId'] = data.get('stock_id', None)
        single_clean['versionNum'] = 1 + self.select_current_version_num(table_name='brief',
                                                                         stock_id=data.get('stock_id'))
        self.insert_to_db(table_name='brief', cleaned_data=single_clean)

    def save_print(self, data):
        """Debug helper: pretty-print the crawled payload to stdout."""
        # Parenthesized form is identical in Python 2 and valid in Python 3.
        print(json.dumps(data, indent=4))

def main():
    # Placeholder entry point — the crawl is presumably driven elsewhere by
    # instantiating CompanyBrief with a stock list; TODO wire up if this
    # script is meant to run standalone.
    pass


if __name__ == '__main__':
    main()
