#!/usr/bin/env python
# encoding=utf-8

"""
上市公司全称
http://stock.quote.stockstar.com/200468.shtml
"""

import sys

from pymongo import MongoClient
from scpy.logger import get_logger
from xtls.basecrawler import BaseCrawler
from xtls.codehelper import no_exception
from xtls.pinyin import parse
from xtls.util import BeautifulSoup

reload(sys)
sys.setdefaultencoding('utf-8')

__author__ = 'xlzd'
logger = get_logger(__file__)

CONN_NEW = MongoClient('10.132.23.104', 27017)


class StockDetailCrawler(BaseCrawler):
    """Scrape one company's profile fields from its stockstar detail page."""

    def __init__(self, stock):
        super(StockDetailCrawler, self).__init__(stock=stock)

    @no_exception(on_exception=None)
    def run(self):
        """Fetch http://stock.quote.stockstar.com/<stock>.shtml and parse it.

        Returns a dict with keys companyName, totalEquity, website,
        circulationEquity, equityType, companyShortName and spell (each
        defaulting to ''), or None if anything raises (via @no_exception).
        """
        # All fields start out empty; only those found on the page are filled.
        info = dict.fromkeys(
            ('companyName', 'totalEquity', 'website', 'circulationEquity',
             'equityType', 'companyShortName', 'spell'), '')
        page = self.get('http://stock.quote.stockstar.com/{stock}.shtml'.format(stock=self.stock))
        soup = BeautifulSoup(page)
        # The page title is "<short name>(<code>)..." — keep the part before '('.
        short_name = soup.find('title').getText().strip().split('(')[0]
        info['companyShortName'] = short_name
        # First letter of each syllable's pinyin forms the spelling abbreviation.
        info['spell'] = ''.join(syllable[0] for syllable in parse(short_name, True))
        left_panel = soup.find('div', attrs={'class': 'con gszl_wrap'}).find('div', attrs={'class': 'lf'})
        for paragraph in left_panel.find_all('p'):
            text = paragraph.getText().strip()
            # Slice offsets skip the Chinese label plus its full-width colon.
            if text.startswith(u'公司名称'):
                info['companyName'] = text[5:]
            elif text.startswith(u'最新总股本'):
                info['totalEquity'] = text[6:]
            elif text.startswith(u'公司网址'):
                info['website'] = text[5:]
            elif text.startswith(u'最新流通B股') or text.startswith(u'最新流通A股'):
                info['circulationEquity'] = text[7:]
                # Position 4 holds the share-class letter ('A' or 'B').
                info['equityType'] = text[4]
        return info


def find_new(stock, source):
    """Crawl detail data for *stock* and persist it to MongoDB.

    :param stock: stock code; used as both the crawl target and document _id.
    :param source: provenance tag stored alongside the crawled fields.
    """
    data = StockDetailCrawler(stock).run()
    # run() is decorated with @no_exception(on_exception=None), so a failed
    # crawl returns None.  Guard BEFORE mutating: the original code did
    # data['_id'] = stock first, which raises TypeError on None and made
    # this check unreachable in the failure path.
    if not data:
        logger.info('error at : %s' % stock)
        return
    data['_id'] = stock
    data['stock'] = stock
    data['source'] = source
    CONN_NEW['crawler_company_all']['stockCode'].insert_one(data)
