#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
数据源：巨潮 -> 市场资讯 -> 上市公司 -> 公司资讯 -> 最新资料
爬取类目：最新股本状况/最新财务指标
股票类型不包括港股（港股没有最新资料）
url_sample: http://www.cninfo.com.cn/information/lastest/szmb000001.html
"""
"""
result_sample_of_company_lastest = {
    '_id': '',
    'stockCode': '',
    'companyShortName': '',
    'stockType': '',
    'lstCaptialStock': {
        'totalShares',
        'circulatingShares',
        'stateShares',
        'legalShares',
        'sponsorShares',
        'transferredShares',
        'bShares',
        'hShares',
    },
    'lstFinancialIndex': {
        'timeLabel': '2016中期',
        'eps': '', #Earnings Per Share
        'acps': '', #Additional Paid-In Capital Per Share
        'upps': '', #Undistributed Profit Per Share
        'roe': '', #Rate of Return on Common Stockholders' Equity
        'naps': '', #Net Asset Per Share
    }
}
"""

import sys
import json
from bs4 import BeautifulSoup
from xtls.logger import get_logger

from company_crawler import CompanyCrawler
from cleaner import Cleaner


reload(sys)
sys.setdefaultencoding('utf-8')

def get_detail_mapping_captial_stock():
    DETAIL_MAPPING_CAPTIAL_STOCK = {
        u'总股本：': 'totalShares',
        u'流通股：': 'circulatingShares',
        u'国家股：': 'stateShares',
        u'法人股：': 'legalShares',
        u'发起人股：': 'sponsorShares',
        u'转配股：': 'transferredShares',
        u'B股：': 'bShares',
        u'H股：': 'hShares',
    }
    return DETAIL_MAPPING_CAPTIAL_STOCK

def get_detail_mapping_financial_index():
    DETAIL_MAPPING_FINANCIAL_INDEX ={
        u'每股收益(元)：': 'eps', #Earnings Per Share
        u'每股资本公积金(元)：': 'acps', #Additional Paid-In Capital Per Share
        u'每股未分配利润(元)：': 'upps', #Undistributed Profit Per Share
        u'净资产收益率(%)：': 'roe', #Rate of Return on Common Stockholders' Equity
        u'每股净资产(元)：': 'naps', #Net Asset Per Share
    }
    return DETAIL_MAPPING_FINANCIAL_INDEX

def _merge_mapping():
    temp_dict = {}
    temp_dict.update(get_detail_mapping_captial_stock())
    temp_dict.update(get_detail_mapping_financial_index())
    return temp_dict

class CompanyLast(CompanyCrawler):
    def __init__(self, stock_list):
        logger = get_logger(__file__)
        super(CompanyLast, self).__init__(stock_list=stock_list, logger=logger)
        self.category = 'lastest'
        self.tab_name = 'lastest'
        self.detail_mapping = _merge_mapping()

    def find_company_info_from(self, url):
        result = {}
        html = self.get(url)
        soup = BeautifulSoup(html, 'html5lib')
        if not soup.find('img', src='/error.jpg'):
            tables = soup.select('div.zx_left div.clear')
            if len(tables) == 2:
                result['lstCaptialStock'] = self._find_lastest_info(tables[0])
                result['lstFinancialIndex'] = self._find_lastest_info(tables[1])
            else:
                self.logger.info(u'undealed: the page format has been changed %s' % url)
        else:
            self.logger.info(u'undealed: not find the company %s' % url)
        return result

    def _find_lastest_info(self, soup):
        lastest_info = {}
        tr_list = soup.find_all('tr')
        if tr_list:
            self.logger.info(u'undealed: the page format has been changed--No Table')
            return lastest_info

        for tr in tr_list:
            tds = tr.find_all('td')
            if len(tds) != 2:
                continue
            title = tds[0].getText()
            value = tds[1].getText()

            key = self.detail_mapping.get(title, None)
            if key:
                lastest_info[key] = value
            else:
                self.logger.info(u'undealed: undefined %s' % title)
        return lastest_info

    def save_update(self, data):
        cleaner = Cleaner()
        lst_stock = data.get('lstCaptialStock', None)
        if not lst_stock:
            self.logger.warn('Non Save: latest stock has no information')
        else:
            single = lst_stock
            cleaner.set_dirty_data(single)
            cleaner.clean_money_to_int(single.keys())
            single_clean = cleaner.complete()

            single_clean['stockId'] = data.get('stock_id', None)
            self.insert_to_db(table_name='lastest_stock', cleaned_data=single_clean)

        lst_financial_index = data.get('lstFinancialIndex', None)
        if not lst_financial_index:
            self.logger.warn('Non Save: latest stock has no information')
        else:
            single = lst_financial_index
            cleaner.set_dirty_data(single)
            cleaner.clean_percent_to_float(['roe'])
            keys = single.keys()
            keys.remove('roe')
            cleaner.clean_str_to_float([keys])
            single_clean = cleaner.complete()

            single_clean['stockId'] = data.get('stock_id', None)
            self.insert_to_db(table_name='lastest_financial', cleaned_data=single_clean)

    def save_print(self, data):
        print json.dumps(data, indent=4)