#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
数据源：巨潮 -> 市场资讯 -> 上市公司 -> 公司资讯 -> 十大股东
爬取类目：十大股东、流通股东(只爬最新的十个,有可能不足10个)
股票类型不包括港股（港股没有最新资料）
url_sample: http://www.cninfo.com.cn/information/shareholders/002003.html #十大股东
url_sample: http://www.cninfo.com.cn/information/circulateshareholders/002003.html #流通股东
"""
"""
result_sample_of_company_shareholder = {
    '_id': '',
    'stockCode': '',
    'companyShortName': '',
    'stockType': '',
    'shareholders': [{
        'deadline': '',
        'detail': [{
            'holderName': '',
            'holdQuantity': '',
            'holdProportion': '',
            'stockProperty': '',
        },],
    },],
    'circulateshareholders': [{
        'deadline': '',
        'detail': [{
            'holderName': '',
            'holdQuantity': '',
            'holdProportion': '',
            'stockProperty': '',
        },],
    },],
}
"""


import sys
import re
from copy import deepcopy

from company_crawler import CompanyCrawler
from company_crawler import CompanyInfoUtil
from xtls.logger import get_logger
from xtls.timeparser import now
from bs4 import BeautifulSoup

reload(sys)
sys.setdefaultencoding('utf-8')

CATEGORY = ['shareholders', 'circulateshareholders']

DETAIL_MAPPING = {
    u'截止时间': 'deadline',
    u'股东名称': 'holderName',
    u'持股数量(股)': 'holdQuantity',
    u'持股比例（%）': 'holdProportion',
    u'股份性质': 'stockProperty',
}

class CompanyShareHolder(CompanyCrawler):
    def __init__(self, company_info_util_instance):
        super(CompanyShareHolder, self).__init__(company_info_util_instance=company_info_util_instance)

    def find_company_info_from(self, url):
        category = re.search(r'/information/(\w+)/\d+', url).group(1)
        result_single_format = {
            'deadline': '',
            'detail': [],
        }
        result = {category: []}

        html = self.get(url)
        soup = BeautifulSoup(html, 'html5lib')
        if not soup.find('img', src='/error.jpg'):
            tr_list = soup.find_all('tr')
            if len(tr_list) <= 1:
                self.logger.info(u'undealed: the page format has been changed %s' % url)
            else:
                time_count_max = 3
                titles = [td.getText() for td in tr_list[0].find_all('td')]
                tr_list_noheader = tr_list[1:]
                index_list = self._get_index_list(tr_list_noheader)
                if not index_list:
                    self.logger.warn(u'Index_List_Error: %s' % url)
                if (len(index_list)-1) < time_count_max:
                    time_count_max = len(index_list) - 1

                for index in xrange(time_count_max):
                    start = index_list[index]
                    end = index_list[index + 1]
                    result_single = deepcopy(result_single_format)
                    detail = []
                    is_first = True
                    for tr in tr_list_noheader[start:end]:
                        if is_first:
                            result_single['deadline'] = tr.find('td').getText()
                            tds = tr.find_all('td')[1:]
                            is_first = False
                        else:
                            tds = tr.find_all('td')

                        detail.append(self._get_detail_single(tds, titles))
                    result_single['detail'] = detail
                    result[category].append(result_single)
        else:
            self.logger.info(u'undealed: not find the company %s' % url)

        return result

    def _get_index_list(self, tr_list_noheader):
        index_list = []
        for index, tr in enumerate(tr_list_noheader):
            if len(tr.find_all('td')) == 5:
                index_list.append(index)

        tr_first = tr_list_noheader[0]
        if len(index_list) == 1 & (not tr_first.find('td').getText()):
            index_list.append(0)
        else:
            index_list.append(len(tr_list_noheader))

        return index_list

    def _get_detail_single(self, tds, titles):
        detail_single = {}
        for index, td in enumerate(tds):
            key = self.company_info.detail_mapping.get(titles[index + 1], None)
            if key:
                detail_single[key] = td.getText()
            else:
                self.logger.info(u'undealed: Undefined %s' % titles[index + 1])
        return detail_single

    def save(self, data):
        data['updateTime'] = now()
        for key, value in data.items():
            # if key in ['shareholders', 'circulateshareholders']:
            #     for holder_info in value:
            #         for child_key, child_value in holder_info.items():
            #             if child_key in ['shareholdersDetail', 'circulateshareholdersDetail']:
            #                 for holder in child_value:
            #                     for sub_key, sub_value in holder.items():
            #                         print '{}: {}'.format(sub_key, sub_value)
            #                     print '*' * 20
            #             else:
            #                 print '{}: {}'.format(child_key, child_value)
            #         print '-'*40
            # else:
            print '{}: {}'.format(key, value)

        print "=" * 70

    def deal(self, tp, soup):
        super(CompanyShareHolder, self).deal(tp=tp, soup=soup)

    def run(self, startType=1, endType=4):
        super(CompanyShareHolder, self).run(startType=startType, endType=endType)

def main():
    global CATEGORY
    global DETAIL_MAPPING
    from_company_info = CompanyInfoUtil(category=CATEGORY,
                                        detail_mapping=DETAIL_MAPPING)
    from_company_info.set_logger(get_logger(__file__))
    url_stock_type_label = {'1': '', '2': '', '3': '', '4': '', '5': '', '6': ''}
    from_company_info.set_url_stock_type_label(url_stock_type_label)

    try:
        CompanyShareHolder(from_company_info).run()
    except:
        from_company_info.logger.warn('%s shuttdown' % from_company_info.category)
    # CompanyShareHolder(from_company_info).run_one('http://www.cninfo.com.cn/information/shareholders/000020.html')

if __name__ == '__main__':
    main()

