# coding=utf8
__author__ = 'Soul'

from core.PageFetcher import *
import string
import sys
import time
import random
from dboperater.DbOperatorFactory import *
from core.Logger import *

class QiXinDetailPageFetcher(PageFetcher):
    """Fetch a qixin.com company detail page and persist the parsed data.

    Scrapes the rendered HTML for basic company info, shareholders and key
    members, and calls the site's JSON services for job/ability/investment
    records, storing everything through the 'qixindb' DB operator.

    The company id (``eid``) is taken from the last path segment of the URL.
    """

    def init(self, url, intent, recurse=False):
        """Initialize the fetcher and delegate to PageFetcher.init.

        url     -- detail-page URL; its last path segment is the eid.
        intent  -- optional dict; key 'cookie_path' names a file holding
                   one cookie per line.
        recurse -- kept for interface compatibility; the base init is
                   always invoked with recurse=True, as before.
        Returns PageFetcher.init's result.
        """
        factory = DbOperatorFactory()
        self.__db__ = factory.GetDbOperator('qixindb')

        # Hard cap on how many paginated records we pull per service.
        self.__max_info_record_limit = 1000

        # FIX: always define the attribute so GetStoredCookies() cannot
        # raise AttributeError when no intent was supplied.
        self.__cookie_path__ = ''
        if intent:
            self.__cookie_path__ = intent['cookie_path']

        # The eid is hiding in the url itself: its last path segment.
        self.__eid__ = url.split('/')[-1]

        return PageFetcher.init(self, url, True)

    def GetStoredCookies(self):
        """Return the stored cookies joined into one Cookie header value.

        Reads one cookie per line from the configured cookie file and joins
        the stripped lines with ';'.  Returns '' when no path is configured.
        """
        if not self.__cookie_path__:
            return ''

        # FIX: context manager guarantees the handle is closed even if
        # reading raises.
        with open(self.__cookie_path__, "r") as cookie_file:
            cookie_lines = cookie_file.readlines()

        cookies = [line.strip() for line in cookie_lines if line]
        return ";".join(cookies)

    def preFectch(self, params, headers):
        """Pre-fetch hook: add browser-like headers and the stored cookie.

        (Method name kept as-is, typo included -- it matches the
        PageFetcher hook spelled this way; renaming would break dispatch.)
        """
        headers['Host'] = 'www.qixin.com'
        headers['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
        headers['Accept-Language'] = 'en-US,en;q=0.8,zh-CN;q=0.6,zh-TW;q=0.4'

        cookie = self.GetStoredCookies()
        if cookie:
            headers['Cookie'] = cookie

    def __get_cur_time_str(self):
        """Return the current time as a digit string (millisecond-ish).

        Concatenates the integer seconds with the fractional digits of
        time.time(), or pads with '000' when there is no fractional part.
        (Currently unused by the visible code; kept for compatibility.)
        """
        timeparts = str(time.time()).split('.')
        if len(timeparts) > 1:
            return "%s%s" % (timeparts[0], timeparts[1])
        return "%s000" % (timeparts[0])

    def __read_json_data(self, url):
        """GET a qixin JSON service URL and return the decoded payload.

        Sends AJAX-style headers (plus the stored cookie) so the request
        looks like the site's own XHR traffic.  Returns the parsed JSON
        object, or None on network/HTTP/decoding failure (logged).
        """
        headers = {
            'Host': 'www.qixin.com',
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'X-Requested-With': 'XMLHttpRequest',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36',
            'Content-Type': 'application/json; charset=utf-8',
            'Accept-Language': 'en-US,en;q=0.8,zh-CN;q=0.6,zh-TW;q=0.4'
        }
        cookie = self.GetStoredCookies()
        if cookie:
            headers['Cookie'] = cookie

        reader = None
        try:
            # add this to pretend to be not a spider
            req = urllib2.Request(url=url, headers=headers)
            reader = urllib2.urlopen(req)
            data_src = reader.read()
        # FIX: URLError is HTTPError's superclass, so plain connection
        # failures are now logged instead of propagating.
        except urllib2.URLError as e:
            logger.error(e)
            return None
        except ValueError as e:
            logger.error(e)
            return None
        finally:
            # FIX: close the response to avoid leaking the socket.
            if reader is not None:
                reader.close()

        try:
            return json.loads(data_src)
        except ValueError as e:
            # FIX: a malformed body previously raised out of this method;
            # treat it like the other failure modes and return None.
            logger.error(e)
            return None

    def __analysis_ability_info(self):
        """Page through the 'ability' records for this eid and store them.

        Fetches pages of hit_size records up to __max_info_record_limit,
        then inserts the accumulated list in a single DB call.
        """
        if not self.__eid__:
            return
        logger.info("QiXinDetailPageFetcher:: Start analysing ability info")
        items_list = []
        current_page = 0
        hit_size = 5
        total, items = self.__analysis_ability_page(current_page, hit_size)
        if total > self.__max_info_record_limit:
            total = self.__max_info_record_limit

        items_list.extend(items)
        while (current_page + 1) * hit_size < total:
            logger.info("QiXinDetailPageFetcher::loop to __analysis_ability_info: total=%d"%(total))
            current_page = current_page + 1
            t_total, items = self.__analysis_ability_page(current_page, hit_size)
            items_list.extend(items)
            # Throttle between pages; sleeps on every page except exact
            # multiples of 10 (preserved as written -- presumably
            # anti-spider pacing; confirm the intended cadence).
            if current_page % 10:
                self.__random_sleep()

        self.__db__.insert_ability_info(self.__eid__, items_list)

    def __analysis_ability_page(self, current_page, hit_size):
        """Fetch one ability page; return (total, items), or (0, []) on failure."""
        logger.info("QiXinDetailPageFetcher:: analysing ability page: current_page=%d, hit_size=%d"%(current_page, hit_size))

        get_ability_info_url = \
            'http://www.qixin.com/service/getPagingAbilityInfo?type=1&eid=%s&page=%d&hit=%d' % \
            (self.__eid__, current_page, hit_size)
        ability_info_json_data = self.__read_json_data(get_ability_info_url)
        if ability_info_json_data:
            return self.__analysis_ability_json(ability_info_json_data)
        return 0, []

    def __analysis_ability_json(self, json_root):
        """Extract (total, items) from an ability-service JSON response.

        FIX: returns (0, []) when status != 0 -- the implicit None return
        previously crashed the caller's tuple unpack.
        """
        if json_root['status'] == 0:
            json_data = json_root['data']
            return json_data['total'], json_data['items']
        return 0, []

    def __analysis_job_info(self):
        """Page through the 'job' records for this eid and store them.

        Page 0 comes from the operation-info service; subsequent pages from
        the paging service.  Capped at __max_info_record_limit rows.
        """
        if not self.__eid__:
            return
        logger.info("QiXinDetailPageFetcher:: Start analysing job info")
        items_list = []
        current_page = 0
        hit_size = 10
        total, items = self.__analysis_operation_page()
        if total > self.__max_info_record_limit:
            total = self.__max_info_record_limit

        items_list.extend(items)
        while (current_page + 1) * hit_size < total:
            current_page = current_page + 1
            t_total, items = self.__analysis_job_page(current_page, hit_size)
            items_list.extend(items)
            # Same throttling cadence as __analysis_ability_info.
            if current_page % 10:
                self.__random_sleep()

        # FIX: insert once, after paging completes.  The original call sat
        # inside the while loop, inserting the growing list on every
        # iteration (duplicate rows) and never inserting at all when the
        # results fit in a single page.  Now matches
        # __analysis_ability_info's behavior.
        self.__db__.insert_job_info(self.__eid__, items_list)

    def __analysis_operation_page(self):
        """Fetch the operation-info page; return (total, items) of its job data."""
        logger.info("QiXinDetailPageFetcher:: analysing operation page")
        get_operation_info_url = \
            'http://www.qixin.com/service/getOperationInfo?eid=%s' % \
            (self.__eid__)
        operation_info_json_data = self.__read_json_data(get_operation_info_url)
        if operation_info_json_data:
            return self.__analysis_operation_json(operation_info_json_data)
        return 0, []

    def __analysis_operation_json(self, json_root):
        """Extract (total, items) of the 'job' section from operation JSON.

        FIX: returns (0, []) when status != 0 instead of None (which broke
        the caller's tuple unpack).
        """
        if json_root['status'] == 0:
            job_data = json_root['data']['job']
            return job_data['total'], job_data['items']
        return 0, []

    def __analysis_job_page(self, current_page, hit_size):
        """Fetch one paged job page; return (total, items), or (0, []) on failure."""
        logger.info("QiXinDetailPageFetcher:: analysing job page: current_page=%d, hit_size=%d"%(current_page, hit_size))
        get_job_info_url = \
            'http://www.qixin.com/service/getPagingOperationInfo?type=1&eid=%s&page=%s&hit=%s' % \
            (self.__eid__, str(current_page), str(hit_size))
        job_info_json_data = self.__read_json_data(get_job_info_url)
        if job_info_json_data:
            return self.__analysis_job_json(job_info_json_data)
        return 0, []

    def __analysis_job_json(self, json_root):
        """Extract (total, items) from a paged-job JSON response.

        FIX: returns (0, []) when status != 0 instead of None (which broke
        the caller's tuple unpack).
        """
        if json_root['status'] == 0:
            json_data = json_root['data']
            return json_data['total'], json_data['items']
        return 0, []

    def __analysis_investment_info(self):
        """Fetch the invested-companies list for this eid and store it."""
        if not self.__eid__:
            return
        logger.info("QiXinDetailPageFetcher:: Start analysing investment info")

        get_invested_url = \
            'http://www.qixin.com/service/getInvestedCompaniesById?eid=%s' % \
            (self.__eid__)
        invested_info_json_data = self.__read_json_data(get_invested_url)
        if invested_info_json_data:
            invested_info_list = self.__analysis_investment_json(invested_info_json_data)
            self.__db__.insert_investment_info(self.__eid__, invested_info_list)

    def __analysis_investment_json(self, json_root):
        """Return the 'items' list from an investment JSON response, else []."""
        logger.info("QiXinDetailPageFetcher:: analysing investment page")
        if json_root['status'] == 0:
            return json_root['data']['items']
        return []

    def __analysis_com_basic_info(self):
        """Parse the fetched HTML for basic info, shareholders and members.

        Builds a nested dict keyed the way the DB operator expects
        ('com_basic_info', 'com_shareholders', 'com_members') and stores it
        via insert_basic_info.  Each sub-field is parsed best-effort: a
        failure logs the exception and leaves the field empty rather than
        aborting the whole page.
        """
        try:
            logger.info("QiXinDetailPageFetcher:: Start analysing basic company info")

            from bs4 import BeautifulSoup
            # __html_src__ is populated by the PageFetcher base -- the raw
            # detail-page HTML.
            soup = BeautifulSoup(self.__html_src__, "html.parser")

            com_info_dict = {
                'com_basic_info': {
                    'com_qixin_code': self.__eid__
                }
            }

            # --- company card: name, status, phone, website, address ---
            company_card = soup.find("div", {'class': 'company-card'})
            com_info_dict['com_basic_info']['com_name'] = company_card.h2.text
            com_info_item_list = company_card.find_all("div", {'class': 'company-info-item'})
            com_info_item_list_len = len(com_info_item_list)
            if com_info_item_list_len:
                try:
                    com_state = com_info_item_list[0].find("span", {'class': 'status'}).text
                    com_info_dict['com_basic_info']['com_state'] = com_state
                except Exception:
                    com_info_dict['com_basic_info']['com_state'] = ""
                    self.__except_log_info()

                if com_info_item_list_len > 1:
                    try:
                        com_tel = com_info_item_list[1].find('div', {'class': 'company-info-item-text'})
                        com_info_dict['com_basic_info']['com_tel'] = com_tel.text
                    except Exception:
                        com_info_dict['com_basic_info']['com_tel'] = ""
                        self.__except_log_info()
                if com_info_item_list_len > 2:
                    try:
                        com_website = com_info_item_list[2].find('div', {'class': 'company-info-item-text'}).a['href']
                        com_info_dict['com_basic_info']['com_website'] = com_website
                    except Exception:
                        com_info_dict['com_basic_info']['com_website'] = ""
                        self.__except_log_info()

                if com_info_item_list_len > 3:
                    try:
                        com_addr = com_info_item_list[3].find('span', {'class': 'location-contant'}).text
                        com_info_dict['com_basic_info']['com_addr'] = com_addr
                    except Exception:
                        com_info_dict['com_basic_info']['com_addr'] = ""
                        self.__except_log_info()

            # --- registration table: field name -> (row, cell) position ---
            info_div = soup.find("div", {'id': 'info'})
            basic_info_panel = info_div.find("div", {'class': 'panel panel-default basic-info'})
            if basic_info_panel:
                com_basic_info_dict = {
                    'com_credit_code': (0, 1),
                    'com_org_code': (0, 3),
                    'com_reg_code': (1, 1),
                    'com_type': (2, 1),
                    'com_estb_date': (2, 3),
                    'com_legal_prsnt': (3, 1),
                    'com_oprt_time_range': (3, 3),
                    'com_reg_money': (4, 1),
                    'com_cert_date': (4, 3),
                    'com_reg_org': (5, 1),
                    'com_oprt_range': (7, 1)
                }
                basic_info_table_row_list = basic_info_panel.find('table').find_all('tr')
                for key in com_basic_info_dict:
                    (row_idx, cell_idx) = com_basic_info_dict[key]
                    if row_idx < len(basic_info_table_row_list):
                        cell_list = basic_info_table_row_list[row_idx].find_all('td')
                        if cell_idx < len(cell_list):
                            try:
                                com_info_dict['com_basic_info'][key] = cell_list[cell_idx].text
                            except Exception:
                                com_info_dict['com_basic_info'][key] = ""
                                self.__except_log_info()

            # --- shareholder and key-member panels ---
            info_panel_list = info_div.find_all("div", {'class': 'panel panel-default'})
            info_panel_list_len = len(info_panel_list)
            com_info_dict['com_shareholders'] = []
            if info_panel_list_len:
                shlder_info_panel = info_panel_list[0]
                no_info_div = shlder_info_panel.find('div', {'class': 'no-info'})
                if no_info_div is None:
                    com_shareholders = []
                    shlder_table_row_list = shlder_info_panel.find('table').find_all('tr')
                    for shlder_table_row in shlder_table_row_list:
                        shlder_table_row_cell_list = shlder_table_row.find_all('td')
                        if len(shlder_table_row_cell_list) >= 4:
                            try:
                                com_shareholders.append({
                                    'shlder_type': shlder_table_row_cell_list[0].text,
                                    'shlder_name': shlder_table_row_cell_list[1].text,
                                    'shlder_paid': shlder_table_row_cell_list[2].text,
                                    'shlder_real_paid': shlder_table_row_cell_list[3].text
                                })
                            except Exception:
                                self.__except_log_info()
                                continue

                    com_info_dict['com_shareholders'] = com_shareholders

                com_info_dict['com_members'] = []
                if info_panel_list_len > 1:
                    members_info_panel = info_panel_list[1]
                    no_info_div = members_info_panel.find('div', {'class': 'no-info'})
                    if no_info_div is None:
                        com_members = []
                        memb_table_row_list = members_info_panel.find('ul', {'class': 'major-person-list'}).find_all('li')
                        for memb_table_row in memb_table_row_list:
                            memb_job_title = memb_table_row.find('span', {'class': 'job-title'}).text
                            memb_name = memb_table_row.find('span', {'class': 'links'}).a['title']
                            com_members.append({
                                'memb_job_title': memb_job_title,
                                'memb_name': memb_name
                            })

                        com_info_dict['com_members'] = com_members

                self.__db__.insert_basic_info(com_info_dict)
        except Exception:
            # Best-effort scraping: log and move on rather than aborting
            # the whole fetch.  (Was a bare except, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            self.__except_log_info()

    def __except_log_info(self):
        """Log the current exception and print its traceback to stderr."""
        # Renamed locals so the builtin 'type' is not shadowed.
        (exc_type, exc_value, exc_tb) = sys.exc_info()
        logger.error("Exception throwed!!::")
        logger.error(str(exc_type))
        logger.error(str(exc_value))
        logger.error(str(exc_tb))
        import traceback
        traceback.print_exception(exc_type, exc_value, exc_tb)

    def __random_sleep(self):
        """Sleep 0-5 whole seconds to make request timing look human."""
        time.sleep(random.randint(0, 5))

    def analysis(self):
        """Entry point after fetch: parse the page and store all sections."""
        if self.__html_src__:
            self.__analysis_com_basic_info()
            #self.__analysis_job_info()
            self.__random_sleep()
            #self.__analysis_ability_info()
            self.__analysis_investment_info()
            self.__random_sleep()
            # Release the DB operator once this page is fully processed.
            del self.__db__