#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2017-07-17 08:29:18
# Project: itjuzi

from pyspider.libs.base_handler import *
import random
import pymongo
from bs4 import BeautifulSoup
import arrow

# Records older than this many days are considered stale and re-crawled;
# failed/404 ids in 'temp' are also retried after this many days.
UPDATE_DELAY = 15
# Shared MongoDB handle: 'itjuzi' database on 192.168.1.220:29001.
# NOTE: opens a network connection at import time.
DB = pymongo.MongoClient('192.168.1.220',29001)['itjuzi']


class Handler(BaseHandler):
    """pyspider handler that crawls itjuzi.com company pages into MongoDB.

    Collections used (see module-level ``DB``):
      - ``company``          current snapshot per company id
      - ``company_history``  one dated snapshot per company per day
      - ``temp``             ids that 404'd or parsed empty, with a retry stamp
    """

    crawl_config = {
        'retries': 7,
        'proxy': '192.168.1.220:3128',
    }
    # Back-off schedule (seconds) per retry attempt; '' is pyspider's
    # catch-all key for attempts beyond those listed.
    retry_delay = {
        0: 30,
        1: 5 * 60,
        2: 15 * 60,
        3: 65 * 60,
        '': 24 * 60 * 60,
    }

    @every(minutes=24 * 60)
    def on_start(self):
        """Schedule company detail pages once a day.

        Skips ids already stored in ``company`` and ids that recently
        failed (tracked in ``temp``); additionally re-crawls records whose
        last update is older than UPDATE_DELAY days.
        """
        now = arrow.now()
        url = 'http://www.itjuzi.com/company/%s'

        # Sets give O(1) membership tests in the scheduling loop below
        # (lists would make the loop O(n) per lookup).
        finished = {doc.get('_id') for doc in DB['company'].find({}, {'_id': 1})}

        # Highest id successfully stored so far; 0 when the collection is empty.
        max_id = (list(DB['company'].find({}, {'_id': 1}).sort('_id', -1).limit(1))
                  or [{'_id': 0}]).pop().get('_id')

        # NOTE(review): arrow's replace(days=-N) is the old shift API
        # (newer arrow uses .shift(days=-N)); keep as-is for compatibility.
        cutoff = now.replace(days=-UPDATE_DELAY).datetime

        # Ids that failed recently enough that we should not retry them yet.
        failed = {doc.get('_id') for doc in DB['temp'].find(
            {'_id': {'$lte': max_id}, 'update': {'$gte': cutoff}}, {'_id': 1})}

        for company_id in range(60000, 74371):
            if company_id in finished or company_id in failed:
                continue
            self.crawl(url % company_id, callback=self.detail_page, save=company_id)

        # Refresh records whose snapshot is older than UPDATE_DELAY days.
        for doc in DB['company'].find({'update': {'$lte': cutoff}}, {'_id': 1}):
            self.crawl(url % doc.get('_id'), callback=self.detail_page, save=doc.get('_id'))

    @catch_status_code_error
    @config(priority=2,age=5 * 24 * 60 * 60,cookies={'cookies':'acw_tc=AQAAAABaaxPbfgIAkVEQcC6d2w3MCN9I; gr_user_id=b2f925ef-4994-4aee-bc7b-0a1395171968; _hp2_id.2147584538=%7B%22userId%22%3A%225837954218760983%22%2C%22pageviewId%22%3A%226402464240000847%22%2C%22sessionId%22%3A%225399898142871267%22%2C%22identity%22%3Anull%2C%22trackerVersion%22%3A%223.0%22%7D; acw_sc=595d91684bb40fb958bee4cc68da1d974e9921e1; session=ad6e5eaaffcb91949e4189070fc634f9dff723ea; _gat=1; identity=326737833%40qq.com; remember_code=1FQQAzGfLu; Hm_lvt_1c587ad486cdb6b962e94fc2002edf89=1498200405,1500027712,1500128389; Hm_lpvt_1c587ad486cdb6b962e94fc2002edf89=1500133056; _ga=GA1.2.1277010002.1490167436; _gid=GA1.2.1637416395.1500027572; gr_session_id_eee5a46c52000d401f969f4535bdaa78=c2d75c14-28dc-494f-bde6-cc7c5cb9d00d'},user_agent='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36')
    def detail_page(self, response):
        """Parse one company page and persist it, plus a dated history copy.

        ``response.save`` carries the numeric company id set by on_start.
        Returns the parsed dict (pyspider stores it), or None on skip/404.
        """
        _id = response.save
        now = arrow.now()

        # 404 means this id does not exist; stamp it in 'temp' so on_start
        # skips it for the next UPDATE_DELAY days.
        if response.status_code == 404:
            DB['temp'].update_one({'_id': _id},
                                  {'$set': {'update': now.floor('day').datetime}},
                                  upsert=True)
            return

        if not response.text or response.status_code != 200:
            raise Exception('no html')
        # Sanity marker that we got a real page; the AssertionError on an
        # anti-bot/blank page makes pyspider retry per retry_delay above.
        assert 'ider' in response.text

        try:
            ret_dic = itjuzi(response.text).parse()
            # NOTE(review): cursor.count() is deprecated/removed in newer
            # pymongo; switch to count_documents() when upgrading.
            version = DB['company_history'].find({'id': _id}).count() + 1
            ret_dic['_id'] = _id
            ret_dic['version'] = version
            ret_dic['update'] = now.floor('day').datetime
            DB['company'].update_one({'_id': _id}, {'$set': ret_dic}, upsert=True)
            # History copy keyed "<id>_<YYYYMMDD>": at most one snapshot/day.
            ret_dic['_id'] = '%s_%s' % (_id, now.floor('day').format('YYYYMMDD'))
            ret_dic['id'] = _id
            DB['company_history'].insert_one(ret_dic)

            ret_dic.update({
                "html": response.text,
                "url": response.url,
                "title": response.doc('title').text(),
                })
            del ret_dic['update']
            return ret_dic
        except pymongo.errors.DuplicateKeyError:
            # Today's history snapshot already exists; nothing to do.
            pass
        except emptyPage:
            # Page exists but holds no company data; defer retry via 'temp'.
            DB['temp'].update_one({'_id': _id},
                                  {'$set': {'update': now.floor('day').datetime}},
                                  upsert=True)
            


    

class emptyPage(Exception):
    """Raised when a company page loads but contains no usable data.

    The crawler catches this to mark the id in the 'temp' collection
    instead of retrying immediately.
    """

    def __str__(self):
        # Fixed typo: 'exixt' -> 'exist'.
        return 'page not exist'

class MySoup(BeautifulSoup):
    '''
    Wrapper around BeautifulSoup whose selectors can fall back to a dummy
    empty <a> tag instead of None on no match, so chained calls such as
    ``select_one(sel).get_text(strip=True)`` return '' rather than raising
    AttributeError.
    '''
    def select(self, selector, _candidate_generator=None, limit=None, force=True):
        # force=True (the default) returns the raw result list even when it
        # is empty; force=False substitutes a dummy tag when nothing matched.
        value = super().select(selector,_candidate_generator,limit)
        if value or force:
            return value
        # Fallback: build a throwaway tree and return its inner empty <a>.
        return MySoup('<a><a></a></a>', 'xml').select_one('a > a')

    def select_one(self,selector,force=False):
        # NOTE(review): with force=True and no match, value is [] and
        # value[0] would raise IndexError — 'force' here presumably means
        # "fail loudly instead of returning the dummy"; confirm intent
        # before using force=True.
        value = self.select(selector, limit=1)
        if value  or force:
            return value[0]

        # No match: return the dummy empty tag (safe get_text/attrs target).
        return MySoup('<a><a></a></a>', 'xml').select_one('a > a')

class base:
    """Minimal parser skeleton: parses raw HTML once into ``self.soup``.

    Subclasses override ``parse`` to extract a dict of fields and
    ``assert_dic`` to validate/clean it.
    """

    def __init__(self, html):
        # Parse with lxml; all extraction happens through this tree.
        self.soup = MySoup(html, 'lxml')

    def assert_dic(self, dic):
        """Validation hook for subclasses; the default does nothing."""
        pass

    def parse(self):
        """Extraction hook for subclasses; the default does nothing."""
        pass

class itjuzi(base):
    """Parser for an itjuzi.com company detail page."""

    def assert_dic(self, dic):
        """Drop falsy values, then require the core fields to be present.

        Raises AssertionError when a required field is missing/empty,
        which makes pyspider retry the page.
        """
        dic = dict(filter(lambda x: x[1], dic.items()))

        assert 'des' in dic
        assert 'category' in dic
        assert 'locate' in dic
        assert 'name' in dic
        assert 'tagset' in dic
        return dic

    def parse(self):
        """Extract all company fields into one validated dict.

        Raises emptyPage when the page has no company name.
        """
        ret_dic = {}
        self._parse_head(ret_dic)
        self._parse_basic(ret_dic)
        self._parse_funding(ret_dic)
        self._parse_team(ret_dic)
        self._parse_product(ret_dic)
        self._parse_business(ret_dic)
        return self.assert_dic(ret_dic)

    def _parse_head(self, ret_dic):
        # Header: company name is the first line of <h1>; the nested <span>
        # holds the funding round wrapped in parentheses.
        name = self.soup.select_one('h1').get_text().strip().split('\n')[0]
        round_ = self.soup.select_one('h1 > span').get_text(strip=True)
        if not name.replace('\t', ''):
            raise emptyPage()
        ret_dic['name'] = name.replace('\t', '')
        ret_dic['round'] = round_.strip('()\t ')
        ret_dic['slogan'] = self.soup.select('.seo-slogan')[0].get_text(strip=True)
        ret_dic['category'] = [each.get_text() for each in self.soup.select('.scope > a')]
        ret_dic['locate'] = [each.get_text() for each in self.soup.select('.loca > a')]
        ret_dic['tagset'] = [each.get_text() for each in self.soup.select('.c-gray-aset span')]
        ret_dic['weibo'] = self.soup.select_one('a > .icon-weibo').parent.attrs.get('href')
        ret_dic['weblink'] = self.soup.select_one('.douniwan').attrs.get('href')

    def _parse_basic(self, ret_dic):
        # Basic info: description comes from the SEO meta tag.
        ret_dic['des'] = self.soup.select_one('head > meta[name=Description]').attrs.get('content')

        # Each .seo-second-title is a "标题：值" pair (full-width colon).
        for each in self.soup.select('.seo-second-title'):
            lis = each.get_text(strip=True).split('：')
            ret_dic[lis[0]] = lis[1]
        ret_dic['states'] = self.soup.select_one('.des-more .tag').get_text(strip=True)

    def _parse_funding(self, ret_dic):
        # Funding rounds table: one row per round.
        fundinground = []
        for tds in self.soup.select('.list-round-v2 tr'):
            date = tds.select_one('.date').get_text(strip=True)
            rounds = tds.select_one('.round').get_text(strip=True)
            finades = tds.select_one('.finades').get_text(strip=True)
            investors = [a.get_text(strip=True) for a in tds.select('td > a,td:nth-of-type(4) > span')]
            fundinground.append({'date': date, 'rounds': rounds, 'finades': finades, 'investors': investors})
        ret_dic['fundinground'] = fundinground

        ret_dic['merge'] = self.soup.select_one('.invst-data b').get_text(strip=True)

    def _parse_team(self, ret_dic):
        # Team members: person id comes from the profile link href.
        teams = []
        for tds in self.soup.select('.institu-member li > div'):
            pid = tds.select_one('.person-name > a.title').attrs.get('href')
            name = tds.select_one('b > .c').get_text(strip=True)
            position = tds.select_one('b > .c-gray').get_text(strip=True)
            des = tds.select_one('.person-des').get_text(strip=True)
            weibo = tds.select_one('.flr > a').attrs.get('href') if tds.select_one('.flr > a') else None
            teams.append({'pid': pid, 'name': name, 'position': position, 'des': des, 'weibo': weibo})
        ret_dic['team'] = teams

    def _parse_product(self, ret_dic):
        # Product list.
        product = []
        for tds in self.soup.select('.list-prod .on-edit-hide'):
            tag = tds.select_one('span.tag').get_text(strip=True)
            name = tds.select_one('h4 > b').get_text(strip=True)
            des = tds.select_one('p').get_text(strip=True)
            product.append({'tag': tag, 'name': name, 'des': des,})
        ret_dic['product'] = product

    def _parse_business(self, ret_dic):
        # Business-registration table; only present for some companies.
        bs_name = self.soup.select_one('.essential th').get_text(strip=True)
        if bs_name:
            ret_dic['business'] = {}
            ret_dic['business']['name'] = bs_name
            for each in self.soup.select('.essential td'):
                try:
                    ret_dic['business'][each.select_one('.tab_title').get_text(strip=True)] = each.select_one('.tab_main').get_text(strip=True)
                except:
                    # Best-effort: skip malformed cells (original behavior).
                    pass