import requests
from bs4 import BeautifulSoup
from pymongo import MongoClient
import time
import pymongo
import re
from config import get_header,get_log
import arrow

# Module-wide scraper configuration and shared resources.
Loger = get_log('chungyebang')
# NOTE(review): hard-coded session cookie kept only for reference; unused at runtime.
# cookie={'Cookie':'gr_user_id=74520d0e-0888-4453-b0fa-6a3f2635fd5f; acw_tc=AQAAACEe8EJh7A0AK7Hkc30U3KqJU82k; session=ea9c31f86378b7f72f67487608441ddfd39cc451; _gat=1; identity=326737833%40qq.com; remember_code=.tl26XkKYr; gr_session_id_eee5a46c52000d401f969f4535bdaa78=602172c7-e4ea-48e5-be34-b07c58515f0c; _ga=GA1.2.2004034647.1485244002; Hm_lvt_1c587ad486cdb6b962e94fc2002edf89=1485244002; Hm_lpvt_1c587ad486cdb6b962e94fc2002edf89=1485246109'}
# Outbound HTTP proxy used by get_html() (http only; no https entry).
PROXY = {
    # 'http':'http://218.109.238.144:8989',
    'http':'http://192.168.1.220:8087',
}
TIMEOUT = 15        # per-request timeout in seconds
REQ_RETYP = 3       # retry attempts in get_html()
NEXT_DAILY = 3      # seconds to sleep between detail-page fetches in range_update()
UPDATE_DELAY = 10   # days before a record is considered stale and re-crawled

# Shared MongoDB handle; connecting at import time is a module side effect.
DB = MongoClient('192.168.1.220',29001)['chuangyebang']

class Company:
    """Parse one cyzone.cn company detail page into the flat dict ``sum_dic``.

    ``sum_dic`` merges the page header (name / full name / homepage) with the
    body (tags, location, description, QCC registration table, funding
    rounds); falsy values are dropped afterwards so only real data is stored.
    """

    def __init__(self, html):
        soup = BeautifulSoup(html, 'lxml')
        self.sum_dic = {}
        self.sum_dic.update(self.get_company_detail(soup))
        self.sum_dic.update(self.get_company_basic_info(soup))
        self.del_empty()
        # assert '_id' in self.sum_dic

    def del_empty(self):
        """Remove keys whose value is falsy (None, '', [], {})."""
        # Collect keys first: a dict cannot be mutated while iterating it.
        for k in [k for k, v in self.sum_dic.items() if not v]:
            del self.sum_dic[k]

    @staticmethod
    def _strip_prefix(text, prefix):
        """Return *text* with a literal *prefix* removed, if present.

        BUGFIX: the original used ``str.lstrip(prefix)``, which strips any of
        the prefix's *characters* from the left — not the prefix string — and
        could eat leading characters of the actual company name.
        """
        if text and text.startswith(prefix):
            return text[len(prefix):]
        return text

    def get_company_detail(self, soup):
        """Parse the page header ('div.top-info'): name, full name, homepage."""
        sub_soup = soup.find('div', 'top-info')
        ret_dic = {}

        ret_dic['name'] = sub_soup.find('li', 'name').string
        time_li = sub_soup.find('li', 'time')
        ret_dic['full_name'] = self._strip_prefix(time_li.string, '公司全称：') if time_li else None
        url_div = sub_soup.find('div', 'com-url')
        ret_dic['url'] = url_div.find('a').get('href') if url_div else None

        return ret_dic

    def get_company_basic_info(self, soup):
        """Parse the page body ('div.all-info') plus the funding table."""
        ret_dic = {}
        sub_soup = soup.find('div', 'all-info')

        info_tag = sub_soup.find('div', 'info-tag')

        # Each iN icon sits inside the element that holds the value text.
        i1 = info_tag.find('i', 'i1')
        i2 = info_tag.find('i', 'i2')
        i3 = info_tag.find('i', 'i3')
        i6 = info_tag.find('i', 'i6')
        ret_dic['date'] = i1.find_parent().get_text() if i1 else None
        ret_dic['financing_stage'] = i3.find_parent().get_text() if i3 else None
        ret_dic['tag'] = [each.string for each in i6.find_parent().find_all('span')] if i6 else None
        ret_dic['location'] = i2.find_parent().get_text() if i2 else None

        ret_dic['desc'] = sub_soup.find('div', 'info-box').get_text().strip()

        # QCC registration table: rows of "<span>label:</span> value".
        ret_dic['qcc'] = {}
        for ps in sub_soup.find('div', 'qcc').find_all('p'):
            spans = ps.find_all('span')
            label = spans[0].string.strip(' :')
            sibling = spans[0].next_sibling
            if len(spans) > 1:
                # Prefer the text right after the label; fall back to the
                # second span when that text is empty.
                ret_dic['qcc'][label] = sibling.string.strip() or spans[1].get_text().strip()
            else:
                ret_dic['qcc'][label] = sibling.string.strip() if sibling else None

        # Funding rounds table is optional ('div.live').
        live_soup = soup.find('div', 'live')
        if live_soup:
            ret_dic['fundinground'] = []
            for trs in live_soup.find('table').find_all('tr', recursive=False)[1:]:
                tds = trs.find_all('td', recursive=False)
                ret_dic['fundinground'].append({
                    'series': tds[0].get_text(),
                    'raised': tds[1].find('div', 'money').get_text(),
                    'investors': tds[2].get('title'),
                    'time': tds[3].get_text(),
                })

        return ret_dic


def undate_main():
    """Re-scrape every company listed in 'enterprise_index'.

    Ids refreshed within the last UPDATE_DELAY days — either successfully
    ('enterprise') or marked failed ('temp') — are skipped.  Each fetched
    page is upserted into 'enterprise' and also into 'enterprise_history'
    under a dated '<id>_<YYYYMMDD>' key.
    """
    aw = arrow.now().floor('day')
    cutoff = aw.replace(days=-UPDATE_DELAY).datetime

    # Sets, not lists: membership is tested once per index entry below.
    fail_ids = {each.get('_id') for each in
                DB['temp'].find({'update': {'$gte': cutoff}}, {'_id': 1})}
    old_ids = {each.get('_id') for each in
               DB['enterprise'].find({'update': {'$gte': cutoff}}, {'_id': 1})}

    for each in DB['enterprise_index'].find():
        _id = each.get('_id')
        cyb_url = each.get('cyb_url')
        name = each.get('name')
        if _id in old_ids or _id in fail_ids:
            continue
        # Very short urls are placeholders, not real detail pages.
        if len(cyb_url) < 25:
            continue
        Loger.debug('%s start get' % _id)

        html = get_html(cyb_url)
        if not html:
            continue
        try:
            com = Company(html)
            com.sum_dic['update'] = aw.datetime
            DB['enterprise'].update_one({'_id': _id}, {'$set': com.sum_dic}, upsert=True)

            # Dated history snapshot; aw is already floored to the day.
            com.sum_dic['_id'] = '%s_%s' % (_id, aw.format('YYYYMMDD'))
            com.sum_dic['id'] = _id
            DB['enterprise_history'].update_one({'_id': com.sum_dic['_id']},
                                                {'$set': com.sum_dic}, upsert=True)
            Loger.debug('%s success' % _id)
        except Exception as e:
            Loger.exception(e)
            print(cyb_url, name)


def get_indexs(html):
    """Parse a company-list page into index dicts ready for MongoDB.

    Each 'tr.table-plate' row yields _id, name, detail-page url, financing
    stage, business type, founding date and a shared crawl timestamp.
    """
    soup = BeautifulSoup(html, 'lxml').find('div', 'list-table')
    # Hoisted out of the loop: the timestamp is loop-invariant (the original
    # recomputed it per row, which could even straddle midnight).
    crawled_at = arrow.now().floor('day').datetime
    ret_lis = []
    for plate in soup.find_all('tr', 'table-plate'):
        ret_lis.append({
            '_id': int(plate.get('data-id')),
            'name': plate.get('data-title'),
            'cyb_url': plate.get('data-url'),
            'financing_stage': plate.find('td', 'table-stage').get_text().strip(),
            'business_type': plate.find('td', 'table-type').get_text().strip(),
            'date': plate.find('td', 'table-time').get_text().strip(),
            'update': crawled_at,
        })
    return ret_lis

def get_lastpage(html):
    """Extract the last page number from the pager's '#lastpage' link.

    Returns the number as int, or None when no 3+ digit run is found.
    """
    soup = BeautifulSoup(html, 'lxml')
    href = soup.find('a', {'id': 'lastpage'}).get('href')
    # BUGFIX: the original pattern [1-9]{3,} excluded the digit 0, so any
    # page count containing a zero (e.g. 1024) never matched.  \d{3,} keeps
    # the "at least three digits" guard that skips the url's single-digit
    # path segments ("-0-", "-1-").
    found = re.findall(r'\d{3,}', href)
    if found:
        return int(found[0])

def get_html(url):
    """Fetch *url* with retries and a linearly growing backoff.

    Returns the page text (utf-8) on HTTP 200, the int 404 on a 404
    (callers use this sentinel to mark dead ids), or None once all
    REQ_RETYP attempts are exhausted.
    """
    backoff = 5
    for _ in range(REQ_RETYP):
        try:
            req = requests.get(url, timeout=TIMEOUT, headers=get_header(), proxies=PROXY)
            if req.status_code == 404:
                return 404
            if req.status_code != 200:
                # Raise instead of assert: asserts are stripped under `python -O`.
                raise ValueError('unexpected status %s' % req.status_code)
            req.encoding = 'utf8'
            return req.text
        except requests.exceptions.RequestException:
            Loger.warning('http fail')
        except Exception as e:
            Loger.warning(e)
        # BUGFIX: the original only slept on non-request exceptions, so
        # network errors retried instantly; back off after every failure.
        time.sleep(backoff)
        backoff += 5
    Loger.error('%s time over' % url)
    return None

def update_index(page_count=None):
    """Crawl the paginated company list into 'enterprise_index'.

    Page 0 is fetched first to discover the last page number (unless
    *page_count* overrides it); duplicate _ids are skipped via unordered
    bulk inserts.  Raises RuntimeError if the first page cannot be fetched.
    """
    START = 0
    # NOTE: the original re-created a MongoClient here that shadowed the
    # identical module-level DB; the shared handle is used instead.
    url = 'http://www.cyzone.cn/vcompany/list-0-0-%s-0-1/'

    def _store(rows):
        # ordered=False: duplicates raise BulkWriteError but the rest of the
        # batch still lands, so the error is deliberately swallowed.
        try:
            DB['enterprise_index'].insert_many(rows, ordered=False)
        except pymongo.errors.BulkWriteError:
            pass

    Loger.debug('%s start crawl' % (url % START))
    html = get_html(url % START)
    if not html:
        Loger.error('%s crawl fail' % (url % START))
        # BUGFIX: a bare `raise` outside an except block is itself a
        # RuntimeError ("No active exception to re-raise").
        raise RuntimeError('first index page failed: %s' % (url % START))
    page = page_count if page_count else get_lastpage(html)
    if page is None:
        # Pager link missing/unparseable: fall back to the first page only.
        page = START
    _store(get_indexs(html))

    for i in range(START + 1, page + 1):
        Loger.debug('%s start crawl' % (url % i))
        html = get_html(url % i)
        if not html:
            Loger.error('%s crawl fail' % (url % i))
            continue
        _store(get_indexs(html))


def test():
    """Parse the locally saved fixture page and dump the resulting dict."""
    with open('test.html', 'rt') as fh:
        page = fh.read()
    company = Company(page)
    print(company.sum_dic)

def range_update(id_limit=60000, miss_limit=100):
    """Walk detail-page ids 1..id_limit-1 and upsert each company page.

    404 pages are recorded in 'temp' so they are skipped for UPDATE_DELAY
    days; crawling stops after more than *miss_limit* consecutive 404s,
    which marks the end of the allocated id range.  Both limits were
    hard-coded in the original and are now overridable keyword defaults.
    """
    aw = arrow.now().floor('day')
    url = 'http://www.cyzone.cn/r/20170808/%s.html'
    cutoff = aw.replace(days=-UPDATE_DELAY).datetime

    finished = {each.get('_id') for each in DB['enterprise'].find({}, {'_id': 1})}
    max_seen = max(finished) if finished else 0
    Loger.debug('%s max_id' % max_seen)

    # Sets, not lists: membership is tested for every id in the range below.
    fail_ids = {each.get('_id') for each in DB['temp'].find(
        {'_id': {'$lte': max_seen}, 'update': {'$gte': cutoff}}, {'_id': 1})}
    old_ids = {each.get('_id') for each in DB['enterprise'].find(
        {'update': {'$gte': cutoff}}, {'_id': 1})}

    serial_count = 0
    for _id in range(1, id_limit):
        if _id in fail_ids or _id in old_ids:
            continue
        if serial_count > miss_limit:
            break

        Loger.debug('%s start get' % _id)
        html = get_html(url % _id)
        if 404 == html:
            # Remember the dead id so it is not retried for UPDATE_DELAY days.
            DB['temp'].update_one({'_id': _id},
                                  {'$set': {'update': aw.datetime}}, upsert=True)
            Loger.error('%s crawl 404' % _id)
            serial_count += 1
            continue

        if html:
            try:
                com = Company(html)
                com.sum_dic['update'] = aw.datetime
                DB['enterprise'].update_one({'_id': _id}, {'$set': com.sum_dic}, upsert=True)
                # Dated history snapshot; aw is already floored to the day.
                com.sum_dic['_id'] = '%s_%s' % (_id, aw.format('YYYYMMDD'))
                com.sum_dic['id'] = _id
                DB['enterprise_history'].update_one({'_id': com.sum_dic['_id']},
                                                    {'$set': com.sum_dic}, upsert=True)
                Loger.debug('%s success' % _id)
                serial_count = 0  # a hit resets the consecutive-404 counter
            except Exception as e:
                Loger.exception(e)
                print(_id)
        time.sleep(NEXT_DAILY)


if __name__ == '__main__':

    # Full crawl cycle: sequential-id sweep, then index refresh, then
    # re-scrape of everything discovered via the index.
    range_update()
    update_index()
    # update_index(page_count=1)
    undate_main()
    # proxy = Proxy.get_proxy()
    # print(proxy)
