import requests
from bs4 import BeautifulSoup
from pymongo import MongoClient
import time
from config import get_header,get_log
import arrow
# Module-level logger configured by the shared config helper.
Loger = get_log('touzijie')

# Outbound HTTP proxy; only the active entry is uncommented.
PROXY = {
    # 'http':'http://218.109.238.144:8989',
    'http':'http://192.168.1.220:8087',
}
TIMEOUT = 15  # per-request HTTP timeout, seconds
REQ_RETYP = 3  # number of retry attempts in get_html()
NEXT_DAILY = 5  # polite delay between successive page crawls, seconds
UPDATE_DELAY = 15  # records older than this many days are re-crawled

# Shared MongoDB database handle for this crawler.
DB = MongoClient('192.168.1.220',29001)['touzijie']

class Company:
    """Parse one enterprise detail page (zdb.pedaily.cn) into a flat dict.

    All extracted fields are merged into ``self.sum_dic``; falsy values
    (None / empty string / empty list) are dropped so the Mongo document
    only carries fields that were actually present on the page.
    """

    def __init__(self, html):
        soup = BeautifulSoup(html, 'lxml')
        self.sum_dic = {}
        self.sum_dic.update(self.get_company_detail(soup))
        self.sum_dic.update(self.get_company_basic_info(soup))

        # Drop empty/None values so they never overwrite existing data.
        self.sum_dic = {k: v for k, v in self.sum_dic.items() if v}

    def get_company_detail(self, soup):
        """Extract the page header: names plus the <li> label/value pairs."""
        sub_soup = soup.find('div', 'zdb-top')
        info = sub_soup.find('div', 'info')
        ret_dic = {}

        # First text node of <h1> is the full registered name.
        ret_dic['full_name'] = info.find('h1').next_element.strip()

        # <em>/<h2> are optional; guard so a missing tag yields None
        # instead of an AttributeError.
        em = info.find('em')
        ret_dic['name'] = em.string.strip() if em is not None and em.string else None
        h2 = info.find('h2')
        ret_dic['en_name'] = h2.string.strip() if h2 is not None and h2.string else None

        # Each <li> is "<span>label：</span>value"; strip the fullwidth
        # colon and surrounding spaces from the label.
        for li in sub_soup.find_all('li'):
            span = li.find('span')
            ret_dic[span.string.strip(' ：')] = span.next_sibling.string.strip()

        return ret_dic

    def get_company_basic_info(self, soup):
        """Extract description, contact details and funding-round history."""
        desc_soup = soup.find('div', {'id': 'desc'})
        ret_dic = {}
        # NOTE(review): lstrip/rstrip take a CHARACTER SET, so this also
        # strips any leading '简'/'介' (trailing '分'/'享') characters, not
        # just the literal labels — kept as-is to preserve stored data shape.
        ret_dic['desc'] = desc_soup.get_text().lstrip('\n\t\r简介').rstrip('\n\t\r分享')

        try:
            contact_soup = soup.find('div', {'id': 'contact'}).find('p')
            lines = [line.replace('\u3000', '')
                     for line in contact_soup.get_text().split('\n')
                     if line.strip()]
            # Each line looks like "label：value"; split on the fullwidth colon.
            ret_dic['contact'] = dict(line.split('：') for line in lines)
        except Exception as e:
            # Contact block is optional / irregular; log and keep parsing
            # (was print(e) — inconsistent with the module's logging).
            Loger.warning(e)

        # First <li> is the table header row; skip it.
        inv_lis = soup.find('div', {'id': 'inv-box'}).find_all('li')[1:]
        ret_dic['fundinground'] = []
        for li in inv_lis:
            dic = {}
            d = li.find('span', 'd')
            money = li.find('span', 'm').get_text()

            dic['date'] = li.find('span', 'time').get_text()
            # The amount prefix <span class="d"> is optional.
            dic['money'] = d.get_text() + ' ' + money if d else money
            r = li.find('span', 'r')
            dic['series'] = r.get_text() if r else None
            dic['investors'] = [org.get_text()
                                for org in li.find('dt', 'group').find_all('a')]
            ret_dic['fundinground'].append(dic)

        return ret_dic

def get_html(url):
    """Fetch *url* through the configured proxy, with retries.

    Returns:
        str: the page text (decoded as utf8) on HTTP 200;
        '300': sentinel when the server answers 301/302 (page moved/removed);
        None: after REQ_RETYP failed attempts.
    """
    tm = 5  # back-off delay in seconds, grows by 5 after each failure
    for _ in range(REQ_RETYP):
        try:
            req = requests.get(url, timeout=TIMEOUT, headers=get_header(),
                               proxies=PROXY, allow_redirects=False)
            if req.status_code in [301, 302]:
                # Caller treats the sentinel like a deleted page.
                return '300'
            if req.status_code != 200:
                # Was `assert` — stripped under `python -O`; raise instead
                # so the retry handler below always sees bad statuses.
                raise ValueError('unexpected status %s' % req.status_code)
            req.encoding = 'utf8'
            return req.text
        except requests.exceptions.RequestException:
            Loger.warning('http fail')
        except Exception as e:
            Loger.warning(e)
        # Back off before every retry (originally only non-network errors
        # slept, so connection failures hammered the proxy back-to-back).
        time.sleep(tm)
        tm += 5
    Loger.error('%s time over' % url)
    return None
    


def test():
    """Dev helper: parse a locally saved page (test.html) and pretty-print it.

    To refresh the fixture, fetch http://zdb.pedaily.cn/enterprise/show11/
    with get_header() headers and save the response body as test.html.
    """
    from pprint import pprint
    with open('test.html', 'rt') as fh:
        page = fh.read()
    pprint(Company(page).sum_dic)

def update():
    """Re-crawl enterprises whose stored 'update' stamp is older than UPDATE_DELAY days."""
    aw = arrow.now()
    day = aw.floor('day').datetime  # midnight today; reused for every stamp below
    # arrow removed plural units in .replace() (1.0+); .shift() is the
    # supported way to offset a timestamp.
    cutoff = aw.shift(days=-UPDATE_DELAY).datetime
    old_ids = [each.get('_id') for each in
               DB['enterprise'].find({'update': {'$lte': cutoff}}, {'_id': 1})]
    for i in old_ids:
        Loger.debug('%s start update' % i)
        url = 'http://zdb.pedaily.cn/enterprise/show%s/' % i

        html = get_html(url)
        if not html:
            Loger.error('%s update fail' % i)
            continue

        if '您正在浏览的网页可能已被删除或者转移。' in html or html == '300':
            # Page is gone: record it in 'temp' so main() skips the id for a while.
            DB['temp'].update_one({'_id': i}, {'$set': {'update': day}}, upsert=True)
            Loger.error('%s update lost' % i)
            continue

        try:
            com = Company(html)
            com.sum_dic['_id'] = i
            com.sum_dic['update'] = day
            DB['enterprise'].update_one({'_id': i}, {'$set': com.sum_dic}, upsert=True)

            # Daily snapshot keyed "<id>_<YYYYMMDD>" in the history collection.
            com.sum_dic['_id'] = '%s_%s' % (i, aw.floor('day').format('YYYYMMDD'))
            com.sum_dic['id'] = i
            DB['enterprise_history'].insert_one(com.sum_dic)
            Loger.debug('%s update success' % i)
            time.sleep(NEXT_DAILY)
        except Exception as e:
            Loger.error(i)
            Loger.exception(e)
            continue

def main():
    """Crawl enterprise pages id 1..49999, skipping ids already stored or recently failed.

    Stops early after 10 consecutive "lost" pages, which signals we have
    run past the last existing id.
    """
    aw = arrow.now()
    day = aw.floor('day').datetime
    # Sets, not lists: membership is tested up to ~50k times in the loop
    # below, and list lookup is O(n) per test.
    finished = {each.get('_id') for each in DB['enterprise'].find({}, {'_id': 1})}
    max_id = max(finished) if finished else 0
    # arrow removed plural units in .replace() (1.0+); use .shift().
    cutoff = aw.shift(days=-UPDATE_DELAY).datetime
    failed = {each.get('_id') for each in
              DB['temp'].find({'_id': {'$lte': max_id},
                               'update': {'$gte': cutoff}}, {'_id': 1})}

    serial_count = 0  # consecutive missing pages
    for i in range(1, 50000):
        if i in finished or i in failed:
            continue

        Loger.debug('%s start crawl' % i)
        url = 'http://zdb.pedaily.cn/enterprise/show%s/' % i

        html = get_html(url)
        if not html:
            Loger.error('%s crawl fail' % i)
            continue

        if '您正在浏览的网页可能已被删除或者转移。' in html or html == '300':
            DB['temp'].update_one({'_id': i}, {'$set': {'update': day}}, upsert=True)
            Loger.error('%s crawl lost' % i)
            serial_count += 1
            if serial_count > 10:
                break
            continue
        try:
            com = Company(html)
            com.sum_dic['_id'] = i
            com.sum_dic['update'] = day
            DB['enterprise'].update_one({'_id': i}, {'$set': com.sum_dic}, upsert=True)
            # Daily snapshot keyed "<id>_<YYYYMMDD>" in the history collection.
            com.sum_dic['_id'] = '%s_%s' % (i, aw.floor('day').format('YYYYMMDD'))
            com.sum_dic['id'] = i
            DB['enterprise_history'].insert_one(com.sum_dic)
            Loger.debug('%s crawl success' % i)
            serial_count = 0
            time.sleep(NEXT_DAILY)
        except Exception as e:
            Loger.error(i)
            Loger.exception(e)
            continue


if __name__ == '__main__':
    # Crawl new ids first, then refresh stale records.
    main()
    update()
    # test()
