import requests
from bs4 import BeautifulSoup
from pymongo import MongoClient
import time
import pymongo
from config import get_header
import re
from config import get_header_json,get_log
import arrow

# Module-level logger for this crawler (configured by config.get_log).
Loger = get_log('fellowplus')
# Outbound HTTP proxy used for every request; swap in the commented
# entry to route through the local 8087 proxy instead.
PROXY = {
    'http':'http://218.109.238.144:12345',
    # 'http':'http://192.168.1.220:8087',
}
TIMEOUT = 15        # per-request timeout, seconds
REQ_RETYP = 3       # number of attempts get_html makes before giving up
NEXT_DAILY = 5      # seconds to sleep between consecutive page/detail fetches
UPDATE_DELAY = 25   # days: records refreshed within this window are skipped
# URL-encoded API auth token; presumably session/account-bound — TODO confirm lifetime
web_token = '4sD5zZ1X%2FH4FUhXtgTpGdBSbD2UCVcqwubON6I%2FFyfY%3D'

def get_html(url):
    """Fetch *url* and return its decoded JSON body, or None on failure.

    Makes up to REQ_RETYP attempts with a linearly growing back-off
    (5s, 10s, 15s, ...).  All failures are logged; callers MUST handle
    a None return.
    """
    delay = 5
    for _ in range(REQ_RETYP):
        try:
            resp = requests.get(url, timeout=TIMEOUT,
                                headers=get_header_json(), proxies=PROXY)
            # raise_for_status() instead of `assert`: asserts are stripped
            # under `python -O`, silently disabling the status check.
            resp.raise_for_status()
            return resp.json()
        except requests.exceptions.RequestException as e:
            Loger.warning('http fail: %s' % e)
        except Exception as e:  # e.g. JSON decode errors
            Loger.warning(e)
        # Back off before every retry.  The original only slept on the
        # generic-Exception branch, so network errors retried instantly
        # and hammered the proxy.
        time.sleep(delay)
        delay += 5
    Loger.error('%s time over' % url)
    return None


def update_index(page_count=None):
    """Crawl the paginated project list and store rows in enterprise_index.

    page_count: optional cap on the number of pages to crawl; when None
    the total page count reported by the API is used.

    Raises RuntimeError if the very first page cannot be fetched.
    """
    def parse_index(dic):
        # Stamp the crawl day and build a stable _id so re-crawls of the
        # same funding event collide on the key instead of duplicating.
        dic['update'] = arrow.now().floor('day').datetime
        dic['_id'] = dic['id'] + '_' + dic['funding_time'].replace('-', '')
        return dic

    def store(docs):
        # ordered=False lets the batch continue past duplicate-key rows;
        # the resulting BulkWriteError is expected and ignored.
        try:
            DB['enterprise_index'].insert_many(list(docs), ordered=False)
        except pymongo.errors.BulkWriteError:
            pass

    START = 55
    url = 'https://api.fellowplus.com/v2/projects/list?funding_start_time=2000-04-12&funding_end_time=2017-04-19&page_num={page}&web_token={web_token}'
    DB = MongoClient('192.168.1.220',29001)['fellowplus']

    Loger.debug('%s start crawl' % START)
    js_data = get_html(url.format(page=START, web_token=web_token))
    if not js_data:
        Loger.error('%s crawl fail' % START)
        # A bare `raise` outside an except block is itself a RuntimeError
        # ("No active exception to re-raise"); raise something meaningful.
        raise RuntimeError('first page (%s) crawl failed' % START)
    page = js_data['data']['pages'] if not page_count else page_count
    store(map(parse_index, js_data['data']['project_list']))

    for i in range(START + 1, page + 1):
        Loger.debug('%s start crawl' % i)
        # BUG FIX: the original formatted page=START here, re-crawling
        # page 55 on every single iteration of the loop.
        js_data = get_html(url.format(page=i, web_token=web_token))
        if not js_data:
            Loger.error('%s crawl fail' % i)
            continue
        store(map(parse_index, js_data['data']['project_list']))
        time.sleep(NEXT_DAILY)

def update_main():
    """Crawl the detail endpoint for every indexed project not refreshed recently.

    For each document in enterprise_index that has not been updated within
    the last UPDATE_DELAY days, fetches the project detail and writes it to
    `enterprise` (keyed by the index id) plus a date-suffixed snapshot in
    `enterprise_history`.
    """
    aw = arrow.now().floor('day')
    DB = MongoClient('192.168.1.220',29001)['fellowplus']
    url = 'https://api.fellowplus.com/v2/projects/detail?unique_str={unique_str}&web_token={web_token}'
    # NOTE: arrow.replace(days=-N) was removed in arrow >= 0.15; shift()
    # is the supported way to move a timestamp.
    cutoff = aw.shift(days=-UPDATE_DELAY).datetime
    # BUG FIX: the projection {'_id': 1} strips every field except _id, so
    # the original `each.get('id')` was always None and nothing was ever
    # skipped.  `enterprise._id` is assigned from the index `id` on insert
    # (below), so the _id values are the right keys.  A set gives O(1)
    # membership tests.
    old_ids = {each['_id'] for each in
               DB['enterprise'].find({'update': {'$gte': cutoff}}, {'_id': 1})}
    for each in DB['enterprise_index'].find():
        unique_str = each.get('unique_str')
        _id = each.get('id')
        name = each.get('name')
        Loger.debug('%s start crawl' % name)
        if _id in old_ids:
            continue

        js_data = get_html(url.format(unique_str=unique_str, web_token=web_token))
        if not js_data:
            # BUG FIX: get_html returns None after repeated failures; the
            # original crashed here with `js_data['data']` -> TypeError.
            Loger.error('%s crawl fail' % name)
            continue
        ret_dic = js_data['data']
        ret_dic['_id'] = _id
        ret_dic['update'] = arrow.now().floor('day').datetime
        DB['enterprise'].insert_one(ret_dic)
        # Second, date-suffixed copy keeps a daily history snapshot.
        ret_dic['_id'] = _id + '_' + arrow.now().format('YYYYMMDD')
        DB['enterprise_history'].insert_one(ret_dic)
        time.sleep(NEXT_DAILY)

if __name__ == '__main__':
    # Entry point: refresh the project index.  The calls below are
    # alternative manual runs kept for reference:
    update_index()
    # update_main()              # full detail crawl
    # update_index(page_count=10)  # capped index crawl for testing
