'''
Scrape AngelList (angel.co) user profile pages using asyncio coroutines
running inside a thread pool.
'''
import asyncio
import json
import logging
import os
import random
from multiprocessing.dummy import Manager, Pool

import aiohttp
import redis
import requests
from bs4 import BeautifulSoup
from pymongo import MongoClient


def get_recorder_logger(log_name):
    """Return a logger named *log_name* writing to ``person_log/<log_name>``.

    The logger gets two handlers exactly once (guarded by ``handlers``):
    a file handler at INFO level and a console handler at DEBUG level.
    Repeated calls with the same name return the same configured logger.

    :param log_name: logger name, also used as the log file name.
    :return: the configured ``logging.Logger`` instance.
    """
    # Bug fix: logging.FileHandler raises FileNotFoundError when the
    # target directory does not exist, so create it up front.
    os.makedirs('person_log', exist_ok=True)
    my_logger = logging.getLogger(log_name)
    my_logger.setLevel(logging.DEBUG)
    if not my_logger.handlers:
        # logging format shared by both handlers
        fmt = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        # file handler: persists INFO and above
        fh = logging.FileHandler('person_log/%s' % log_name)
        fh.setFormatter(fmt)
        fh.setLevel(logging.INFO)
        my_logger.addHandler(fh)
        # stream handler: echoes everything (DEBUG+) to the console
        sh = logging.StreamHandler()
        sh.setFormatter(fmt)
        sh.setLevel(logging.DEBUG)
        my_logger.addHandler(sh)

    return my_logger


class FIND_SOUP():
    """Null-safe wrapper around a BeautifulSoup node.

    Every accessor tolerates a missing (falsy) underlying node: ``find``
    returns another (possibly empty) wrapper so lookups can be chained
    without None checks, while the other accessors fall back to a neutral
    value (``None`` or ``[]``).
    """

    def __init__(self, soup):
        # Keep the raw node and pre-compute a stripped .string shortcut.
        self.soup = soup
        self.string = None
        if soup and soup.string:
            self.string = soup.string.strip()

    def find(self, *args, **kw):
        """Chainable find: always returns a FIND_SOUP (empty on miss)."""
        hit = self.soup.find(*args, **kw) if self.soup else None
        return FIND_SOUP(hit if hit else None)

    def find_all(self, *args, **kw):
        """Return the raw result list, or [] when there is no node."""
        return self.soup.find_all(*args, **kw) if self.soup else []

    def find_next_sibling(self, *args, **kw):
        """Return the raw sibling node, or None when there is no node."""
        return self.soup.find_next_sibling(*args, **kw) if self.soup else None

    def get_text(self):
        """Return the node's stripped text, or None when there is no node."""
        return self.soup.get_text().strip() if self.soup else None

    def get(self, *args, **kw):
        """Return an attribute value, or None when there is no node."""
        return self.soup.get(*args, **kw) if self.soup else None

class BASE:
    """Mixin providing falsy-value pruning for a parser's ``ret_dict``."""

    @staticmethod
    def del_none(dic):
        """Remove, in place, every key whose value is falsy (None, '', 0, ...)."""
        # Collect doomed keys first; deleting while iterating a dict is illegal.
        doomed = [key for key, value in dic.items() if not value]
        for key in doomed:
            del dic[key]

    def get_data(self):
        """Prune falsy entries from ``self.ret_dict`` and return it."""
        self.del_none(self.ret_dict)
        return self.ret_dict


class PERSON(BASE):
    """Parse one angel.co user-profile HTML page into a flat dict.

    The result is accumulated in ``self.ret_dict`` by four section parsers
    (prefix, experience, education, about); any exception during parsing is
    logged and yields an empty dict, which the caller treats as a failed page.
    """

    def __init__(self, html):
        # html: raw profile page markup; parsed once with the lxml backend.
        self.soup = BeautifulSoup(html, 'lxml')
        try:
            self.ret_dict = {}
            self.ret_dict.update(self.get_prefix())
            self.ret_dict.update(self.get_experience())
            self.ret_dict.update(self.get_education())
            self.ret_dict.update(self.get_about())
        except Exception as e:
            # Swallow any parse error: downstream checks for a missing
            # 'name' key and retries the page.
            MY_LOG.exception(e)
            self.ret_dict = {}
        # self.get_data()

    def get_prefix(self):
        """Extract the page header: name, bio line, tags and external links."""
        prefix_soup = self.soup.find('div', 'prefix')
        if not prefix_soup:
            return {}
        # Wrap in FIND_SOUP so missing sub-elements degrade to None/[].
        prefix_soup = FIND_SOUP(prefix_soup)
        data_dic = {}
        data_dic['name'] = prefix_soup.find('h1', 'js-name').get_text()
        data_dic['bio'] = prefix_soup.find('h2').get_text()
        data_dic['tags'] = [FIND_SOUP(each).get('title') for each in prefix_soup.find(
            'div', 'tags').find_all('span', 'tag')]

        # Each social/website link is keyed by its data-field attribute
        # (e.g. twitter/linkedin -- presumably; verify against live markup).
        for each in prefix_soup.find('div', 'links').find_all('span', 'link'):
            data_dic[each.find('a').get('data-field')
                     ] = each.find('a').get('href')
        return data_dic

    def get_experience(self):
        """Extract work experience from the JSON embedded in data-roles."""
        data_soup = self.soup.find(
            'div', attrs={'data-_tn': 'startup_roles/portfolio'})
        if not data_soup:
            return {}
        # data-roles holds a JSON-encoded list of role records.
        data_dic = data_soup.get('data-roles')

        return {'experience': json.loads(data_dic)}

    def get_education(self):
        """Extract education from the JSON embedded in data-taggings."""
        data_soup = self.soup.find(
            'div', attrs={'data-_tn': 'profiles/show/profile_college_tagger'})
        if not data_soup:
            return {}
        data_dic = data_soup.get('data-taggings')
        return {'education': json.loads(data_dic)}

    def get_about(self):
        """Extract the About section: free-text fields plus tag-list fields."""
        data_soup = self.soup.find('div', 'about')
        if not data_soup:
            return {}
        divs = data_soup.find('div', 's-grid')
        if not divs:
            return {}
        about_dict = {}
        for div in divs.find_all('div', 'field'):
            div = FIND_SOUP(div)
            # print(div)
            # The field label decides whether the value is prose or tags.
            name = div.find('div', 'u-fontWeight300').string
            if name in ['What I Do', 'Achievements', "What I'm Looking For"]:
                values = div.find('p').get_text()
            else:
                # Tag-style fields (skills, locations, markets, ...) become
                # a list of link texts.
                tags_temp = []
                for tab in div.find_all('span', 'tag'):
                    tab = FIND_SOUP(tab)
                    tag_temp = tab.find('a').get_text()
                    tags_temp.append(tag_temp)
                values = tags_temp
            about_dict[name] = values
        return {'about': about_dict}


def check_page(html):
    """Dispatch a fetched page to a company or person parser by keywords.

    NOTE(review): ``company`` and ``person`` are not defined anywhere in this
    file, so calling this function raises NameError -- it appears to be dead
    code left over from another module. Confirm before wiring it in.
    """
    if 'Followers'in html and 'Activity' in html and 'Overview' in html:
        company(html)
    elif 'Investments' in html:
        person(html)


def get_all_url(collection):
    ret_list = {each.get('angel_url')
                for each in collection.find({}, {'_id': 0, 'angel_url': 1})}
    return ret_list


async def get_page(work_urls):
    """Worker coroutine: fetch profile URLs from *work_urls* via a proxy.

    Pulls URLs from the shared queue until the 'end_kill' sentinel is seen
    (the sentinel is re-queued so sibling workers also terminate). Each URL
    is retried on failure; after 5 consecutive failures the current proxy
    is discarded and a fresh one is drawn from Redis. Parsed profiles are
    inserted into DATA_TARGET.

    NOTE(review): this uses the pre-2.0 aiohttp API (ProxyConnector,
    aiohttp.Timeout, aiohttp.errors) -- it will not run on modern aiohttp.
    """
    ip_port = await get_ipproxy()
    conn = aiohttp.ProxyConnector(proxy="http://%s" % ip_port)
    session = aiohttp.ClientSession(connector=conn)

    while 1:
        angel_url = work_urls.get()
        if angel_url == 'end_kill':
            # Re-queue the sentinel for the other workers, then exit.
            work_urls.put('end_kill')
            break

        repeat_num = 0  # consecutive failures with the current proxy
        empty_num = 0   # pages that parsed but had no 'name' field
        while True:
            repeat_num += 1
            try:
                with aiohttp.Timeout(35):
                    async with session.get(angel_url, headers=HEADERS) as r:
                        text = await r.text()
                        if r.status == 404:
                            LOG_404.info(angel_url)
                            break
                        assert r.status == 200
                        # Skip proxy-block pages; require the AngelList brand
                        # string as a sanity check that we got a real page.
                        if 'Your IP is blocked, kid.'not in text and 'AngelList' in text:
                            person = PERSON(text)
                            data_dic = person.get_data()
                            data_dic['angel_url'] = angel_url
                            if not data_dic.get('name'):
                                # Parsed but empty: treat as a soft failure.
                                raise RuntimeError('name is None')
                            DATA_TARGET.insert_one(data_dic)
                            print('successful get_page : %s' % angel_url)
                            break
            except RuntimeError as e:
                # EMPTY_LOG.error('page is empty : %s' % angel_url)
                empty_num += 1
                await asyncio.sleep(60)
            except aiohttp.errors.ClientError as e:
                PROXY_LOG.error('client error: %s' % ip_port)
                await asyncio.sleep(30)
            except aiohttp.errors.DisconnectedError as e:
                PROXY_LOG.error('disconnect error: %s' % ip_port)
                await asyncio.sleep(30)
            except asyncio.TimeoutError as e:
                TIMEOUT_LOG.error('TimeoutError : %s' % angel_url)
                await asyncio.sleep(60)
            except AssertionError as e:
                # Non-200/404 status; r is still bound from the failed request.
                STATUS_LOG.error('%s : %s' % (angel_url, r.status))
                await asyncio.sleep(10)
            except aiohttp.errors.HttpProcessingError as e:
                PROXY_LOG.error('too much connection : %s' % (angel_url,))
                await asyncio.sleep(10)
            except Exception as e:
                UNKNOW_LOG.exception(e)
                UNKNOW_LOG.debug("fail: %s" % angel_url)
                await asyncio.sleep(10)

            if empty_num>7:
                # Repeatedly nameless page: give up on this URL.
                EMPTY_LOG.error('page is empty : %s' % angel_url)
                break
            if repeat_num > 5:
                # Rotate to a fresh proxy after too many failures.
                # NOTE(review): the old session/connector is never closed
                # here (nor at loop exit) -- this leaks connections.
                del_ipproxy(ip_port)
                ip_port = await get_ipproxy()
                conn = aiohttp.ProxyConnector(proxy="http://%s" % ip_port)
                session = aiohttp.ClientSession(connector=conn)
                repeat_num = 0
                REPEAT_LOG.info(angel_url)


async def get_ipproxy():
    """Pick a random proxy ("ip:port" string) from the Redis set 'ipproxy:3'.

    Sleeps 60s at a time until at least one proxy with a score in [0, 20]
    (i.e. an acceptable failure count) is available.

    :return: proxy address decoded as a UTF-8 str.
    """
    global REDIS
    while True:
        count = REDIS.zcount('ipproxy:3', 0, 20)
        if count == 0:
            print('wait 60s no proxy')
            await asyncio.sleep(60)
            continue
        # Bug fix: randint is inclusive on BOTH ends, so the original
        # randint(0, count) could yield index == count, one past the last
        # element, making zrange return [] and wasting a retry.
        my_id = random.randint(0, count - 1)
        # NOTE(review): zrange indexes the whole sorted set while `count`
        # only counts members scored 0-20, so the index can still land on
        # a higher-scored member or miss -- hence the empty-result guard.
        ret = REDIS.zrange('ipproxy:3', my_id, my_id)
        if not ret:
            continue
        return ret[0].decode('utf8')


def del_ipproxy(proxy):
    """Drop a dead *proxy* from the 'ipproxy:3' sorted set in Redis."""
    # Reading a module-level name needs no `global` declaration.
    REDIS.zrem('ipproxy:3', proxy)


# Per-purpose loggers (each writes to person_log/<name> plus the console).
MY_LOG = get_recorder_logger('ang_log')
# NOTE(review): 'unknow' is a typo for 'unknown' but it is also the log
# file name on disk, so renaming would move the log file.
UNKNOW_LOG = get_recorder_logger('unknow_log')
EMPTY_LOG = get_recorder_logger('empty_log')
STATUS_LOG = get_recorder_logger('status_log')
TIMEOUT_LOG = get_recorder_logger('timeout_log')
REPEAT_LOG = get_recorder_logger('repeat_log')
PROXY_LOG = get_recorder_logger('proxy_log')
LOG_404 = get_recorder_logger('404_log')


# MongoDB: source collection of discovered user URLs and target collection
# for the scraped profile details.
DB = MongoClient('192.168.0.220', 27017)['angellist']
DATA_SOURCE = DB['users']
DATA_TARGET = DB['users_detail']
# Local Redis holds the shared proxy pool (sorted set 'ipproxy:3').
Proxy_Pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
REDIS = redis.StrictRedis(connection_pool=Proxy_Pool)
# Coroutines per worker thread, and number of worker threads.
ASYNC_NUM = 30
THREAD_NUM = 2
# Desktop Chrome UA to look like a normal browser.
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36'}

async def test_work(arg):
    import os
    while 1:
        v = arg.get()
        if v == 'end_kill':
            arg.put('end_kill')
            break
        await asyncio.sleep(0.1)
        v += v
        print('%s : %s' % (os.getpid(), v))


def work(work_queue):
    """Thread entry point: run ASYNC_NUM get_page coroutines on a fresh loop.

    Each worker thread owns its own event loop; all coroutines share the
    single cross-thread *work_queue* of URLs.
    """
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    print('work')
    futures = [
        asyncio.ensure_future(get_page(work_queue))
        for _ in range(ASYNC_NUM)
    ]
    loop.run_until_complete(asyncio.wait(futures))
    loop.close()


def main():
    """Queue every not-yet-scraped user URL and fan out to worker threads.

    URLs already present in DATA_TARGET are skipped; a single 'end_kill'
    sentinel terminates all workers once the queue drains.
    """
    pending_urls = get_all_url(DATA_SOURCE) - get_all_url(DATA_TARGET)

    manager = Manager()
    work_queue = manager.Queue()
    for url in pending_urls:
        work_queue.put(url)
    work_queue.put('end_kill')

    pool = Pool(THREAD_NUM)
    for _ in range(THREAD_NUM):
        # One work(work_queue) invocation per thread.
        pool.apply_async(work, (work_queue,))
    pool.close()
    pool.join()

    print('end')


def test():
    """Fetch one known profile, parse it and dump the result to stdout."""
    import requests
    response = requests.get('https://angel.co/robert-flandrau', timeout=20)
    parsed = PERSON(response.text)
    print(json.dumps(parsed.get_data()))

# Entry point: run the full scrape; uncomment test() to parse one profile.
if __name__ == '__main__':
    # test()
    main()
