import re
import requests
from fake_useragent import UserAgent
import pymongo
from multiprocessing import Pool

# Shared User-Agent rotator; each access to ``ua.random`` yields a fresh
# browser UA string, used to vary request headers per fetch.
# NOTE(review): the ``use_cache_server`` keyword was removed in newer
# fake_useragent releases — confirm the pinned version still accepts it.
ua = UserAgent(use_cache_server=True)


class RRC_BBS(object):
    """Scraper for the renrenche.com BBS forum (fid=51).

    Fetches thread-listing pages, follows each thread URL to collect its
    replies, and inserts one document per thread into the local MongoDB
    ``forum`` database (``forum`` collection).
    """

    # NOTE(review): this client is created at import time and then shared
    # across fork()ed Pool workers; pymongo recommends one MongoClient per
    # process — confirm this is acceptable for this workload.
    client = pymongo.MongoClient('localhost')
    db = client['forum']

    # Seconds before a hung HTTP request is abandoned; without a timeout a
    # single stalled fetch blocks its pool worker forever.
    REQUEST_TIMEOUT = 30

    # One thread-list table row. Groups: (category, thread href, title,
    # author, post time, reply count, view count, last poster, last post
    # time). Compiled once instead of on every call.
    LIST_PATTERN = re.compile(
        r'<th class="new">.*?<em>.*?<a.*?>(.*?)</a>.*?<a href="(.*?)".*?>(.*?)</a>'
        r'.*?<td class="by">.*?<a.*?>(.*?)</a>.*?<span.*?>(.*?)</span>'
        r'.*?<td class="num">.*?<a.*?>(.*?)</a>.*?<em>(.*?)</em>'
        r'.*?<td class="by">.*?<a.*?>(.*?)</a>.*?<a.*?>(.*?)</a>',
        re.S)

    # One reply inside a thread page. Groups: (author, time, raw HTML body).
    REPLY_PATTERN = re.compile(
        r'<div class="authi"><a.*?>(.*?)</a>.*?<div class="authi">.*?<em.*?>(.*?)</em>'
        r'.*?<td class="t_f".*?>(.*?)</td>',
        re.S)

    # Strips leftover HTML tags from a reply body.
    TAG_PATTERN = re.compile(r'<.*>', re.S)

    def _random_headers(self):
        """Return request headers with a freshly rotated User-Agent."""
        return {'User-Agent': ua.random}

    def get_list_html(self, page):
        """Fetch one thread-list page and extract its row tuples.

        :param page: 1-based page number of the forum listing.
        :return: list of 9-tuples matching ``LIST_PATTERN``.
        """
        # Kept as an attribute for backward compatibility: get_usr_forum
        # reuses the most recent headers when available.
        self.headers = self._random_headers()
        url = ('https://bbs.renrenche.com/forum.php'
               '?mod=forumdisplay&fid=51&page={}'.format(page))
        response = requests.get(
            url, headers=self.headers, timeout=self.REQUEST_TIMEOUT).text
        return self.LIST_PATTERN.findall(response)

    def create_url_dict(self, data_list):
        """Turn each listing row into a document and store it in MongoDB.

        For every row tuple, builds the thread document, fetches the
        thread's replies, and inserts the combined record into the
        ``forum`` collection.

        :param data_list: row tuples from :meth:`get_list_html`.
        """
        for data in data_list:
            data_dict = {
                'title': data[0],
                'content': data[2],
                'name': data[3],
                'time': data[4],
                'forum_num': data[5],
                'view': data[6],
                'last_forum': data[7],
                'last_forum_time': data[8],
            }
            # hrefs in the listing HTML are entity-escaped ("&amp;");
            # undo that before requesting the detail page.
            detail_urls = "https://bbs.renrenche.com/" + data[1].replace('amp;', '')
            data_dict['forum_list'] = self.get_usr_forum(detail_urls)
            self.db['forum'].insert_one(data_dict)
            print(detail_urls)
            print(data_dict)

    def get_usr_forum(self, detail_urls):
        """Fetch a thread page and extract its replies.

        :param detail_urls: absolute URL of the thread detail page.
        :return: list of dicts with keys ``author``, ``time``, ``forum``
            (reply text with HTML tags and CRLFs removed).
        """
        # Fall back to fresh headers so this method also works when called
        # before get_list_html has populated self.headers.
        headers = getattr(self, 'headers', None) or self._random_headers()
        response = requests.get(
            detail_urls, headers=headers, timeout=self.REQUEST_TIMEOUT).text
        replies = []  # renamed from `list`, which shadowed the builtin
        for author, post_time, raw_body in self.REPLY_PATTERN.findall(response):
            text = self.TAG_PATTERN.sub('', raw_body).replace('\r\n', '')
            replies.append({'author': author, 'time': post_time, 'forum': text})
        return replies

    def start_spider(self, page):
        """Scrape one listing page end-to-end (pool-worker entry point).

        :param page: 1-based listing page number to process.
        """
        data_list = self.get_list_html(page)
        self.create_url_dict(data_list)


if __name__ == '__main__':
    rs = RRC_BBS()
    # Three worker processes share the listing pages 1-9, one page per task.
    pool = Pool(3)
    try:
        # range() is already an iterable; no need to materialize a list.
        pool.map(rs.start_spider, range(1, 10))
    finally:
        # Always shut the pool down, even if a worker raises, so the main
        # process does not leak child processes.
        pool.close()
        pool.join()








