import requests
import sys
import math

from bs4 import BeautifulSoup
from pymongo import MongoClient
from multiprocessing import Process

# Request headers for the JSON/XHR API endpoint (question listing).
# The 'referer' value here is only a placeholder: crawl_question_id
# overwrites it with the target user's profile URL before each request.
h1 = {
    'authority': 'www.wukong.com',
    'method': 'GET',
    'scheme': 'https',
    'accept': 'application/json, text/javascript, */*; q=0.01',
    'accept-encoding': 'gzip, deflate, br',
    'accept-language': 'en,zh-CN;q=0.9,zh;q=0.8,en-US;q=0.7',
    'cache-control': 'no-cache',
    'pragma': 'no-cache',
    'referer': 'https://www.wukong.com/user/?uid=4204700994&type=0',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
    'x-requested-with': 'XMLHttpRequest'
}

# Request headers for plain HTML page fetches (question detail pages).
# As with h1, 'referer' is a placeholder that crawl_question_answer
# rewrites per user before each request.
h2 = {
    'authority': 'www.wukong.com',
    'method': 'GET',
    'scheme': 'https',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'accept-encoding': 'gzip, deflate, br',
    'accept-language': 'en,zh-CN;q=0.9,zh;q=0.8,en-US;q=0.7',
    'cache-control': 'no-cache',
    'pragma': 'no-cache',
    'referer': 'https://www.wukong.com/user/?uid=4204700994&type=0',
    'upgrade-insecure-requests': '1',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
}

# Raised because crawl_question_id paginates via self-recursion; a user with
# many pages would otherwise hit Python's default recursion limit (~1000).
sys.setrecursionlimit(1000000)
# Mongo host from argv[1], defaulting to localhost when not given.
net = sys.argv[1] if len(sys.argv) == 2 else 'localhost'

# connect=False defers the actual connection until first use, which is
# required when the client is shared with multiprocessing child processes.
client = MongoClient(net, 27017, connect = False)

# Collection of users to crawl (documents gain a 'crawled' flag when done).
wukong_user_db = client['wukong_user_db']
wukong_user_coll = wukong_user_db['wukong_user_coll']

# Collection that stores the scraped question/answer documents.
wukonghuida_post_db = client['wukonghuida_post_db']
wukonghuida_post_coll = wukonghuida_post_db['wukonghuida_post_coll']


def start(begin, end):
    """Crawl the answer history for one slice of the user collection.

    Walks users in positions [begin, end) of wukong_user_coll, skips any
    document already marked 'crawled', crawls the rest, and flags each one
    as crawled afterwards.

    Args:
        begin: first user index (inclusive) in the collection scan.
        end: last user index (exclusive).
    """
    pending = []

    for user in wukong_user_coll.find().skip(begin).limit(end - begin):
        if 'crawled' in user:
            continue

        # Keep the document's _id alongside the stringified uid: the stored
        # user_id may not be a string, so filtering the later update on the
        # str() form could silently match nothing and the 'crawled' flag
        # would never be set. _id always matches.
        pending.append((user['_id'], str(user['user_id'])))

    for doc_id, uid in pending:
        print(uid)
        crawl_question_id(uid)
        wukong_user_coll.update_one({'_id': doc_id}, {'$set': {'crawled': True}})


def crawl_question_id(uid, cursor = 0, count = 15):
    """Page through all questions browsed by user `uid` and crawl each one.

    Calls the wukong JSON API repeatedly, following the server-provided
    cursor, and hands every question id to crawl_question_answer.

    Args:
        uid: the user's id, as a string.
        cursor: API paging cursor to start from (0 = first page).
        count: page size requested from the API.
    """
    h1['referer'] = 'https://www.wukong.com/user/?uid=' + uid + '&type=0'

    # Iterative pagination replaces the original self-recursion, which
    # needed an enormous sys.setrecursionlimit to survive long histories.
    while True:
        try:
            resp = requests.get('https://www.wukong.com/wenda/web/my/brow/', params = {
                'cursor': cursor,
                'other_uid': uid,
                'count': count
            }, headers = h1).json()
        # RequestException covers network/HTTP failures; ValueError covers
        # a non-JSON response body. A bare except here would also have
        # hidden programming errors like NameError.
        except (requests.RequestException, ValueError):
            print('error')
            return

        if 'data' not in resp:
            return

        data = resp['data']
        if 'feed_question' not in data:
            return

        for que in data['feed_question']:
            crawl_question_answer(uid, que['question']['qid'])

        if not data['has_more']:
            return

        cursor = data['cursor']


def crawl_question_answer(uid, qid):
    """Fetch one question page and store its title/subtitle/answers in Mongo.

    Skips questions already present in wukonghuida_post_coll (keyed by qid).

    Args:
        uid: id of the user whose profile referred us here (used for the
            referer header only).
        qid: the question id, as a string.
    """
    # Check for a duplicate BEFORE fetching: the original fetched and parsed
    # the page even when the qid was already stored, wasting a request.
    if wukonghuida_post_coll.find_one({'qid': qid}):
        return

    url = 'https://www.wukong.com/question/' + qid + '/'
    h2['referer'] = 'https://www.wukong.com/user/?uid=' + uid + '&type=0'

    try:
        r = requests.get(url, headers = h2)
        r.encoding = 'utf-8'
    except requests.RequestException:
        print('errrrr')
        return

    soup = BeautifulSoup(r.text, 'lxml')

    title = ''
    subtitle = ''

    title_node = soup.find(class_ = 'question-name')
    if title_node:
        title = title_node.get_text().strip()
        # Guard separately: 'question-text' can be missing even when
        # 'question-name' exists, which previously raised AttributeError.
        subtitle_node = soup.find(class_ = 'question-text')
        if subtitle_node:
            subtitle = subtitle_node.get_text().strip()

    answers = []
    for item in soup.select('.answer-items .answer-item'):
        text_node = item.find(class_ = 'answer-text-full')
        # Skip malformed answer items instead of crashing on None.get_text().
        if text_node:
            answers.append(text_node.get_text())

    qa_item = {
        'qid': qid,
        'title': title,
        'subtitle': subtitle,
        'answers': answers
    }

    wukonghuida_post_coll.insert_one(qa_item)
    print(qa_item)


# Number of worker processes to split the user collection across.
# The original hard-coded the literal 1 in two places (the ceil divisor and
# the range); one constant keeps them consistent when scaling up.
NUM_WORKERS = 1

if __name__ == '__main__':
    # NOTE(review): Collection.count() was removed in pymongo 4 — switch to
    # count_documents({}) when upgrading the driver.
    step = math.ceil(wukong_user_coll.count() / NUM_WORKERS)

    workers = []
    for i in range(NUM_WORKERS):
        # Each worker handles the half-open slice [i*step, (i+1)*step).
        workers.append(Process(target = start, args = (i * step, (i + 1) * step)))

    for p in workers:
        p.start()

    # Wait for every worker so the script doesn't exit with children running.
    for p in workers:
        p.join()
