import re
import os
import sys
import time
import json
import math
import random
import pprint
import datetime
import requests
import platform

from bs4 import BeautifulSoup
from selenium import webdriver
from pymongo import MongoClient
from multiprocessing import Process
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

# MongoDB host: first CLI argument if given, otherwise a local instance.
net = sys.argv[1] if len(sys.argv) == 2 else 'localhost'

# crawl_user() recurses once per newly discovered visitor, so the crawl can go
# far deeper than CPython's default ~1000-frame recursion limit.
sys.setrecursionlimit(1000000)

# connect=False defers establishing the connection until first use, which
# keeps the client safe to share with the worker processes forked in __main__.
client = MongoClient(net, 27017, connect = False)

# One document per discovered Sina blog user, keyed by 'uid'.
sina_blog_user_db = client['sina_blog_user_db']
sina_blog_user_coll = sina_blog_user_db['sina_blog_user_coll']


def crawl_user(uid):
    """Crawl the Sina blog visitor list of *uid* and store every newly seen user.

    Each stored visitor is in turn crawled for its own visitors, so the crawl
    walks the visitor graph transitively. Uses an explicit worklist instead of
    recursion so a deep visitor chain cannot exhaust the interpreter stack
    (the original recursive version needed sys.setrecursionlimit(1000000)).

    Args:
        uid: Sina blog user id whose visitor list is fetched first.
    """
    pending = [uid]
    while pending:
        current = pending.pop()
        try:
            resp = requests.get('http://footprint.cws.api.sina.com.cn/list.php', params = {
                'uid': current,
                'pagesize': 100000,
                'pid': 1,
                'varname': 'requestId_visitor_list'
            }, timeout = 30)  # without a timeout a worker process can hang forever
            # The endpoint returns JSONP-like text: "var requestId_visitor_list=<json>";
            # strip the assignment prefix before parsing.
            payload = json.loads(resp.text.replace('var requestId_visitor_list=', ''))
            record = payload['data']['record']
        except (requests.RequestException, ValueError, KeyError) as e:
            # Best-effort crawl: skip users whose visitor list cannot be
            # fetched or parsed instead of crashing the whole worker process.
            print('failed to crawl %s: %s' % (current, e))
            continue

        for visitor in record:
            # Insert only users we have not seen before, then queue them for crawling.
            if not sina_blog_user_coll.find_one({'uid': visitor['uid']}):
                sina_blog_user_coll.insert_one(visitor)
                print(visitor['name'])
                pending.append(visitor['uid'])


if __name__ == '__main__':
    # Seed the crawl from 100 randomly sampled already-known users, giving
    # each seed its own worker process.
    workers = [
        Process(target = crawl_user, args = (doc['uid'],))
        for doc in sina_blog_user_coll.aggregate([{'$sample': {'size': 100}}])
    ]

    for worker in workers:
        worker.start()

    # Block until every crawler process has finished before exiting.
    for worker in workers:
        worker.join()
