#! /usr/bin/env python
#coding=utf-8
#
import os
from Queue import Queue
import threading
import time
import urllib
from urlparse import *
from urlparse import urljoin
import redis
import urllib2
import json

def get_pro():
	"""Pop one HTTP proxy address from the redis set 'redis_pan_pro'.

	If the pool is empty, refill it once from the remote proxy API and
	pop again.  Raises urllib2/network errors from the refill, or a
	plain Exception when the API yields no proxies at all (the original
	recursed forever in that case).
	"""
	pro = r.spop('redis_pan_pro')
	if pro is not None:
		return pro
	# Pool exhausted: refill from the proxy API, then pop again.
	pros = urllib2.urlopen('http://chenapi.sinaapp.com/index.php/api').read()
	for i in json.loads(pros):
		r.sadd('redis_pan_pro', i)
	pro = r.spop('redis_pan_pro')
	if pro is None:
		# Guard: avoid the original's unbounded recursion when the API
		# returns an empty proxy list.
		raise Exception('proxy pool is empty and API returned no proxies')
	return pro
# def get_url(url):
# 	headers =  {
# 		'Host':'pan.baidu.com',
# 		'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
# 		'Referer' : 'http://pan.baidu.com',
# 		'Connection' : 'keep-alive'
# 	}
# 	request = urllib2.Request(url,'',headers)
# 	try:
# 		return urllib2.urlopen(request,timeout=20).read()
# 	except Exception, e:
# 		raise e
def get_url(url):
	try:
		try:
			pro = get_pro()
		except Exception, e:
			raise Exception('get pro_http err')
		proxy = {'http': pro}
		proxy_support = urllib2.ProxyHandler(proxy)
		opener = urllib2.build_opener(proxy_support)
		urllib2.install_opener(opener)
		i_headers = {
			'Host':'www.baidu.com',
			'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
			'Referer' : 'http://www.baidu.com',
			'Connection' : 'keep-alive'
		}
		req = urllib2.Request(url,headers=i_headers)
		return urllib2.urlopen(req,timeout=20).read()
	except Exception, e:
		raise e
def get_pan_data(data):
	"""Decode a pan.baidu.com JSON reply string into a dict.

	The reply must carry ``errno == 0``; any other errno is treated as
	an API error and raises.  A reply without 'errno' raises KeyError,
	exactly as before.
	"""
	dic = json.loads(data)
	errno = dic['errno']
	if errno != 0:
		raise Exception('errno isnot 0 , but '+str(errno))
	return dic
def get_pan_url(url):
	"""Fetch *url* via get_url() and return the decoded JSON dict."""
	body = get_url(url)
	return get_pan_data(body)
def get_shuju_url(url):
	"""Fetch one fans/follows listing page and return its list of uks.

	Handles both getfanslist replies ('fans_list' entries keyed by
	'fans_uk') and getfollowlist replies ('follow_list' entries keyed
	by 'follow_uk'); any other reply shape raises.
	"""
	dic = get_pan_url(url)
	if 'fans_list' in dic:
		return [item['fans_uk'] for item in dic['fans_list']]
	if 'follow_list' in dic:
		return [item['follow_uk'] for item in dic['follow_list']]
	raise Exception('dic by url is wrong')
def get_fanslist(uk):
	fans = []
	url = 'http://pan.baidu.com/pcloud/friend/getfanslist?query_uk='+str(uk)+'&limit=24&start=0'
	try:
		dic = get_pan_url(url)
		total_count = dic['total_count']
		j=0
		while j < total_count:
			url = 'http://pan.baidu.com/pcloud/friend/getfanslist?query_uk='+str(uk)+'&limit=24&start='+str(j)
			try:
				relist = get_shuju_url(url)
				fans += relist
				j +=24
			except Exception, e:
				r.sadd(key_no,url)
				print e
		return fans
	except Exception, e:
		raise e
def get_followlist(uk):
	follows = []
	url = 'http://pan.baidu.com/pcloud/friend/getfollowlist?query_uk='+str(uk)+'&limit=24&start=0'
	try:
		dic = get_pan_url(url)
		total_count = dic['total_count']
		j=0
		while j < total_count:
			url = 'http://pan.baidu.com/pcloud/friend/getfollowlist?query_uk='+str(uk)+'&limit=24&start='+str(j)
			try:
				relist = get_shuju_url(url)
				follows += relist
				j +=24
			except Exception, e:
				r.sadd(key_no,url)
				print e
		return follows
	except Exception, e:
		raise e
def get_userinfo():
	# TODO(review): unimplemented stub — the docstring below records the
	# user-profile endpoint this function was presumably meant to call
	# (example query_uk shown); the function currently does nothing.
	'http://pan.baidu.com/pcloud/user/getinfo?query_uk=321447710'
def run_uk(key_to,key_ed,key_mi,key_no,queue):
	while True:
		name = threading.currentThread().getName()
		uk = queue.get()
		if uk:
			print "["+name+"号] "+uk+"\n"
			try:
				fanslist = get_fanslist(uk)
				for new in fanslist:
					if not r.sismember(key_mi,new) and not r.sismember(key_ed,new):
						r.sadd(key_to,new)
				followlist = get_followlist(uk)
				for new in followlist:
					if not r.sismember(key_mi,new) and not r.sismember(key_ed,new):
						r.sadd(key_to,new)
				r.smove(key_mi,key_ed,uk)
				queue.task_done()
			except Exception, e:
				r.smove(key_mi,key_to,uk)
				#print 'get ukdata:'+str(uk)+'err\n'
				print e
def run_url(key_to,key_ed,key_no,queue):
	while True:
		url = r.spop(key_no)
		if url:
			try:
				datalist = get_shuju_url(url)
				for new in datalist:
					if not r.sismember(key_mi,new) and not r.sismember(key_ed,new):
						r.sadd(key_to,new)
			except Exception, e:
				print e
			finally:
				r.sadd(key_no,url)
def check(key_to,key_mi,queue):
	"""Feeder loop: keep ~5 uks buffered in *queue*, moving each from
	the to-do set *key_to* into the in-progress set *key_mi*.

	Runs forever as a daemon thread; relies on the module-level redis
	client ``r``.
	"""
	while True:
		if queue.qsize() < 5:
			uk = r.srandmember(key_to)
			if uk:
				r.smove(key_to,key_mi,uk)
				queue.put(uk)
				continue
		# Queue is full, or the to-do set is empty: back off instead of
		# spinning at 100% CPU (the original busy-waited).
		time.sleep(1)
if __name__ == '__main__':
	key_to = 'redis_pan_user_to'  # uks waiting to be crawled
	key_ed = 'redis_pan_user_ed'  # uks fully processed
	key_mi = 'redis_pan_user_mi'  # uks currently in progress
	key_no = 'redis_pan_errno'    # listing URLs that failed and await retry
	# One thread each for check(), run_uk() and run_url().  The original
	# set num_threads=2, so the j==2 branch never ran and run_url() —
	# the error-retry worker — was never started.
	num_threads = 3
	queue = Queue()
	uks = ['842263796','321447710','2956110277','1566620287','436751272','3811108456','319707070','2435513896']
	r = redis.Redis(host='localhost', port=6379)
	# Seed the to-do set with the bootstrap uks on first run.
	if not r.srandmember(key_to):
		for uk in uks:
			r.sadd(key_to,uk)
	# Prime the work queue with up to five uks.
	for i in range(5):
		uk = r.srandmember(key_to)
		if uk:  # guard: srandmember returns None once the set runs dry
			r.smove(key_to,key_mi,uk)
			queue.put(uk)
	for j in range(num_threads):
		if j == 0:
			worker = threading.Thread(target=check,args=(key_to,key_mi,queue),name=j+1)
			worker.setDaemon(True)
			worker.start()
		elif j == 1:
			worker = threading.Thread(target=run_uk,args=(key_to,key_ed,key_mi,key_no,queue),name=j+1)
			worker.start()
		elif j == 2:
			worker = threading.Thread(target=run_url,args=(key_to,key_ed,key_no,queue),name=j+1)
			worker.start()