#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Get proxies from urls, and test their speed"""
import re, time, threading, sys, urllib
import threadpool, StringIO, pycurl, traceback

# Listing pages that are scraped for open HTTP proxies (parsed with proxy_pattern).
urls = ["http://proxy.ipcn.org/proxya2.html",
         "http://www.cz88.net/proxy/index.shtml",
         "http://www.cz88.net/proxy/http_2.shtml",
         "http://www.cz88.net/proxy/http_3.shtml",
          "http://www.cz88.net/proxy/http_4.shtml",
          "http://www.cz88.net/proxy/http_5.shtml",
          "http://www.cz88.net/proxy/http_6.shtml",
          "http://www.cz88.net/proxy/http_7.shtml",
          "http://www.cz88.net/proxy/http_8.shtml",
         "http://www.cz88.net/proxy/http_9.shtml",
          "http://www.cz88.net/proxy/http_10.shtml",
        "http://proxy.ipcn.org/proxylist.html",
        "http://proxy.ipcn.org/proxylist2.html",
        ]               # where to get proxies from
urls_proxy = {}         # proxy used to connect to the listing urls (not read anywhere in this file)
# Regexes that extract (ip, port) capture-group pairs from the fetched pages.
proxy_pattern = [r"""(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}):(\d{1,})""",
                 r"""<tr><td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td><td>(\d{1,})</td>""",]

test_url = "http://video.google.com/"                     # page fetched through each proxy to time it
dnslook_url = "http://www.cz88.net/ip/ipcheck.aspx?ip="   # IP-to-location lookup web service
test_pattern = re.compile(r"""Upload Your Videos""")      # text expected in a genuine test_url response
time_out = 15.0         # max waiting time to test proxies, in seconds
output_file = "Proxies.txt"     # where the fastest proxies are saved between runs

def getlocation(ip):
        if ':' in ip:
                ip = ip[:ip.index(':')]
        try:
                print "get location of "+ip +" from: "+dnslook_url+ip
                f = urllib.urlopen(dnslook_url+ip)
        except:
                print "error , timeout"
                return "Location unknown"
        else:
                data = f.read()
                f.close()
                loc = re.findall(r'document.write\("(.*)"\)', data)
                if len(loc)>0:
                        return "Location "+loc[0]
                else:
                        return "Location unknown"
def totest(proxy, result):
	"""test a proxy's speed in time_out seconds"""
	test = TestTime(proxy)
	test.setDaemon(False) 
	print "testing "+proxy
	test.start()
        try:
                test.join(time_out) 	#wait time_out seconds for testing
                print test.stat
                if test.time:
                        result.append((test.time, proxy))
        except:
                test = None
			
def fetchproxylist(set_ofproxies, data, patterns=None):
        """Extract "ip:port" strings from *data* and add them to *set_ofproxies*.

        data: page text to scan.
        patterns: optional list of regexes, each with two capture groups
                  (ip, port); defaults to the module-level proxy_pattern.
        """
        if patterns is None:
                patterns = proxy_pattern
        for patstr in patterns:
                set_ofproxies.update(ippart + ":" + ipport
                                     for (ippart, ipport) in re.findall(patstr, data))

def makeRequests(callable, args_list, callback=None, exc_callback=None):
        """Build one threadpool.WorkRequest per item in *args_list*.

        Each request invokes *callable* with (item, fresh pycurl.Curl handle)
        and is wired to the given result/exception callbacks.
        """
        return [threadpool.WorkRequest(callable,
                                       [item, pycurl.Curl()], None,
                                       callback=callback,
                                       exc_callback=exc_callback)
                for item in args_list]

totnum = 0
if __name__ == "__main__":
        pycurl.global_init(pycurl.GLOBAL_ALL) # GLOBAL_ALL must be set in normal
	#get old proxies in output_file
	try:
		f = open(output_file)
	except:
		allproxies = set()
	else:
		allproxies = set([x[:x.index(' ')] for x in f.readlines()])
		f.close()	


	#get else proxies from urls
	for url in urls:
		print "getting proxy from "+url
		try:
                        data = StringIO.StringIO()
                        curl = pycurl.Curl()
                        curl.setopt(pycurl.URL, url)
                        curl.setopt(pycurl.TIMEOUT, 15)
                        curl.setopt(pycurl.WRITEFUNCTION, data.write)
                        curl.setopt(pycurl.FOLLOWLOCATION, 1)
                        curl.perform()
                        curl.close()
                except:
                        del(data)
                        del(curl)
                        traceback.print_exc()
		else:
                        del(curl)
                        fetchproxylist(allproxies, data.getvalue())
			print url+" finished!"
                        del(data)

        result = []

        data = [x for x in allproxies]

        totnum = len(allproxies)
        def test_thisproxy(url, curl):
                start = time.time()
		try:
                        data = StringIO.StringIO()
                        curl.setopt(pycurl.URL, test_url)
                        curl.setopt(pycurl.PROXY, "http://"+url)
                        curl.setopt(pycurl.TIMEOUT, 15)
                        curl.setopt(pycurl.NOSIGNAL, 1)
                        curl.setopt(pycurl.WRITEFUNCTION, data.write)
                        curl.setopt(pycurl.FOLLOWLOCATION, 1)
#                        curl.setopt(pycurl.VERBOSE, 1)
                        curl.perform()
                        curl.close()
		except:
                        curl.close()
			raise RuntimeError("proxy %s failed"%url)
		else:
			end = time.time()
			if test_pattern.search(data.getvalue()): #if data is matched
                                print "proxy %s ok, time: %s"%(url,end-start)
                                return (end-start, url)
			else:
                                raise RuntimeError("proxy %s failed"%url)

        def recordit(request, res):
                result.append(res)

        def exc_dropit(request, exc_info):
                print "Exception occured in request #%u: %s" % \
          (int(request.requestID), exc_info[1])
                
        requests = makeRequests(test_thisproxy, data, recordit, exc_dropit)

        main = threadpool.ThreadPool(100)
        for req in requests:
                main.putRequest(req)

        print "%d proxies to be checked by %d threads"%(len(requests),
                                                        100)
        main.wait()
        
#         #test all proxies' speed
#         for proxy in allproxies:
#                 #new thread to test every proxy
#                 t = threading.Thread(target=totest, args=(proxy, result))
#                 t.setDaemon(True)
#                 t.start()
#         time.sleep(time_out+5.0)
	#show all proxies' speed
	result.sort()
	for i in xrange(len(result)):
		print str(i+1)+"\t"+result[i][1]+"   \t:\t"+str(result[i][0])
	
	#output needed proxies	
	num = min(abs(int(raw_input("\nHow many proxies to output: "))), len(result))
	try:
		f = open(output_file, "w")
	except:
		print "Can not open output file!"
	else:
		f.writelines([x[1]+" "+getlocation(x[1])+"\n" for x in result[:num]])
		f.close()
		print str(num)+" proxies are output."
	
