#coding:utf-8
import urllib2
import sys

# Page fetched by the connectivity/proxy checks below (Baidu homepage).
url = 'http://www.baidu.com'

def checkProxy(h, p):
    """Return True if the HTTP proxy h:p can fetch a known Baidu image.

    h -- proxy host (string)
    p -- proxy port (string or int)

    The check downloads a small static logo through the proxy with a
    2-second timeout and compares the payload size against the expected
    705 bytes; a transparent proxy that injects its own content (captive
    portal, ad page) returns a different length and is rejected.
    """
    proxy_handler = urllib2.ProxyHandler({'http': "http://%s:%s" % (h, p)})
    opener = urllib2.build_opener(proxy_handler, urllib2.HTTPHandler)
    try:
        response = opener.open(
            'http://www.baidu.com/img/baidu_jgylogo3.gif', timeout=2)
        # Exactly 705 bytes == the genuine logo was served.
        return len(response.read()) == 705
    except Exception:
        # Any network error (refused, timeout, DNS) means unusable proxy.
        return False
                
def check_proxy(ip, port):
    """Return True if the HTTP proxy ip:port can load the Baidu homepage.

    ip   -- proxy host (string)
    port -- proxy port (string)

    Uses a dedicated opener instead of urllib2.install_opener: the
    original installed the proxy opener globally, silently routing every
    later urllib2.urlopen call (e.g. check_network) through this proxy.
    """
    proxy_handle = urllib2.ProxyHandler({'http': 'http://' + ip + ':' + port})
    opener = urllib2.build_opener(proxy_handle, urllib2.HTTPHandler)
    content = opener.open(url).read()
    # '11000002000001' appears to be Baidu's ICP licence number in the
    # page footer -- used as a marker that the real page came back.
    # TODO(review): confirm the marker is still present in the homepage.
    return content.find('11000002000001') > 0
    
def check_network():
    """Return True when the Baidu homepage is reachable without a proxy
    and contains the expected marker string."""
    page = urllib2.urlopen(url).read()
    marker_pos = page.find('11000002000001')
    # Marker must occur past position 0, matching the original check.
    return marker_pos > 0

def check_file(files):
    proxys = open(files,'r')
    avaliableProxyOut = open("avaliableProxyOut.txt", 'w')
    acnt = 0
    sum = 0
    for line in proxys:
        content = line.split(' ')
        sum += 1
        if checkProxy(content[0], content[1]):
            avaliableProxyOut.write("%s %s\n" %(content[0], content[1]))
            print "<:)> %s:%s is avaliable" %(content[0], content[1])
            acnt += 1
        else:
            print "<:(> %s:%s is unavaliable" %(content[0], content[1])
    print "total (%d/%d) avaliable" %(acnt, sum)
            
if __name__ == '__main__':
    #print check_network()
    if (len(sys.argv)>2):
        print checkProxy(sys.argv[1], sys.argv[2])
    if (len(sys.argv)>1):
        check_file(sys.argv[1])