#!/usr/bin/env python
# coding=UTF8

#
# This program fetches proxy lists from http://www.publicproxyservers.com and verifies them
#

# Finished Features:
#    Fetch proxy list from www.publicproxyservers.com
#    Verify proxy

# TODOs:
#    Fetch proxy list from www.freeproxylists.com
#    Write command argument parser

import urllib, urlparse
import pycurl
import cStringIO
import os, sys
import warnings, re
import random
import datetime
import warnings

DIR = '.'           # The working directory, config file will save in the folder

class PycurlWrapper:
    '''PycurlWrapper is a wrapper for Pycurl, it take care of the following things:
       1. GET/POST through HTTP,
       2. switch useable proxy,
       3. Explorer agent simulating,
       4. error handling.
    '''
    verbose = True
    debug = True
    # The HTTP proxy to use, e.g.: 200.100.152.163:8080
    proxy = ''
    # The variable to store HTML page
    contents = ''
    # The current page's url
    current_url = ''
    enable_cookie = True
    cookie_file = ''
    proxier = None  # Instance of MyProxy, used by fetch() to change proxy
    retry = 2   # Fetch retry times

    # Configurations
    connection_timeout = 10
    timeout = 20

    # Note: This is not constructor(constructor is __init__), curlInit must be called before using fetch()
    def curlInit(self):
        # Initialize connection options
        self.retry = 2
        self.connection_timeout = 5
        self.timeout = 20
        self.body = cStringIO.StringIO()  # Return a file descriptor
        # Initialize COOKIES file
        self.curl = pycurl.Curl()
        self.curl.setopt(pycurl.FOLLOWLOCATION, 1)
        if not self.cookie_file:
            warnings.filterwarnings(action='ignore',message='tempnam is a potential security risk to your program')
            self.cookie_file = os.tempnam(None)
            self.curl.setopt(pycurl.COOKIEFILE, self.cookie_file)
            print "[DBG] cookie file not set, use random:", self.cookie_file
        else:
            print "[DBG] cookie file:", self.cookie_file
            self.curl.setopt(pycurl.COOKIEFILE, self.cookie_file)
            self.curl.setopt(pycurl.COOKIEJAR, self.cookie_file) # Save cookie file when curl.close()
        self.curl.setopt(pycurl.WRITEFUNCTION, self.body.write)
        # Simulate Windows XP + IE6
        self.curl.setopt(pycurl.HTTPHEADER, ["Accept:	image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, application/x-shockwave-flash, application/vnd.ms-excel, application/vnd.ms-powerpoint, application/msword, */*", 'Accept-Language:	en-US, zh-CN'])
        self.curl.setopt(pycurl.USERAGENT, 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1)')
        self.curl.setopt(pycurl.ENCODING, 'gzip, deflate')
        self.curl.setopt(pycurl.REFERER, 'http://s2.travian.cn/dorf1.php')
        # Switch proxy on
        self.curl.setopt(pycurl.PROXY, self.proxy)
        # Error Control options (optional)
        self.curl.setopt(pycurl.MAXREDIRS, 3)
        self.curl.setopt(pycurl.CONNECTTIMEOUT, self.connection_timeout)
        self.curl.setopt(pycurl.TIMEOUT, self.timeout)
        self.curl.setopt(pycurl.NOSIGNAL, 1)
        self.curl.setopt(pycurl.VERBOSE, 0)

    def curlClose(self):
        self.curl.close()

    def fetch(self, url, postdata=None):
        '''Fetch a webpage(POST data, Cookies enabled). If error happens, it
        will be handled in house. It can automatically switch proxy if needed.'''
        # Set HTML storage variable
        self.body.close()
        self.body = cStringIO.StringIO()
        self.curl.setopt(pycurl.WRITEFUNCTION, self.body.write)
        # Set proxy
        self.curl.setopt(pycurl.PROXY, self.proxy)
        # Set Cookies
        #self.curl.setopt(pycurl.COOKIEFILE, self.cookie_file)
        #print "[DBG] PycurlWrapper::fetch() cookie_file", self.cookie_file

        # Set POST data if have any
        if postdata == None:
            #self.curl.setopt(pycurl.POSTFIELDS, '')  # No post data
            pass
        else:
            if not isinstance(postdata, dict):
                if self.verbose: print "[ERR] Postdata data type error"
                raise # Post data type error
            self.curl.setopt(pycurl.POSTFIELDS, urllib.urlencode(postdata))

        # Set target url
        #print "[DBG] fetch() url=", url
        self.curl.setopt(pycurl.URL, url)

        # Perform action, if failed switch another proxy and try again
        OK = False
        try_count = 1
        while not OK:
            try:
                self.curl.perform()
                OK = True
            except:
                if self.verbose:
                    print "[ERR] Connection error, try again ..."
                    try_count += 1
                    # Switch Proxy if too many errors
                    if try_count >= self.retry:
                        if self.proxier:
                            self.proxy = self.proxier.get()
                            # Switch proxy
                            if self.verbose: print "[MSG] Switching to proxy: %s" % self.proxy
                            self.curl.setopt(pycurl.PROXY, self.proxy)
                            try_count = 0
                OK = False

        # Save HTML source code
        self.contents = self.body.getvalue()
        self.current_url = url

    def tryFetch(self, url, postdata=None):
        '''tryFetch() is used to fetch a web page, it has no error handller.'''
        # Set HTML storage variable
        self.body.close()
        self.body = cStringIO.StringIO()
        self.curl.setopt(pycurl.WRITEFUNCTION, self.body.write)
        # Set proxy
        self.curl.setopt(pycurl.PROXY, self.proxy)
        # Set POST data if have any
        if postdata == None:
            self.curl.setopt(pycurl.POSTFIELDS, '')  # No post data
        else:
            if not isinstance(postdata, dict):
                if self.verbose: print "[ERR] Postdata data type error"
                raise "Post data type error"
            self.curl.setopt(pycurl.POSTFIELDS, urllib.urlencode(postdata))
        # Set target url
        self.curl.setopt(pycurl.URL, url)
        OK = False
        err_count = 0
        # Perform action
        self.curl.perform()
        # Save HTML source code
        self.contents = self.body.getvalue()

    # Save HTML in local file
    def savePage(self, filename):
        try:
            filename = os.path.abspath(filename)
            if os.path.isfile(filename):
                os.remove(filename)
            fobj = open(filename, 'w')
            fobj.write(self.contents)
            fobj.close()
        except:
            print "[ERR] Saving file %s error" % filename


class MyProxy(PycurlWrapper):
    '''MyProxy provides two functionalities: 
    1. Proxy list maintaince
    grabs proxy list from web pages and verify them. Then save useable proxy
    into proxy_list.txt, save invalid proxies into invalid_proxy.txt.
    2. Internet access agent
    Other program could use MyProxy as a agent. It can handle proxy stuff for
    you. even you don't use proxy, it could handle timeout and many mores.
    '''
    
    verbose = True      # Whether display verbose information
    debug = True        # In debug mode, more info will be print out
    # Proxy lists
    proxy_filename = 'proxy_list.txt'    # The absolute path of valid proxy list file
    invalid_proxy_filename = 'invalid_proxy.txt' # The invalid proxy list
    proxylist = []
    invalid_proxylist = []
    isProxylistChanged = False  # Flat to identify if proxylist is changed
    isInvalidProxylistChanged = False # Flat to identify if invalid_proxy_list is changed
    proxynum = 0        # The number of available(valid) proxies
    # Proxy source
    proxypages = []     # A list of [ proxylist_url, handller_function ]
    # Auto update settings
    auto = False         # Auto update proxy if available proxy less than threshold
    threshold = 10      # If available proxy less than this number, autoUpdate will execute
    lastverify = ''     # The date of last update proxylist 'YYYY-MM-DD'
    TTT = True          # Use non 80 proxy only, because TPG cache'd port 80
    verify_before_get = False # Verify the proxy before get() or not
    isUpdateFile = True
    # Commonly used patterns
    pattern_ip = re.compile('\d+\.\d+\.\d+\.\d+')
    pattern_proxy = re.compile('\d+\.\d+\.\d+\.\d+:\d+')
    # The verification data to be used to verify proxies
    verify_data = [['http://09city.com/index.php?a=kargo', '09city.com'],
        ['http://pgl.yoyo.org/http/browser-headers.php', 'HTTP Header Viewer'],
            ['http://s2.travian.cn', '<title>Travian cn2</title>']]
            
    # The constructor
    def __init__(self):
        self.proxy = '' # MyProxy itself does not use proxy
        # Initialize PycurlWrapper
        self.curlInit()
        # Proxy page and its corresponding parser
        self.proxypages.append(['http://www.samair.ru/proxy/proxy-01.htm', self.parseSamair])
        self.proxypages.append(['http://www.samair.ru/proxy/proxy-02.htm', self.parseSamair])
        self.proxypages.append(['http://www.samair.ru/proxy/proxy-03.htm', self.parseSamair])
        self.proxypages.append(['http://www.samair.ru/proxy/proxy-04.htm', self.parseSamair])
        self.proxypages.append(['http://www.samair.ru/proxy/proxy-05.htm', self.parseSamair])
        self.proxypages.append(['http://www.samair.ru/proxy/proxy-06.htm', self.parseSamair])
        self.proxypages.append(['http://www.proxy4free.com/page1.html', self.parseP4F])
        self.proxypages.append(['http://www.proxy4free.com/page2.html', self.parseP4F])
        self.proxypages.append(['http://www.proxy4free.com/page3.html', self.parseP4F])
        self.proxypages.append(['http://www.proxy4free.com/page4.html', self.parseP4F])
        self.proxypages.append(['http://www.proxy4free.com/page5.html', self.parseP4F])
        self.proxypages.append(['http://www.publicproxyservers.com/page1.html', self.parsePPS])
        self.proxypages.append(['http://www.publicproxyservers.com/page2.html', self.parsePPS])
        self.proxypages.append(['http://www.publicproxyservers.com/page3.html', self.parsePPS])
        self.proxypages.append(['http://www.publicproxyservers.com/page4.html', self.parsePPS])
        self.proxypages.append(['http://www.publicproxyservers.com/page5.html', self.parsePPS])

        #http://www.proxy4free.com/page1.html
        self.proxy_filename = os.path.join(DIR, self.proxy_filename)
        self.invalid_proxy_filename = os.path.join(DIR, self.invalid_proxy_filename)
        # Load local proxy list
        self.loadProxylist()
        # Verify all proxies
        d = datetime.date
        if self.lastverify <> str(d.today()):
            if self.verbose: print "[DBG] List has not been verified today, check forced"
            self.verifyAll()
        else:
            if self.verbose: print "[MSG] Local proxy list verification skipped."
        # Update proxy if needed
        self.autoUpdate()
        
    def verify(self, proxy, verify_data=None):
        '''Check if the proxy working, if not remove it from our list (if necessary)'''
        # If TTT is set, Ignore proxy with port 80 
        if self.TTT == True:
            p1 = proxy.find(':')
            port = proxy[p1+1:]
            if port == '80':
                return False
        # Skip already checked proxies
        if proxy in self.invalid_proxylist: 
            if self.verbose: print "[MSG] Verifing proxy", proxy, "... Ignored" 
            return False
        # Set verification target and expected data
        if verify_data == None:
            target_url, target_content = self.verify_data[0]
            print "checking", target_url
        else:
            target_url, target_content = verify_data
        if self.verbose: print "[MSG] Verifing proxy", proxy, "...", 
        # Set on proxy
        self.proxy = proxy
        # Try to use proxy getting the target HTML
        max_try = 2
        try_count = 1
        while True:
            try:
                self.tryFetch(target_url)
                self.proxy = '' # Disable proxy
                if self.contents.find(target_content) >= 0:
                    if self.verbose: print "OK"
                    return True # Proxy OK
                else:
                    if try_count < max_try:
                        try_count += 1
                        print "retry...",
                    else:
                        return False
            except:
                # Try next time
                if try_count < max_try: 
                    print "retry...",
                    try_count += 1
                    continue
                # Invalid proxy
                self.proxy = '' # Disable proxy
                if self.verbose: print "Failed"
                #self.invalid(proxy)   # Remove false proxy from valid list, add it into invalid list
                return False
    
    # Verify all the proxies in proxylist
    def verifyAll(self):
        if self.verbose: print "[MSG] Start verifing local proxy list"
        isChanged = False   # The flag to identify whether self.proxylist is changed
        for proxy in self.proxylist:
            if not self.verify(proxy):
                self.invalid(proxy)
        self.autoUpdate()    
        
    # Remove the proxy from valid list and add into invalid list
    def invalid(self, proxy):
        '''Remove a proxy from our proxy list (if it exists in our list)'''
        if proxy in self.proxylist:
            self.proxylist.remove(proxy)
            if self.isUpdateFile: # Write file is file update enabled
                self.writeValidProxyFile()
            else:   # Mark changing, if update file disabled
                self.isProxylistChanged = True
        if not proxy in self.invalid_proxylist:
            self.invalid_proxylist.append(proxy)
            if self.isUpdateFile: # Write file is file update enabled
                self.writeInvalidProxyFile()
            else:   # Mark changing, if update file disabled
                self.isInvalidProxylistChanged = True

    def parsePPS(self,url):
        '''Parse www.publicproxyservers.com's proxy list pages'''
        proxylist = []
        self.fetch(url)
        # Check for just checked proxies first
        #print self.contents
        #latest = re.findall('\d+\.\d+\.\d+\.\d+:\d+', self.contents)
        #print latest
        # The re.S flag makes the "." match any char include new line character
        pattern_block = re.compile('<tr bgcolor="#e3e6ea".+?class="menulink1.+?</tr>', re.S) 
        blocks = pattern_block.findall(self.contents)
        if blocks:
            for block in blocks:
                #----Anonymous proxy only----
                if block.find('transparent') >= 0: continue # Skip transparent proxy
                m = re.search('\d+\.\d+\.\d+\.\d+', block)
                if m:
                    ip = m.group()
                else: continue
                m = re.search('>\d+<', block)
                if m:
                    port = m.group()[1:-1]
                else: continue
                proxylist.append(ip + ":" + port)
            #print "[MSG] Imported %d anonymous proxies from %s" % (len(proxylist), url)
            return proxylist
        else:
            #print "no match" # DEBUG
            return []   # Return empty array if no proxy found in this page
    
    def parseP4F(self, url):
        '''Parse www.proxy4free.com's proxy list pages'''
        self.fetch(url)
        proxylist = []
        # Search for ip:port formated proxy first
        proxies = self.pattern_proxy.findall(self.contents)
        proxylist += proxies
        #Parse page
        blocks = re.findall('<tr bgcolor="#ffffff" class="text".+?</tr>', self.contents, re.S)
        if len(blocks) == 0:
            print "[ERR] No match found, page may change."
        else:
            pattern_port = re.compile('>\d+<')
            for block in blocks:
                #print block
                #----Anonymouse proxy only----
                #if block.find('transparent') >= 0: continue
                m = self.pattern_ip.search(block)
                if m: 
                    ip = m.group()
                else: 
                    continue
                m = pattern_port.search(block)
                if m: 
                    port = m.group()[1:-1]
                else: 
                    continue
                proxylist.append(ip + ':' + port)
        return proxylist
    
    # Parse proxy from http://www.samair.ru/proxy/
    def parseSamair(self, url):
        self.fetch(url)
        proxylist = []
        m = re.search('<table class="tablelist".+?</table', self.contents, re.S)
        chunk = m.group()
        rows = re.findall('<tr>.+?</tr', chunk, re.S)
        for row in rows:
            spans = re.findall('<span.+?>', row, re.S)
            for span in spans:
                row = row.replace(span, '')
            row = row.replace('</span>', '')
            row = row.replace("\n", '')
            #print row
            m1 = self.pattern_proxy.search(row)
            if m1:
                proxy = m1.group()
                proxylist.append(proxy)
        return proxylist
            
    #Load proxy list from local file
    def loadProxylist(self):
        # Load valid proxy list
        if os.path.isfile(self.proxy_filename):
            count = 0
            for line in open(self.proxy_filename):
                if count == 0:
                    self.lastverify = line.strip()
                    count += 1
                    continue
                # Get proxies from proxylist file
                m = self.pattern_proxy.search(line)
                if m:
                    count += 1
                    proxy = m.group()
                    if not proxy in self.proxylist:
                        self.proxylist.append(proxy)
                    else:  # Duplicated entries in proxy list file
                        if self.debug: print "[DBG] Duplicated proxy found" 
                else:
                    if self.verbose: print "[ERR] Invalid configure file, in line: %s" % line
            self.proxynum = len(self.proxylist)
            if self.verbose: print "[MSG] %d proxies ready for use." % len(self.proxylist)
        # Load invalid proxy list
        if os.path.isfile(self.invalid_proxy_filename):
            for line in open(self.invalid_proxy_filename):
                m = self.pattern_proxy.search(line)
                if m:
                    proxy = m.group()
                    if not proxy in self.invalid_proxylist:
                        self.invalid_proxylist.append(proxy)
                else:
                    if self.verbose: print "[ERR] Invalid configure file, in line: %s" % line
                    
    def update(self):
        ''' Update proxy list. Fetch latest proxies from proxy pages and verify them, then 
        update local useable list.
        '''
        # Get new proxies from web pages
        isProxylistChange = False
        if self.verbose: print "[MSG] Start updating proxies, it may take a few minutes."
        # Turn off update file switch, so as not to write file after each verification
        self.isUpdateFile = False 
        for proxypage in self.proxypages:
            url, func = proxypage
            if self.verbose: print "[MSG] Grabbing proxies from %s" % url
            proxylist = func(url)
            for proxy in proxylist:
                if not proxy in self.proxylist: # Avoid existed proxy
                    if self.verify(proxy):  # Verify proxy 
                        self.proxylist.append(proxy) 
                        self.isProxylistChanged = True
                    else:
                        self.invalid(proxy)
                if self.isProxylistChanged:
                    self.writeValidProxyFile()
                if self.isInvalidProxylistChanged:
                    self.writeInvalidProxyFile()
        self.isUpdateFile = True
            
    # Write self.proxylist into file
    def writeValidProxyFile(self):
        line = ''
        if os.path.isfile(self.proxy_filename): os.remove(self.proxy_filename)
        fobj = open(self.proxy_filename, "w")
        d = datetime.date
        self.lastverify = d.today()
        line = str(self.lastverify) + "\n"
        fobj.write(line)
        for proxy in self.proxylist:
            line = proxy + "\n"
            fobj.write(line)
        fobj.close()
        self.proxynum = len(self.proxylist)
    
    # Write self.invalid_proxylist into file
    def writeInvalidProxyFile(self):
        line = ''
        if os.path.isfile(self.invalid_proxy_filename): os.remove(self.invalid_proxy_filename)
        fobj = open(self.invalid_proxy_filename, "w")
        for proxy in self.invalid_proxylist:
            line = proxy + "\n"
            fobj.write(line)
        fobj.close()
            
    def autoUpdate(self):
        if self.auto:
            if len(self.proxylist) < self.threshold:
                    self.update()           
        
    def get(self, exclusive_list = None):
        '''Get a useable proxy from the pool'''
        if len(self.proxylist) == 0:
            self.update()
            if len(self.proxylist):
                if self.verbose: print "[MSG] No proxy available."
                return False
        else:
            count = 0
            i = random.randint(0, len(self.proxylist)-1)  # Randomly choose a proxy from proxy list
            while True:
                # Ignore the proxy in exclusive list
                if exclusive_list <> None and self.proxylist[i] in exclusive_list:
                    i = random.randint(0, len(self.proxylist)-1) 
                    continue
                count += 1
                if not self.verify_before_get: # Don't verify before get proxy
                    return self.proxylist[i]
                if self.verify(self.proxylist[i]):  # Verify proxy before return 
                    return self.proxylist[i]    # Return proxy if verify successful
                else:
                    # If proxy verify failed, randomly choose another proxy and verify it
                    i = random.randint(0, len(self.proxylist)-1) 
                if count >=3: self.autoUpdate()
                if count >=6: return False

    def main(argv=None):
        if argv is None:
            argv = sys.argv
        try:
            try:
                opts, args = getopt.getopt(argv[1:], "h", ["help"])
            except getopt.error, msg:
                raise Usage(msg)
            # more code, unchanged
        except Usage, err:
            print >>sys.stderr, err.msg
            print >>sys.stderr, "for help use --help"
            return 2

if (__name__ == "__main__"):
    '''If run myproxy as an application, it will generate a list of useable proxy and store in 
    a text file.
    '''
    if len(sys.argv) == 1:
        print "python myproxy.py [update | verify]"
    else:
        mp = MyProxy()
        for arg in sys.argv:
            if arg.lower() == 'update':
                mp.update()
            if arg.lower() == 'verify':
                mp.verifyAll()
    
