#!/usr/bin/env python
'''
Created on May 20, 2012

@author: stuart
'''

import argparse
import collections
import ConfigParser
import csv
import datetime
import httplib
import os
import Queue
import random
import re
import socket
import sqlite3
import sys
import threading
import time
import urllib2

#Configuration and defaults
#(module-level "global" statements are no-ops; they are kept here as
#documentation of which names the functions below treat as globals)
global config, CONFIG_DEFAULTS, CONFIG_PATHLIST, CONFIG_MAINSECTION
#Built-in defaults, used when an option appears neither in a config file
#nor on the command line.  See get_config() for the merge order.
CONFIG_DEFAULTS = {"nthreads": "128",
                   "user_agent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; "
                   "en-US) AppleWebKit/A.B (KHTML, like Gecko) Chrome/X.Y.Z.W "
                   "Safari/A.B.",
                   "verbose": False,
                   "debug": False,
                   "ports": "80,1080,3128,8000,8080",
                   "ipcheck_url": "http://api.externalip.net/ip/",
                   "ipcheck_interval": 300,
                   "check_random": False,
                   "timeout": 10,
                   "scrape_urls": "",
                   "check_entire_subnet": False,
                   "status_lines": 20,
                   "verbose_update_interval": 0.5,
                   "sqlite_db": "",
                   }
#Shared parser instance; get_config() fills it in and hangs a portlist
#attribute off it
config = ConfigParser.SafeConfigParser()
#Config files are read in this order; a file given with -c is appended
CONFIG_PATHLIST = [os.path.join(os.path.expanduser("~"), ".pyproxyfinder", 
                                "pyproxyfinder.cfg"), "pyproxyfinder.cfg",]
CONFIG_MAINSECTION = "Settings"
CONFIG_URLSECTION = "URL List"

#Database variables
#Candidate default locations for the sqlite database (see get_config)
DB_PATHLIST = ["pyproxyfinder.db",
               os.path.join(os.path.expanduser("~"), ".pyproxyfinder", 
                            "pyproxyfinder.db"),]

#Our externally visible IP address, kept fresh by OwnIPChecker; a proxy
#only "works" if it reports something different from this
global OWN_IP
OWN_IP = ""

#Message Queues
global verbosequeue, debugqueue, VOLEN
verbosequeue = Queue.Queue() #Drained by VerbosePrinter
debugqueue = Queue.Queue() #Drained by DebugPrinter

#Sources for ip addresses
#Make sure to update the list of all of them
global DATABASE, SCRAPED, RANDOM
DATABASE="Database"
SCRAPED= "Scraped"
RANDOM=  "Random"
SUBNETS= "Subnets"
global IP_SOURCES
IP_SOURCES=[DATABASE, SCRAPED, SUBNETS, RANDOM] #What a hack
global seen_ipps, seen_ipps_lock #Set of IP:ports we've seen
seen_ipps = set()
seen_ipps_lock = threading.Lock()
global seen_ips, seen_ips_lock #Ignores ports.  Used for accounting
seen_ips = set()
seen_ips_lock = threading.Lock()

#Only allow one db connection at once
global db_conn_lock
db_conn_lock = threading.Lock()

#Types of proxies to check
global PTYPES
#PTYPES = ("http", "https", "ftp", "socks", "socks4", "socks5")
PTYPES = ("http",)
#Thread to check proxies
class ProxyChecker(threading.Thread):
    """Daemon thread that pulls (ip:port, source, ptype) work items from a
    queue, tries to fetch ipcheck_url through the candidate proxy, and
    reports whether the proxy both works and hides our own IP address."""
    def __init__(self, workqueue, reportqueue, name):
        """@param workqueue: queue of (ip:port, SOURCE, ptype) items to check.
        @param reportqueue: queue receiving
                            (works, ip, ptype, timestamp, source) reports.
        @param name: thread name, used in debug output."""
        threading.Thread.__init__(self, name=name)
        self.daemon = True
        self.queue = workqueue
        self.reportqueue = reportqueue
    def run(self):
        while True:
            #Wait for an ip to check
            ip = self.queue.get(block=True)
            self.check_ip(ip)
            self.queue.task_done()
    def check_ip(self, ips):
        """Check one candidate proxy; always emits exactly one report.
        @param ips: An (ip:port to check as a string, the SOURCE, ptype)."""
        ip = "%s://%s"%(ips[2],ips[0])
        source = ips[1]
        ptype = ips[2]
        #This code stolen from an anon on pastebin: http://pastebin.com/j66Dc5UC
        proxy_handler = urllib2.ProxyHandler({ptype: ip})
        opener = urllib2.build_opener(proxy_handler)
        opener.addheaders = [('User-agent', config.get(CONFIG_MAINSECTION, 
                                                       "user_agent"))]
        #Pessimistic default: any failure below leaves works False
        works = False
        try:
            apparent_ip = \
                opener.open(config.get(CONFIG_MAINSECTION, "ipcheck_url"),
                            timeout=config.getfloat(CONFIG_MAINSECTION,
                                                    "timeout")).read().strip()
            #The proxy "works" only if it returned something shaped like an
            #IP address AND that address is not our own (i.e. it hides us)
            if (re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", 
                         apparent_ip) and apparent_ip != OWN_IP):
                works = True
            debug("[%s] Proxy %s returned IP %s."%(self.name, ip, 
                                                   apparent_ip[:15]))
        except (urllib2.HTTPError, urllib2.URLError, socket.error) as e:
            debug("[%s] Proxy %s (%s) gave error: %s."%(self.name, 
                                                      ip, ptype, e))
        except httplib.BadStatusLine:
            debug("[%s] Proxy %s (%s) gave empty status line."%(self.name,
                                                              ip, ptype))
        except Exception as e:
            #Last-resort catch so one bad proxy can't kill the thread
            debug("[%s] Proxy %s (%s) caused unhandled %s exception %s."%
                  (self.name, ip, ptype, str(type(e)), str(e)))
        #Exactly one report per check, whatever happened above (the
        #original duplicated this put() in four branches)
        self.reportqueue.put((works, ip, ptype, datetime.datetime.now(), 
                              source))
        stats.increment(stats.CHECKED, source)
#Keep our IP address up to date        
class OwnIPChecker(threading.Thread):
    """Daemon thread that periodically refreshes the module-global OWN_IP."""
    def __init__(self, name):
        threading.Thread.__init__(self, name=name)
        self.daemon = True
        #Fetch once up front so OWN_IP is populated before checks begin
        self.update_ip()
    def run(self):
        #Re-check our externally visible address on a fixed interval
        while True:
            interval = config.getint(CONFIG_MAINSECTION, "ipcheck_interval")
            time.sleep(interval)
            self.update_ip()
    def update_ip(self):
        """Query the configured ipcheck_url and store the reply in OWN_IP."""
        global OWN_IP
        url = config.get(CONFIG_MAINSECTION, "ipcheck_url")
        OWN_IP = urllib2.urlopen(url).read().strip()
        debug("[%s] Own IP: %s"%(self.name, OWN_IP))

#Scrapes URLs and Comes up with random IP addresses to check
class _URLGenerator(threading.Thread):
    """Base class for threads that produce candidate ip:port work items."""
    def __init__(self, task_queue, name="Unnamed URL Generator"):
        """@param task_queue: queue receiving (ip:port, source, ptype)."""
        threading.Thread.__init__(self, name=name)
        self.task_queue = task_queue
        self.daemon = True
    
    #Queue an ip address.  Source is one of IP_SOURCES
    #ip is a 2-tuple (ip, port); port is "" if not specified
    def queue(self, ip, source):
        """Queue checks for ip on every configured port (plus the port it
        was found with, if any), skipping ip:port pairs already seen."""
        #Check the configured ports, plus the port the ip came with.
        #BUGFIX: was len(ip[1]) > 1, which silently dropped single-digit
        #ports; any non-empty port should be checked (scrape() already
        #treats any non-empty port as valid).
        ports = config.portlist + ([ip[1]] if len(ip[1]) > 0 else [])
        for port in ports:
            ipp = ip[0] + ":" + port
            #Don't add the same ip:port twice
            with seen_ipps_lock:
                already_seen = ipp in seen_ipps
                if not already_seen:
                    seen_ipps.add(ipp)
            if already_seen:
                continue
            #Queue up a check for each proxy type
            for ptype in PTYPES:
                self.task_queue.put((ipp, source, ptype))
        #Count each distinct ip (ignoring port) once for the stats table
        with seen_ips_lock:
            if ip[0] not in seen_ips:
                seen_ips.add(ip[0])
                stats.increment(stats.FOUND, source)

class DBURLGenerator(_URLGenerator):
    """Feeds previously-databased proxies back into the check queue."""
    def __init__(self, taskqueue, dbthread, name="DB URL Generator"):
        """@param dbthread: the DBManager, whose db_list we wait for."""
        _URLGenerator.__init__(self, taskqueue, name=name)
        self.dbthread = dbthread

    def run(self):
        #"!" means the user explicitly disabled the database
        db_path = config.get(CONFIG_MAINSECTION, "sqlite_db")
        if db_path != "!":
            #Block until the DB manager has loaded its (ip, port) set
            self.dbthread.have_list.wait()
            ip_list = self.dbthread.db_list
            #Queue every known proxy for a re-check
            for ip in ip_list:
                self.queue(ip)
            verbose("Successfully retrieved %i IP addresses from "
                    "database"%len(ip_list), self.name)
        debug("[%s] Terminating."%self.name)

    def queue(self, ip):
        """Queue one databased (ip, port) pair, tagged with the DB source."""
        _URLGenerator.queue(self, ip, DATABASE)
        debug("[%s] Queued databased ip %s."%(self.name, ip[0]))
            
class ScrapingURLGenerator(_URLGenerator):
    """Scrapes each configured URL for ip[:port] strings and queues them."""
    def run(self):
        #Get URLs to scrape from the config file (one CSV line)
        successful = 0
        try:
            urllist = csv.reader([config.get(CONFIG_MAINSECTION, 
                                             "scrape_urls")]).next()
        except Exception as e:
            debug("[%s] Could not read scrape_urls: %s: %s."%(self.name,
                                                              type(e), e))
            urllist = []
        for url in urllist:
            #Don't bother with an empty url
            if len(url) == 0:
                continue
            #One failing URL must not abort the remaining ones (the
            #original try wrapped the whole loop)
            try:
                #Get a list of ip addresses from the url
                ip_list = self.scrape(url)
                verbose("Got %i ip addresses from %s."%(len(ip_list), url), 
                        self.name)
                #Check each ip we've not checked
                for ip in ip_list:
                    self.queue(ip, SCRAPED)
                    debug("[%s] Queued scraped ip %s."%(self.name, ip[0]))
                successful += 1
            except Exception as e:
                verbose("Unable to scrape %s."%url, self.name)
                #BUGFIX: the original format string had four placeholders
                #but only three arguments, raising TypeError here
                debug("[%s] %s gave %s: %s."%(self.name, url,
                                              str(type(e)), e))
        debug("[%s] Terminating."%self.name)
        
                    
    def scrape(self, url):
        """Scrape one URL for ip addresses.
        Returns an iterable of (ip, port) tuples found on the page; port
        is "" when the page listed a bare address."""
        verbose("Scraping %s"%(url), self.name)
        try:
            opener = urllib2.build_opener()
            opener.addheaders = [('User-agent', config.get(CONFIG_MAINSECTION, 
                                                           "user_agent"))]
            response = opener.open(url).read()
            
            #Two capture groups yield (ip, port) tuples; a set dedupes them
            ip_list = set(re.findall(
                r"(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}):?(\d*)", response))
            for ip in ip_list:
                #Debug-log each find with its port (if any) attached
                port = ":" + ip[1] if len(ip[1]) > 0 else ""
                debug("[%s] Found %s on %s"%(self.name, ip[0] + port, url))
        except (urllib2.URLError) as e:
            if config.getboolean(CONFIG_MAINSECTION,"debug"):
                debug("[%s] %s gave error %s"%(self.name,url,e))
            else:
                verbose("Unable to scrape %s.  Is nthreads too high?"%url, 
                        self.name)
            ip_list = []
        return ip_list

class RandomURLGenerator(_URLGenerator):
    """Produces bursts of random IPv4 addresses whenever the task queue
    runs dry; only active when check_random is enabled."""
    def run(self):
        random.seed()
        while config.getboolean(CONFIG_MAINSECTION, "check_random"):
            #Block until the task queue has (nearly) drained before
            #producing another burst, so random checks don't starve the
            #scraped/databased sources.  Queue.not_full is signalled on
            #every get(), so this wakes as the queue empties.
            if self.task_queue.qsize() >= 2:
                with self.task_queue.not_full:
                    self.task_queue.not_full.wait()
            if self.task_queue.qsize() > 1:
                continue
            #Queue one random address per checker thread
            nthreads = config.getint(CONFIG_MAINSECTION, "nthreads")
            for _ in xrange(nthreads):
                octets = [str(random.randint(1, 255)) for _ in range(4)]
                ip = (".".join(octets), "")
                self.queue(ip, RANDOM)
                debug("[%s] Queued random ip %s."%(self.name, ip[0]))
            verbose("Queued %i random ip addresses"%nthreads, self.name)
        debug("[%s] Terminating."%self.name)

#Check subnets as needed.
class SubnetURLGenerator(_URLGenerator):
    """Queues a sweep of every host in a /24 once a proxy is found in it."""
    def __init__(self, task_queue, subnetqueue, name="Subnet URL Generator"):
        """@param subnetqueue: queue of (ip[:port], source) trigger items."""
        _URLGenerator.__init__(self, task_queue, name)
        self.seen_subnets = set() #/24 prefixes already swept
        self.seen_subnets_lock = threading.Lock()
        self.subnetqueue = subnetqueue
        
    def run(self):
        while True:
            #Each item is (ip[:port] string, source)
            found = self.subnetqueue.get()
            with self.seen_subnets_lock:
                #Split into ("a.b.c.", port-or-"")
                prefix, port = re.findall(r"(\d+\.\d+\.\d+\.)\d+:?(\d+)?",
                                          found[0])[0]
                #Sweep each /24 at most once
                if prefix in self.seen_subnets:
                    continue
                debug("[%s] Checking subnet %s0/24."%(self.name, str(prefix)))
                #Queue every host address in the /24
                for host in range(256):
                    self.queue((prefix + str(host), port), SUBNETS)
                self.seen_subnets.add(prefix)

#Thread to handle reports
class ReportHandler(threading.Thread):
    """Consumes check reports: announces/prints working proxies and
    triggers follow-up work (DB insert, subnet sweep)."""
    def __init__(self, reportqueue, dbputqueue, subnetqueue,
                 name="Report Handler"):
        """@param reportqueue: queue of
                  (works, ip, ptype, timestamp, source) tuples.
        @param dbputqueue: queue drained by DBManager; gets (ip, port, ptype).
        @param subnetqueue: queue drained by SubnetURLGenerator;
                  gets (ip[:port], source)."""
        threading.Thread.__init__(self, name=name)
        self.reportqueue = reportqueue
        self.daemon = True
        self.db_conn = None
        self.dbputqueue = dbputqueue
        self.subnetqueue = subnetqueue
            
    def run(self):
        while True:
            #(Working?, ip:port, ptype, timestamp, source)
            report = self.reportqueue.get()
            works, ip, ptype, _timestamp, source = report
            #Non-working proxies need no follow-up
            if works is not True:
                continue
            stats.increment(stats.WORKING, source)
            #Announce through the verbose machinery when it's active,
            #otherwise write plainly to stdout so output is pipeable.
            #(The original had a stray dead `pass` before verbose().)
            if (config.getboolean(CONFIG_MAINSECTION, "verbose") or
                config.getboolean(CONFIG_MAINSECTION, "debug")):
                verbose("Found working proxy %s"%ip, self.name)
            else:
                sys.stdout.write("%s\n"%(ip))
                sys.stdout.flush()

            #Record it in the database unless the DB is disabled ("!")
            if config.get(CONFIG_MAINSECTION, "sqlite_db") != "!":
                ipp = split_ip(ip)
                #(ip, port, ptype)
                self.dbputqueue.put((ipp[0], ipp[1], ptype))
            #Sweep the whole /24 if configured to
            if config.getboolean(CONFIG_MAINSECTION, 
                                 "check_entire_subnet"):
                self.subnetqueue.put((ip, source))
                    
#Thread to print debug statements nicely
class DebugPrinter(threading.Thread):
    """Serializes debug messages onto stderr; a None message shuts it down."""
    def __init__(self, debugqueue, name="Debug Printer"):
        threading.Thread.__init__(self, name=name)
        self.debugqueue = debugqueue
        self.daemon = True
    def run(self):
        #Do nothing at all unless debugging is enabled
        while config.getboolean(CONFIG_MAINSECTION, "debug"):
            msg = self.debugqueue.get()
            #None is the shutdown sentinel
            if msg is None:
                return
            sys.stderr.write("%s\n"%msg)
            sys.stderr.flush()

#Thread to print verbose messages
class VerbosePrinter(threading.Thread):
    """Maintains a scrolling status display: a fixed-size window of recent
    messages plus the stats table, redrawn in place with ANSI escapes."""
    def __init__(self, verbosequeue, name="Verbose Printer"):
        """@param verbosequeue: queue of message strings; None shuts down."""
        threading.Thread.__init__(self,name=name)
        self.verbosequeue = verbosequeue
        self.daemon = True
        #Scrollback deque sized so messages + the stats table together fit
        #within status_lines; maxlen makes old lines fall off automatically
        self.printqueue = collections.deque(
            maxlen=config.getint(CONFIG_MAINSECTION, "status_lines") - 
                                 len(stats.stats_str().split("\n")) - 1)
        self.pqlock = threading.Lock()
        with self.pqlock:
            #Pre-fill with blanks (the deque discards the overflow) so the
            #whole display area is painted from the first update
            for _ in xrange(config.getint(CONFIG_MAINSECTION, "status_lines") + 10):
                self.printqueue.append(" ")
        self.verbosequeue = verbosequeue
        self.stop_event = threading.Event()
        #Clear screen if we're actually verbose
        if config.getboolean(CONFIG_MAINSECTION, "verbose"):
            osname = os.name
            if osname == 'posix':
                os.system('clear')
            elif osname == 'nt' or osname == 'dos':
                os.system('cls')
            else:
                #Unknown platform: just push old output off the screen
                sys.stdout.write("\n" * 24)
                sys.stdout.flush()
            #Hide the cursor (ANSI DECTCEM); restored on shutdown in run()
            sys.stdout.write("\033[?25l")
            sys.stdout.flush()

    def run(self):
        while config.getboolean(CONFIG_MAINSECTION, "verbose"):
            #Get a printable item
            msg = self.verbosequeue.get()
            #None is the shutdown sentinel: repaint one last time, forward
            #the sentinel to the debug printer, and restore the cursor
            if msg is None:
                self.update(dont_return=True)
                debug(None)
                sys.stdout.write("\033[?25h")
                sys.stdout.flush()
                return #Done sir done!
            #Add the item; the deque's maxlen drops the oldest line
            self.pqlock.acquire()
            self.printqueue.append(msg)
            self.pqlock.release()
            self.update()
    
    def update(self, dont_return=False):
        """Repaint the status area.
        @param dont_return: leave the cursor at the bottom (final paint)
        instead of jumping back to the top-left for the next repaint."""
        if (config.getboolean(CONFIG_MAINSECTION, "verbose")):
            with self.pqlock:
                for item in self.printqueue:
                    #\033[K clears the rest of the line (erases stale text)
                    sys.stdout.write(item + "\033[K\n")
                    sys.stdout.flush()
                #Print the stats
                statsstr = stats.stats_str()
                for line in statsstr.split("\n"):
                    sys.stdout.write(line + "\033[K\n")
                    sys.stdout.flush()
                if not dont_return:
                    sys.stdout.write("\033[;H") #Return to the top
                    sys.stdout.flush()
            
#Class to keep hold of stats        
class StatsClass(object):
    """Thread-safe counters of found/checked/working proxies per IP source,
    plus a formatter that renders them as a fixed-width table."""
    def __init__(self, taskqueue):
        """@param taskqueue: the main task queue (its size is reported)."""
        #Counter names ("whats") that can be incremented
        self.CHECKED = "Checked"
        self.FOUND = "Found"
        self.WORKING = "Working"
        self._whats=[self.FOUND, self.CHECKED, self.WORKING]
        self.lock = threading.Lock() #All write methods should use this
        self.stat_d = None #what -> source -> count; built by zero_stats()
        self.zero_stats()
        self.taskqueue = taskqueue
   
    #Incrementors.  Each takes a source to increment
    def increment(self, what, source):
        """Atomically bump counter `what` for `source` (both must exist)."""
        with self.lock:
            self.stat_d[what][source] += 1
            
    #Reset counters to zero
    def zero_stats(self):
        """Rebuild stat_d with a zero for every (what, source) pair."""
        with self.lock:
            #Zero the accounting table (dict)
            r = {}
            for s in self._whats:
                r[s] = {}
                for source in IP_SOURCES:
                    r[s][source] = 0
            self.stat_d = r


    def stats_str(self):
        """Render the counters as a multi-line table.  The percentage
        columns show each source's share of that counter's total.
        NOTE: the row format string hardcodes three counter columns, so it
        assumes _whats has exactly three entries."""
        with self.lock: #Don't want things messing up the numbers mid-print
            #Width needed for the first (source name) field
            sstrlen = str(len(max(IP_SOURCES, key=len)))
            ostr = "="*80#TODO Finish this
            ostr += "\n"
            ostr += " "*(int(sstrlen) + 2)
            #Column headers
            for w in self._whats:
                ostr += " "*15
                ostr += w
            ostr += "\n"
            f = "".join(["%", sstrlen,
                         "s %10i (%7.3f%%) %10i (%7.3f%%) %10i (%7.3f%%)\n"])
            #Calculate totals
            totals = {s:0 for s in self._whats}
            for source in IP_SOURCES:
                for what in self._whats:
                    totals[what] += self.stat_d[what][source]
            #Print out each source
            for source in IP_SOURCES:
                p = [source]
                for what in self._whats:
                    count = self.stat_d[what][source]
                    p.append(count)
                    p.append(self.safedivide(float(count), float(totals[what])))
                ostr += f%tuple(p)
            #Print totals
            t = [totals[w] for w in self._whats]
            t.insert(0,"Total")
            f= "%" + sstrlen + "s %10i            %10i            %10i\n"
            ostr += f%tuple(t)
            ostr += "Queued Checks: %i"%self.taskqueue.qsize()
        return ostr
    
    def safedivide(self, n, d):
        """Return n/d as a percentage; 0.0 when d is zero."""
        try:
            return float(n)/float(d)*100.
        except ZeroDivisionError:
            return 0.
    
#Database connection
class DBManager(threading.Thread):
    """Owns the sqlite database: loads the known-proxy list on startup and
    commits newly found proxies as they arrive on dbputqueue."""
    def __init__(self, dbputqueue, name="DB Manager"):
        """@param dbputqueue: queue of (ip, port, ptype) rows to persist."""
        threading.Thread.__init__(self, name=name)
        self.db_lock = threading.Lock()
        self.db_conn = None
        self.db_path = config.get(CONFIG_MAINSECTION, "sqlite_db")
        self.daemon = True
        self.dbputqueue = dbputqueue
        #Set once db_list has been populated, so DBURLGenerator can proceed
        self.have_list = threading.Event()
        self.db_list = set() #Set of (ip, port) rows already in the DB
    def run(self):
        #"!" means the user explicitly disabled the database
        if self.db_path == "!":
            return
        
        #Connect to the database
        self.db_connect()

        #Die if we don't have a database
        if self.db_conn is None:
            return

        #Get a list of proxies in the database
        db_cur = self.db_conn.execute("SELECT ip, port "
                                      "FROM working_proxies;")
        #Roll it into a set and release anyone waiting on have_list
        self.db_list = set()
        for i in db_cur:
            self.db_list.add(i)
        self.have_list.set()

        #Commit found IPs when they come in
        while True:
            #(ip, port, ptype)
            ippt = self.dbputqueue.get()
            with self.db_lock:
                self.db_conn.execute("REPLACE INTO working_proxies "
                                 "(ip, port, type) VALUES (?,?,?)", ippt)
                self.db_conn.commit()
                debug("[%s] Put %s://%s:%s into database."%(self.name, 
                                                            ippt[2], ippt[0], 
                                                            ippt[1]))
    #Connect to database
    def db_connect(self):
        """Open self.db_path, creating its directory and schema as needed.
        On any failure self.db_conn is left as None."""
        try: #Connect to the database
            #Path (excluding file) portion
            p = os.path.dirname(self.db_path)
            #BUGFIX: dirname() of a bare filename is "", and os.makedirs("")
            #raises, which made every relative db path (including the
            #default "pyproxyfinder.db") fail to connect.  Only create a
            #directory when there actually is one.
            if p and not os.path.isdir(p):
                os.makedirs(p)
            #Connect to the database
            self.db_conn = sqlite3.connect(self.db_path)
        except Exception as e:
            if config.getboolean(CONFIG_MAINSECTION, "verbose"):
                verbose("Unable to connect to sqlite3 database.", 
                        self.name)
            else:
                debug("[%s] Unable to connect to database at %s.  SQLite "
                      "raised %s: %s."%(self.name, self.db_path, 
                                        type(e), e))
            #The connection failed whichever way we reported it
            self.db_conn = None
        if self.db_conn is not None:
            #Ensure we have a table set up
            self.db_conn.execute("CREATE TABLE IF NOT EXISTS working_proxies "
                            "(ip TEXT, port TEXT, type TEXT, "
                            "timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP, "
                            "UNIQUE(ip, port, type))", ())
            self.db_conn.commit()

    #Print out list of ip addresses
    def print_list(self):
        """Dump every databased proxy to stdout as ptype://ip:port lines."""
        self.db_connect()
        if self.db_conn is None: return
        #Get a list of proxies in the database
        db_cur = self.db_conn.execute("SELECT type, ip, port "
                                      "FROM working_proxies;")
        #Roll it into a set of printable lines (dedupes as a side effect)
        self.db_list = set()
        for i in db_cur:
            self.db_list.add("%s://%s:%s\n"%i)
            
        #Print the list
        for i in self.db_list:
            sys.stdout.write(i)
        sys.stdout.flush()
       

#Program entry point: build everything, start it, and babysit it
def main():
    """Parse config, spin up all queues and worker threads, then supervise
    until every finite IP producer has exited and the queues have drained
    (or the user hits Ctrl-C)."""
    #Parse arguments
    cmdconf = get_arguments()
    #Read Config data, including command line input
    get_config(cmdconf)
    
    #Queues
    #Taskqueue gets (ip:port, SOURCE, ptype)
    taskqueue = Queue.Queue()
    #Reportqueue gets (Working?, ip:port, ptype, timestamp, source)
    reportqueue = Queue.Queue()
    #Subnetqueue gets (ip[:port], source)
    subnetqueue = Queue.Queue()
    #dbputqueue gets (ip, port, ptype)
    dbputqueue = Queue.Queue()
    verbose("Starting", "Main")

    #Keep stats (global: every worker thread increments these counters)
    global stats
    stats = StatsClass(taskqueue)


    debug("[%s] Made queues."%"Main")    
    #Threads
    debug("[Main] Making threads.")
    db_manager = DBManager(dbputqueue, name="DB Manager")
    db_url_generator = DBURLGenerator(taskqueue, db_manager,
                                      name="DB URL Generator")
    scraping_url_generator = ScrapingURLGenerator(taskqueue,
                                                  name="Scraping URL Generator")
    random_url_generator = RandomURLGenerator(taskqueue,
                                              name="Random URL Generator")
    subnet_url_generator = SubnetURLGenerator(taskqueue, subnetqueue)
    debug_printer = DebugPrinter(debugqueue)
    verbose_printer = VerbosePrinter(verbosequeue)
    ownIPChecker = OwnIPChecker(name="Own IP Checker")
    reporthandler = ReportHandler(reportqueue, dbputqueue, subnetqueue, 
                                  name="Report Handler")
    #TODO: Recheck all known ips and urls periodically

    debug("[Main] Starting threads.")
    #Start the verbose printer
    verbose_printer.start()
    #Start the debug printer
    debug_printer.start()
    #Get our own IP Address
    ownIPChecker.start()
    #Read reports
    reporthandler.start()
    #Manage the DB
    db_manager.start()
    
    #Into the task queue should be put ip:port strings
    #into the report queue should be put (works, ip, type, dtg, SOURCE) tuples
    checkerpool = []
    for x in xrange(config.getint(CONFIG_MAINSECTION, "nthreads")):
        t = ProxyChecker(taskqueue, reportqueue, name="Checker-%i"%x)
        t.start()
        checkerpool.append(t)
        
    #TODO: Section of config for proxies
    #The finite IP producers: when all have exited and the queues are
    #empty, the program is done.
    ip_producers = []
    ip_producers.append(db_url_generator)
    ip_producers.append(scraping_url_generator)
    ip_producers.append(random_url_generator)
    
    #Get sources
    subnet_url_generator.start() #This one can die
    for p in ip_producers: p.start()
        
    debug("[Main] Entering main loop.")
    try:
        #Time to wait between verbose updates
        wait_time = config.getfloat(CONFIG_MAINSECTION, 
                                    "verbose_update_interval")
        #Don't die until we've no more production and no more tasks
        while ((len(ip_producers) > 0) or
               (taskqueue.qsize() > 0) or 
               (subnetqueue.qsize() > 0)):
            #NOTE(review): removing from ip_producers while iterating it
            #can skip a producer until the next pass of the while loop --
            #harmless here, but worth fixing.
            for p in ip_producers: #Check each producer for death
                p.join(wait_time / len(ip_producers)) #Wait for it...
                if not p.is_alive(): #If it's dead, 
                    ip_producers.remove(p)
                    verbose_printer.update()
            if len(ip_producers) == 0:
                time.sleep(wait_time)
            verbose_printer.update()
    except KeyboardInterrupt:
        verbose("Terminating", "Main")
        pass
    if (len(ip_producers) == 0) and (taskqueue.qsize() == 0):
        verbose("All tasks completed", "Main")
    try:
        #None is the shutdown sentinel for both printer threads
        verbose(None, None)
        debug(None)
    except:
        pass
    verbose_printer.join(1)
    debug_printer.join(1)
    
#Handle command line arguments
def get_arguments():
    """Build the command-line parser and return the parsed Namespace.
    Options default to None (or "" for --sqlite-db) so get_config() can
    tell "not given" apart from an explicit value."""
    #TODO: Re-arrange into standard and advanced sections
    parser = argparse.ArgumentParser(description='Find anonymous proxies.')
    parser.add_argument("--nthreads", "-n", type=int, 
                        help="Number of proxy-checking threads to run in "
                        "parallel.  Setting this too high may cause strange "
                        "behavior.")
    parser.add_argument("--configfile", "-c", type=str, 
                        help="Config file.  If not specified the default "
                        "locations will be searched for config files.  If "
                        "set, the config file specified may be updated with "
                        "options specified on the command line.")
    parser.add_argument("--verbose", "-v", action="store_true", default=None,
                        help="Print verbose messages.")
    parser.add_argument("--debug", "-d", action="store_true", default=None,
                        help="Print debugging messages.")
    parser.add_argument("--ports", "-p",
                        help="Comma-separated list of ports to check.")
    parser.add_argument("--ipcheck-url", "-o",
                        help="URL to query to determine own IP address.")
    parser.add_argument("--ipcheck-interval", "-i",
                        help="Interval in seconds to check our IP address.")
    parser.add_argument("--check-random", "-r", action="store_true",
                        default=None,
                        help="Check random IP Addresses for proxy servers.")
    parser.add_argument("--scrape-urls", "-u",
                        help="Comma-separated list of URLs to scrape for "
                        "lists of proxy servers.")
    parser.add_argument("--timeout", "-t", type=float,
                        help="Timeout in seconds for each proxy test.")
    parser.add_argument("--check-entire-subnet", "-e", action="store_true",
                        default=None,
                        help="When a proxy is found, check all other ip "
                        "addresses on the same /24 subnet.")
    parser.add_argument("--user-agent", "-a",
                        help="Set the user agent to use.")
    parser.add_argument("--status-lines", "-l",
                        help="Number of verbose status lines to print.")
    parser.add_argument("--verbose-update-interval", "-w", type=float,
                        help="Interval in seconds between verbose updates.")
    parser.add_argument("--sqlite-db", "-s", type=str, nargs="?", const="", 
                        default="",
                        help="Database in which to store working "
                        "proxies.  If none is specified, the default "
                        "database will be used.  The special value \"!\" "
                        "indicates no database should be used.  By default, "
                        "the default database will be used (as opposed to "
                        "no database).")
    parser.add_argument("--list-proxies", '-g', action="store_true",
                        default=False,
                        help="Only display a list of databased proxies.")
    return parser.parse_args()

#Reads config file
def get_config(cmdconf):
    """Merge configuration from (lowest to highest precedence) the built-in
    defaults, any config file found on CONFIG_PATHLIST, and the command
    line; then handle the special-cased options.
    @param cmdconf: the argparse.Namespace returned by get_arguments()."""
    #Update the pathlist with a file possibly specified on the command line
    if cmdconf.configfile is not None:
        CONFIG_PATHLIST.append(cmdconf.configfile)
    #Read in the config file(s); missing paths are silently skipped
    config.read(CONFIG_PATHLIST)

    #Make sure the config file has a main section
    if not config.has_section(CONFIG_MAINSECTION):
        config.add_section(CONFIG_MAINSECTION)
    #For each config item (key in CONFIG_DEFAULTS), first try the command line,
    #then try the ini file, then use the default
    for item in CONFIG_DEFAULTS.keys():
        if hasattr(cmdconf, item) and getattr(cmdconf, item) is not None:
            config.set(CONFIG_MAINSECTION, item, str(getattr(cmdconf, item)))
        elif config.has_option(CONFIG_MAINSECTION, item):
            continue
        else:
            config.set(CONFIG_MAINSECTION, item, str(CONFIG_DEFAULTS[item]))

    #Handle special configuration items
    #Sqlite DB: "!" disables it, a path selects one, "" means pick a default
    sqldbloc = getattr(cmdconf, "sqlite_db")
    #If we got a ! or a path, put that in.
    if len(sqldbloc) > 0:
        config.set(CONFIG_MAINSECTION, "sqlite_db", sqldbloc)
    if sqldbloc == "": #Default location, see which we have
        #Prefer the first default path that already exists; if none does,
        #the last candidate is left configured so it will be created.
        for fname in DB_PATHLIST:
            config.set(CONFIG_MAINSECTION, "sqlite_db", fname)
            if os.path.isfile(fname):
                break
    
    #Print out the db and exit if requested
    if cmdconf.list_proxies:
        dbm = DBManager(None, name="DB Printer")
        dbm.print_list()
        sys.exit()
        
    #Turn the ports into a list, dropping empty entries.
    #BUGFIX: the original removed items from the list while iterating it,
    #which skips the element after each removal.
    config.portlist = [port for port in
                       csv.reader([config.get(CONFIG_MAINSECTION, 
                                              "ports")]).next()
                       if len(port) > 0]
                                                 
    #Write the merged config back if a config file was given.
    #BUGFIX: close the file handle instead of leaking it.
    if cmdconf.configfile is not None:
        with open(cmdconf.configfile, "w") as cfg_file:
            config.write(cfg_file)
#Prints debugging messages to stderr if debug is set
def debug(s):
    """Queue s for the DebugPrinter thread; a no-op unless debug is on.
    None is passed through as the printer's shutdown sentinel."""
    if config.getboolean(CONFIG_MAINSECTION, "debug"):
        debugqueue.put(s)
#Prints interesting messages as appropriate
def verbose(s, name="Unknown!"):
    """Queue s for the VerbosePrinter thread (and echo it via debug, which
    is useful for logs).  A None s is the printers' shutdown sentinel and
    is forwarded untouched."""
    debug(None if s is None else "[%s] "%name + str(s))
    #Only queue the message when verbose output is enabled
    if config.getboolean(CONFIG_MAINSECTION, "verbose"):
        verbosequeue.put(s)
#Split an IP:port string into (ip, port)
def split_ip(s):
    """Return (ip, port) for the first dotted quad found anywhere in s
    (e.g. "http://1.2.3.4:80" -> ("1.2.3.4", "80")).  port is "" when no
    port follows the address; raises IndexError if s has no dotted quad."""
    matches = re.findall(r"(\d+\.\d+\.\d+\.\d+):?(\d+)?", s)
    first_ip, first_port = matches[0]
    return (first_ip, first_port)


if __name__ == '__main__':
    main()
