#!/usr/bin/env python

import Queue
import threading
import time
import xmlrpclib

def test_server(ip):
    """Ask the portage XML-RPC service on host `ip` (port 1591) to do a
    pretend-install of gcc, and return the server's response."""
    proxy = xmlrpclib.ServerProxy('http://%s:1591' % ip)
    return proxy.portage_pretend('gcc')

# IP addresses of the XML-RPC build servers to query (see test_server)
ips = ["192.168.0.101", "192.168.0.102"]

class ThreadUrl(threading.Thread):
    """Worker thread: repeatedly pulls a host IP from the input queue,
    queries that host's portage XML-RPC service via test_server(), and
    pushes the result onto the output queue for the printer thread."""
    def __init__(self, queue, out_queue):
        threading.Thread.__init__(self)
        self.queue = queue          # input queue of IP address strings
        self.out_queue = out_queue  # output queue of per-server results

    def run(self):
        while True:
            # grab the next host IP from the queue (blocks while empty)
            ip = self.queue.get()
            try:
                # query the remote server and hand the result to the consumer
                output = test_server(ip)
                self.out_queue.put(output)
            finally:
                # always mark the task done, even if test_server raised,
                # so queue.join() in main() cannot hang forever
                self.queue.task_done()

class GetoutputThread(threading.Thread):
    """Threaded Url Grab"""
    def __init__(self, out_queue):
        threading.Thread.__init__(self)
        self.out_queue = out_queue

    def run(self):
        while True:
            #grabs host from queue
            output = self.out_queue.get()

            #parse the chunk
            # soup = BeautifulSoup(chunk)
            # print soup.findAll(['title'])
            print output

            #signals to queue job is done
            self.out_queue.task_done()

# wall-clock start time; used to report elapsed time after main() returns
start = time.time()
def main(num_workers=2, num_printers=1):
    """Fan the hosts in `ips` out to a pool of worker threads and print
    each server's response as it arrives.

    num_workers  -- size of the ThreadUrl pool querying the servers
    num_printers -- number of GetoutputThread threads printing results

    Blocks until every IP has been queried and every result printed.
    """
    queue = Queue.Queue()      # IPs waiting to be queried
    out_queue = Queue.Queue()  # results waiting to be printed

    # spawn the worker pool; daemon threads so a hung worker cannot
    # keep the interpreter alive after main() returns
    for _ in range(num_workers):
        t = ThreadUrl(queue, out_queue)
        t.setDaemon(True)
        t.start()

    # populate the input queue with the hosts to query
    for ip in ips:
        queue.put(ip)

    # spawn the printer thread(s) that consume the results
    for _ in range(num_printers):
        dt = GetoutputThread(out_queue)
        dt.setDaemon(True)
        dt.start()

    # wait until all queries have finished, then until all results
    # have been printed
    queue.join()
    out_queue.join()

main()
print "Elapsed Time: %s" % (time.time() - start)
