from client import SiftAgent
from queue import RunQueue, NoJobsError
from urlparse import urlunparse
from job import SiftJob


# Seed workload for the engine: a handful of BBC article URLs, repeated
# five times to give the queue 45 jobs.  Presumably a test/demo fixture —
# TODO confirm this module is not used in production with this list baked in.
urls = ['http://www.bbc.co.uk/news/world-middle-east-12377179',
        'http://www.bbc.co.uk/news/uk-wales-south-east-wales-12373149',
        'http://www.bbc.co.uk/news/world-12354346',
        'http://www.bbc.co.uk/news/uk-12374262',
        'http://www.bbc.co.uk/1/hi/programmes/from_our_own_correspondent/9387520.stm',
        'http://www.bbc.co.uk/news/uk-wales-12373754',
        'http://www.bbc.co.uk/news/entertainment-arts-12375556',
        'http://www.bbc.co.uk/news/uk-england-london-12378430',
        'http://www.bbc.co.uk/news/world-europe-12375646']*5

class SiftEngine():

    def __init__(self, reactor, queue, agent, consumer, maxTransactions):
        self.reactor = reactor
        self.queue = queue
        self.agent = agent
        self.consumer = consumer
        self.maxTransactions = maxTransactions
        self.openTransactions = 0
        self.running = 0

        self.added = 0
        self.back = 0
        
        for url in urls:
            self.queue.add_job(SiftJob(url))

    def start(self):
        self.running = 1
        self.process_queue()
        self.reactor.run()

    def stop(self):
        self.running = 0
        print "Shutting down..."
        while self.openTransactions != 0:
            print "[%d::%d::%d]" % (self.added,
                                    self.back,
                                    self.openTransactions)
        self.reactor.stop()

    def process_queue(self):
        while self.running and self.openTransactions <= self.maxTransactions:
            try:
                job = self.queue.provide_job()
                print "[%d::%d::%d] Added: %s" % (self.added,
                                                  self.back,
                                                  self.openTransactions,
                                                  job.host)
            except NoJobsError:
                self.stop()
            else:
                self.added +=1
                self.openTransactions +=1
                d = self.agent.request('GET', job.url)
                d.addCallback(self.agent.cbRequest)
                d.addCallback(self.feed_consumer)

    def feed_consumer(self, response):
        self.back +=1
        self.openTransactions -=1
        self.process_queue()
        print "Got %d!" % (len(response),)
