"""Classes related to downloading data from the Eve API."""
import os
import httplib, urllib
import threading, Queue
import time

import util

FETCH_PERIODIC = 1
FETCH_NEVER = 2
FETCH_TYPES = (FETCH_PERIODIC, FETCH_NEVER)

class PoolThread(threading.Thread):
    """Thread that manages a number of FetchThreads. Periodically inserts
 FetchItems into the fetch_pool for the FetchThreads, and handles requests
 for global URL refetches."""
    def __init__(self, prefix, num_fetch=3, interval=600):
        threading.Thread.__init__(self)
        # Directory under which all fetched files are saved
        self.prefix = prefix
        # Number of fetch threads to start, only need a few
        self.num_fetch = num_fetch
        # Interval between automatic updates, in seconds
        self.interval = interval
        # Holds the fetch threads
        self.fetch_threads = []
        # Work queue consumed by the FetchThreads
        self.fetch_pool = Queue.Queue()
        # Holds FetchItems registered via pushURL()
        self.fetch_items = []
        # Holds urls of fetched items, to alert when things are fetched
        self.fetched = set()
        # Set by refetch(); makes run() push items into fetch_pool
        self.refetch_event = threading.Event()
        # Set by refetch(get_all=True) in addition to refetch_event;
        # forces ALL items to be refetched on the next pass
        self.g_event = threading.Event()

        # kill switch, checked once per loop iteration in run()
        self.is_alive = True

    def run(self):
        """Main run loop for the PoolThread. Starts the FetchThreads, then
periodically (or on refetch()) queues FetchItems for download."""
        # Create and start the fetch threads
        for _ in xrange(self.num_fetch):
            thread = FetchThread(self)
            self.fetch_threads.append(thread)
            thread.start()

        last_update = 0  # Force a fetch on the first iteration

        while self.is_alive:
            # Check for auto update and refetch event
            auto = time.time() - last_update > self.interval
            if self.refetch_event.isSet() or auto:
                # Iterate over a snapshot: pushURL() may append to
                # fetch_items from another thread while we walk it.
                for itm in list(self.fetch_items):
                    # Fetch if it's a periodic url, or a global refetch
                    # or if it is marked "fetch_now"
                    if (auto and itm.fetch is FETCH_PERIODIC) or \
                            self.g_event.isSet() or itm.fetch_now:
                        self.fetch_pool.put(itm)
                        itm.fetch_now = False
                # Wait for all the items to be fetched
                self.fetch_pool.join()
                # Reset
                last_update = time.time()
                self.refetch_event.clear()
                # BUG FIX: g_event was never cleared, so a single global
                # refetch caused every later pass to refetch everything.
                self.g_event.clear()
            time.sleep(1)

    def pushURL(self, url, params=None, fetch=FETCH_NEVER, fetch_now=False):
        """Pushes a URL to be fetched into the fetch pool. The presence
of params indicates that the url is character specific. The fetch
parameter determines whether a URL should be regularly fetched, like
the current Skill in Training. fetch_now determines if the item is to
be fetched on the next loop iteration."""

        # Fetch permanent files immediately if they don't exist on disk
        filename = util.xmlpath(self.prefix, url, params)
        if fetch is FETCH_NEVER:
            if not os.path.isfile(filename):
                fetch_now = True
            else:  # Add into self.fetched if already exists to clear wait()
                self.fetched.add(url)
        itm = FetchItem(url, filename, params, fetch, fetch_now)
        self.fetch_items.append(itm)

    def refetch(self, get_all=False):
        """Sets some or all of the fetch_items to be refetched."""
        # Makes run() push urls into fetch_pool on its next pass
        self.refetch_event.set()
        if get_all:
            self.g_event.set()

    def kill(self):
        """Kills the PoolThread and all of its FetchThreads."""
        for thread in self.fetch_threads:
            thread.kill()
        self.is_alive = False

    def wait(self, url, timeout=9999):
        """Waits until the requested item shows up in "fetched". Takes
an optional timeout parameter, specified in seconds.

Raises Exception if the url is not fetched within the timeout."""
        # Poll once per second; the FetchThreads add urls to self.fetched
        while url not in self.fetched:
            time.sleep(1)
            timeout -= 1
            if timeout < 0:
                raise Exception("URL could not be fetched, timing out.")
        # Consume the notification so a later wait() on the same url blocks
        self.fetched.remove(url)


class FetchItem(object):
    """Class that holds a URL to be put in the fetch pool and defines
 how often the URL is fetched, and if it is to be fetched immediately."""
    def __init__(self, url, filename, params, fetch, fetch_now):
        """url: path on the API server to POST to.
filename: local path the response is saved to.
params: dict of POST parameters, or None for character-independent urls.
fetch: one of FETCH_TYPES (FETCH_PERIODIC or FETCH_NEVER).
fetch_now: if True, fetch on the next PoolThread loop iteration.

Raises ValueError if fetch is not a known fetch type."""
        if fetch not in FETCH_TYPES:
            # ValueError is the precise error for a bad argument and is
            # still caught by any existing `except Exception` handler.
            raise ValueError("Invalid fetch type %s" % fetch)
        self.url = url
        self.params = params
        self.fetch = fetch
        self.fetch_now = fetch_now
        self.filename = filename


class FetchThread(threading.Thread):
    """Thread that watches the fetch_pool of a PoolThread, waiting for
FetchItems to fetch from the Eve API. It reports fetched items back
into the parent's "fetched" set."""
    def __init__(self, pool):
        """pool: the owning PoolThread, providing fetch_pool (work queue)
and fetched (completion set)."""
        threading.Thread.__init__(self)
        self.pool = pool

        # kill switch, checked once per loop iteration in run()
        self.is_alive = True

    def run(self):
        """Main run loop for the FetchThread. Blocks until a FetchItem
shows up in the fetch_pool, fetches it, and then alerts the PoolThread."""

        headers = {"Content-type": "application/x-www-form-urlencoded"}
        conn = httplib.HTTPConnection("api.eve-online.com")

        while self.is_alive:
            # Poll with a timeout so kill() can take effect even while the
            # queue is empty; a bare blocking get() would never return and
            # the thread could not be stopped.
            try:
                itm = self.pool.fetch_pool.get(True, 1)
            except Queue.Empty:
                continue

            try:
                self._fetch(conn, headers, itm)
            except Exception:
                # Best-effort: a failed download must not kill the worker.
                # The url is not added to "fetched", so wait() will time
                # out rather than report success.
                pass
            else:
                # Put it in the fetched set to release wait()ers
                self.pool.fetched.add(itm.url)
            finally:
                # Always ack the item; otherwise a failure would leave
                # fetch_pool.join() in the PoolThread blocked forever.
                self.pool.fetch_pool.task_done()

    def _fetch(self, conn, headers, itm):
        """Downloads itm.url via POST on conn and saves the response body
to itm.filename. Raises on network or filesystem errors."""
        conn.connect()
        # BUG FIX: params may be None for character-independent urls and
        # urllib.urlencode(None) raises TypeError; send an empty body.
        body = urllib.urlencode(itm.params) if itm.params else ""
        conn.request("POST", itm.url, body, headers)
        response = conn.getresponse()
        data = response.read()
        conn.close()

        # Create the target directory; it may already exist, or another
        # fetch thread may create it concurrently -- both raise OSError.
        try:
            os.makedirs(os.path.split(itm.filename)[0])
        except OSError:
            pass
        # Write the file, closing the handle even if write() fails
        handle = open(itm.filename, "w")
        try:
            handle.write(data)
        finally:
            handle.close()

    def kill(self):
        """Kills the FetchThread."""
        self.is_alive = False
