import datetime
import gzip
import hashlib
import logging
import os.path
import pickle
import Queue
import random
import socket
import StringIO
import tempfile
import thread
import threading
import urllib2
import zipfile
from xml.dom import minidom

import settings

# Give every socket (including the urllib2 downloads below) a global timeout
# so a stalled server cannot hang a download indefinitely.
socket.setdefaulttimeout(settings.SOCKET_TIMEOUT)

class DefaultErrorHandler(urllib2.HTTPDefaultErrorHandler):
    """Error handler that returns HTTP error responses instead of raising.

    The HTTPError object is file-like, so callers can treat it as a normal
    response; the numeric code is attached as `.status` for inspection.
    """
    def http_error_default(self, req, fp, code, msg, headers):
        response = urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
        response.status = code
        return response


class updateWorker(threading.Thread):
    """Worker thread that pulls items off a queue and runs a named method
    on each one, reporting progress through a callback.

    The queue holds addon-like objects plus one "END" sentinel per worker;
    receiving "END" shuts the worker down.
    """
    def __init__(self, queue, function, callback):
        # queue    -- work queue (items plus one "END" sentinel per worker)
        # function -- name (string) of the method to invoke on each item
        # callback -- called with the item before and after the method runs
        self.q = queue
        self.function = function
        self.callback = callback
        return super(updateWorker, self).__init__()

    def run(self):
        running = True
        while running:
            item = self.q.get()
            try:
                if item == "END":
                    running = False
                else:
                    # runfunc set = "busy", cleared = "done"; the callback
                    # uses that to distinguish pre- and post-run reports.
                    item.runfunc = self.function
                    self.callback(item)
                    try:
                        getattr(item, self.function)()
                    except Exception:
                        # A failing item must not kill the worker; otherwise
                        # the remaining queue items are never processed and
                        # the progress count never completes.
                        logging.exception("Worker call %r failed", self.function)
                    item.runfunc = None
                    self.callback(item)
            finally:
                # Always account for the item so Queue.join() cannot hang.
                self.q.task_done()

class AddonTools(object):
    """Utility helpers: zipping addon folders, discovering installed addons
    and running a batch operation over them on worker threads."""

    def __init__(self):
        # qitems = number of queued items, qnr = number finished so far.
        self.qitems = self.qnr = 0

    def enzip(self, folder, filename):
        """Zip the contents of `folder` (recursively) into `filename`.

        Entries are stored under the folder's basename with forward slashes.
        Returns `filename`.
        """
        base = os.path.basename(folder)
        z = zipfile.ZipFile(filename, "w", zipfile.ZIP_DEFLATED)
        try:
            for path, dirs, files in os.walk(folder):
                #strip the original folder prefix; "" when at the root
                zpath = path[len(folder):].strip("/\\").replace("\\", "/")
                for f in files:
                    fpath = os.path.join(path, f)
                    if zpath:
                        zippath = str("%s/%s/%s" % (base, zpath, f))
                    else:
                        zippath = str("%s/%s" % (base, f))
                    logging.debug(u"Adding file '%s' as '%s'" % (fpath, zippath))
                    z.write(fpath, zippath)
        finally:
            #close even on error so a partly-written archive isn't leaked
            z.close()
        return filename

    def GetAddons(self, path, settings):
        """Check every subfolder of `path` for an update.xml and return a
        list of Addon objects for those that have one.

        Note: the `settings` parameter shadows the settings module here; it
        is the config object handed to each Addon.
        """
        addonlist = []
        for x in os.listdir(path):
            xml = os.path.join(path, x, "update.xml")
            if os.path.isfile(xml):
                addon = Addon(path, x, settings)
                addon.runfunc = None
                addonlist.append(addon)
        return addonlist

    def thread_get_progress(self):
        "Return (total queued items, items finished) for the current run."
        return (self.qitems, self.qnr)

    def _thread_callback(self, item):
        """Progress relay used by the worker threads.

        Counts an item as finished once its runfunc has been cleared, and
        fires complete_callback exactly once: completion is decided under the
        lock (comparing qnr outside the lock could double-fire in a race).
        """
        finished = False
        if not item.runfunc:
            self.countlock.acquire()
            try:
                self.qnr += 1
                finished = self.qnr == self.qitems
            finally:
                self.countlock.release()
        self.callback(item)
        if finished and self.complete_callback:
            self.complete_callback()

    def thread_run(self, addons, command, progress_callback, complete_callback = None):
        """Run method `command` on every addon in `addons` using
        settings.NUM_THREADS worker threads.

        progress_callback(item) fires before and after each item;
        complete_callback() fires once when all items are done. Returns
        False when a run is already in progress or `addons` is empty.
        """
        if self.qitems != self.qnr or not addons:
            #A bug, or already a run in progress.
            return False

        #threading.Lock instead of the low-level thread module
        self.countlock = threading.Lock()

        self.callback = progress_callback
        self.complete_callback = complete_callback

        q = Queue.Queue()
        for a in addons:
            q.put(a)

        #qsize is read before the END sentinels are added below
        self.qitems = q.qsize()
        self.qnr = 0

        for x in xrange(settings.NUM_THREADS):
            th = updateWorker(q, command, self._thread_callback)
            th.start()
            q.put("END")  #one shutdown sentinel per worker
        return True
        
class Addon(object):
    """A single addon installation, described by its update.xml file.

    Reads metadata from <path>/<folder>/update.xml, persists update state in
    the shared config object, and can check remote urls for new versions,
    download them and install them (with backup/rollback of the old folder).
    """

    def __init__(self, path, folder, config):
        # path   -- parent directory that holds all addon folders
        # folder -- this addon's directory name under `path`
        # config -- shared ConfigParser-like store (get/set per section)
        self.path = path
        self.folder = folder
        self.config = config
        self.scanaddon()
        self.canupdate = False
        self.backupdir = os.path.join(config.get("settings", "wowFolder"), settings.BACKUPDIR)
        if not os.path.exists(self.backupdir):
            os.mkdir(self.backupdir)
        self.confname = "addon-%s" % self.name #TODO better, more unique name

        if self.conf_get("VersionInfo"):
            # NOTE(review): unpickles data from the config file -- safe only
            # as long as that file is trusted local state.
            self.versioninfo = pickle.loads(self.conf_get("VersionInfo").decode("hex"))

        if self.conf_get("remote_version"):
            if self.conf_get("remote_version") != self.version:
                self.canupdate = True

    def conf_get(self, option):
        "Read `option` from this addon's section of the config."
        return self.config.get(self.confname, option)

    def conf_set(self, option, value):
        "Write `option` to this addon's config section; a falsy value removes it."
        if value:
            self.config.set(self.confname, option, value)
        else:
            # NOTE(review): removal goes through `config.c` while get/set use
            # `config` directly -- looks inconsistent, confirm the wrapper API.
            self.config.c.remove_option(self.confname, option)

    def get(self, parent, child, asString = False):
        """Grab <parent><child> values from the addon's XML tree, cached.

        Returns the list of text values (or just the first one when asString
        is True); returns False when the lookup fails.
        """
        try:
            key = parent + '+' + child
            if key not in self.info:  #has_key is py2-only
                self.info[key] = self.GetNodeData(self.xml, parent, child)
            if asString:
                return self.info[key][0]
            else:
                return self.info[key]
        except Exception:
            return False

    def GetNodeData(self, xml, parent, child):
        """Return the text of every <child> element under the first <parent>.

        Returns a list of strings, or None when the parent is missing or the
        document is malformed.
        """
        try:
            # We assume we get one hit here.
            node = xml.getElementsByTagName(parent)[0]
            children = node.getElementsByTagName(child)
            data = []
            for x in children:
                data.append(x.firstChild.data)
            return data
        except Exception:
            return None

    def checkupdate(self, callback = None, force = False):
        """Check all update urls for a newer version.

        On success, stores the new version info (download urls, version
        string) in the config so it survives restarts and sets canupdate.
        Calls `callback(self)` when given; returns self.canupdate.
        """
        self.updatekey = "N"
        currentversion = self.version
        logging.debug("Checking for new updates for addon "+self.name)
        logging.debug(" Old version : "+self.version)
        for x in self.updateurls:
            logging.debug(" Checking : "+x)
            raw_xml = self.get_url(x, force)
            try:
                if raw_xml:
                    xml = minidom.parse(raw_xml)
                    remoteversion = self.GetNodeData(xml, "version", "number")[0]
                    logging.debug(" New version : "+remoteversion)
                    # NOTE(review): plain string comparison is lexicographic
                    # ("10" < "9") -- assumes comparable version strings.
                    if remoteversion > currentversion:
                        logging.debug("  New version found.\n  From "+x)
                        self.canupdate = True
                        self.updatekey = "Y"
                        urls = self.GetNodeData(xml, "download", "url")
                        self.versioninfo = {}
                        currentversion = remoteversion
                        self.versioninfo['version'] = remoteversion
                        self.versioninfo['urls'] = urls
                        cl = self.GetNodeData(xml, "information", "changelog")
                        if cl:
                            self.changelog = cl[0]
                        self.versioninfo['string'] = self.GetNodeData(xml, "version", "string")[0]
                        self.conf_set("VersionInfo", pickle.dumps(self.versioninfo,2).encode("hex"))
                        self.conf_set("remote_version_string", self.versioninfo['string'])
                        self.conf_set("remote_version", self.versioninfo['version'])
            except Exception as err:
                logging.info("Problem on parsing xml")
                logging.info(u"Exception : %s" % err)
        logging.debug("Checking complete.\n")
        if callback:
            callback(self)
        return self.canupdate

    @staticmethod
    def weighted_choice_sub(weights):
        """Return an index into `weights`, chosen with probability
        proportional to its weight.

        Bug fix: was declared without `self` / @staticmethod, so calling it
        on an instance passed the addon object itself as `weights`.
        """
        #Thanks to http://eli.thegreenplace.net/2010/01/22/weighted-random-generation-in-python/ for this
        rnd = random.random() * sum(weights)
        for i, w in enumerate(weights):
            rnd -= w
            if rnd < 0:
                return i

    def get_url(self, url, dldata=False):
        """URL fetcher that understands etag, last modified, and gzip.

        Returns a file-like object with the body on HTTP 200, or False on
        304 / 404 / network errors. When dldata is True, the conditional
        request and gzip headers are skipped (used for the zip download).
        """

        logging.debug(u"Getting url : %s" % url)

        urlhash = hashlib.md5(url).hexdigest()

        request = urllib2.Request(url)
        opener = urllib2.build_opener(DefaultErrorHandler())

        #No need for etag / last modified / gzip if you download the new zip
        if not dldata:
            lastmodified = self.conf_get("http-lastmodified-"+urlhash)
            etag = self.conf_get("http-etag-"+urlhash)
            if etag:
                request.add_header('If-None-Match', etag)
            if lastmodified:
                request.add_header('If-Modified-Since', lastmodified)
            request.add_header('Accept-encoding', 'gzip')

        request.add_header('User-Agent', "%s/%s" % (settings.AGENTNAME, settings.VERSION))
        try:
            datastream = opener.open(request)
        except Exception:
            return False

        #DefaultErrorHandler sets .status on errors; assume 200 otherwise
        datastream.status = getattr(datastream, "status", 200)

        logging.debug("Result code : %s" % datastream.status)

        if datastream.status == 200: #Sucessfull retrieval
            if not dldata:
                self.conf_set("http-etag-"+urlhash, datastream.headers.get('ETag'))
                self.conf_set("http-lastmodified-"+urlhash, datastream.headers.get('Last-Modified'))
            length = datastream.headers.get('Content-Length')

            logging.debug("Content length : %s - limit : %s" % (length, settings.MAX_INMEM_FILESIZE))

            #If file is too big, store to a temp file on disk instead of memory
            #Bug fix: the header value is a string; compare numerically (in
            #Python 2 a string always compares greater than an int, which
            #forced every download with a Content-Length to a temp file).
            if length and length.isdigit() and int(length) > settings.MAX_INMEM_FILESIZE:
                logging.debug("Large file, downloading to temp file")
                CHUNK = 128*1024
                temp = tempfile.TemporaryFile() #os.tmpfile() is deprecated
                while True:
                    C = datastream.read(CHUNK)
                    if not C: break
                    temp.write(C)
                temp.seek(0)
                data = temp
            else:
                data = StringIO.StringIO(datastream.read())

            if datastream.headers.get('Content-Encoding') == "gzip":
                logging.debug("gzip encoded, wrapping file object in decoder")
                data = gzip.GzipFile(fileobj=data)

            return data

        if datastream.status == 304:
            logging.debug("No change since last update")

        if datastream.status == 404:
            logging.info("URL: 404 not found")

        return False

    def scanaddon(self):
        "Fetch info from update.xml in addon directory"
        self.xml = minidom.parse(os.path.join(self.path, self.folder, "update.xml"))
        self.info = {}
        self.name = self.get("information", "name", True)
        self.author = self.get("information", "author", True)
        self.homepage = self.get("information", "homepage", True)
        self.email = self.get("information", "email", True)
        self.changelog = self.get("information", "changelog", True)
        self.description = self.get("information", "description", True)
        self.updateurls = self.get("update", "url")
        self.version = self.get("version","number", True)
        self.versionstr = self.get("version","string", True)
        logging.debug("Creating a new addon.")
        logging.debug(" Name   : "+self.name)
        logging.debug(" Version: "+self.version+"\n")
        self.updatekey = "-"

    def unzip(self, f, path):
        """Unzip file object f to path, stripping the archive's first
        directory component from every entry name."""
        z2 = zipfile.ZipFile(f)
        for x in z2.namelist():
            #Remove first directory from name
            xn = x.split("/", 1)[-1:][0]
            name = os.path.join(path,*xn.split("/"))
            if name.endswith('/'): # if its a directory
                if not os.path.exists(name):
                    os.mkdir(name)
            else:
                fw = open(name,"wb")
                try:
                    fw.write(z2.read(x))
                finally:
                    #close even when read/write fails, so handles don't leak
                    fw.close()

    def update(self):
        """Try to update the addon, with the list of urls found via
        checkupdate(). Returns True on success, False if every url failed.

        The old folder is renamed into the backup directory first; on unzip
        failure the backup is restored (or the broken download is parked in a
        FAIL-* folder when the new directory cannot be removed).
        """

        #Update data is old, so try a refresh before updating
        if self.updatekey != "Y":
            self.checkupdate()

        #Randomize the url list so mirrors share the load
        update_urls = self.versioninfo['urls'][:]
        random.shuffle(update_urls)

        folder = os.path.join(self.path, self.folder)
        backup = os.path.join(self.backupdir, self.folder)

        #Check all urls until you successfully download and unzip one.
        for x in update_urls:

            logging.debug("Downloading %s" % x)
            f = self.get_url(x, True)
            if not f:
                logging.debug("Could not DL %s" % x)
                continue

            # NOTE(review): "%s" in strftime is platform dependent -- confirm
            now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S_%s")
            backupfolder = os.path.join(backup, "backup-r"+ self.version + "-" + now)
            failfolder = os.path.join(backup, "FAIL-r"+ self.version + "-" + now)

            #Move the old directory to a backup before unzipping new
            if not os.path.exists(backup):
                os.mkdir(backup)

            os.rename(folder, backupfolder)
            os.mkdir(folder)

            logging.debug("Unzipping file")
            try:
                self.unzip(f, folder)
                self.scanaddon()
                self.canupdate = False
                return True
            except Exception:
                #Unzip failed, restore old directory
                logging.debug("Unzip failed")
                try:
                    os.rmdir(folder)
                    #Bug fix: restore the renamed backup of THIS addon
                    #(backupfolder), not the whole backup parent directory.
                    os.rename(backupfolder, folder)
                except OSError:
                    #New directory has files, moving to a fail folder instead
                    logging.info("Deletion of folder failed")
                    os.rename(folder, failfolder)
                    os.rename(backupfolder, folder)
        return False
