#@PydevCodeAnalysisIgnore
import pyinotify
import json
import os
import sys
import urllib
import urllib2
import base64
import time
import string
import threading
import logging


class EventHandler(pyinotify.ProcessEvent):
    '''pyinotify event handler: mirrors local filesystem events to the
    server through the matching Walker instance(s).'''

    def process_IN_DELETE(self, event):
        '''Propagate a local delete to every walker whose directory
        is a prefix of the deleted path.'''
        logger.debug('Delete event: ' + event.pathname)
        for wlk in walkers:
            # startswith replaces the deprecated string.find(...) == 0.
            if not event.pathname.startswith(wlk.directory):
                continue
            wlk.lock.acquire()
            try:
                logger.info('Removing ' + event.pathname)
                try:
                    wlk.Rmfile(event.pathname)
                except Exception:
                    logger.error('Error removing file!')
                    wlk.resync_required = True
            finally:
                # BUG FIX: release in finally so the lock is never leaked.
                wlk.lock.release()

    def process_IN_CLOSE_WRITE(self, event):
        '''Upload (or, for readonly tags, re-download) a file after a
        local write completes.'''
        logger.debug('Close write event: ' + event.pathname)
        for wlk in walkers:
            if not event.pathname.startswith(wlk.directory):
                continue
            if not os.path.isfile(event.pathname):
                continue
            wlk.lock.acquire()
            logger.debug('Ready to upload ' + event.pathname)
            try:
                # Stat once instead of twice.
                st = os.stat(event.pathname)
                mtime = st.st_mtime
                size = st.st_size
                wlk.localfiles[event.pathname] = mtime
                srv = wlk.serverfiles.get(event.pathname)
                if (srv is not None and int(mtime) == int(srv["mtime"])
                        and size == srv["size"]):
                    logger.debug("Skipping upload, same file")
                elif not wlk.readonly:
                    wlk.Putfile(event.pathname)
                else:
                    # BUG FIX: the original message contained a broken
                    # backslash continuation with embedded indentation.
                    logger.debug("Downloading instead of uploading "
                                 "due to readonly flag")
                    wlk.Getfile(event.pathname)
            except Exception:
                logger.error('Error uploading file!')
                wlk.resync_required = True
            finally:
                # BUG FIX: release in finally so the lock is never leaked.
                wlk.lock.release()


class Timer(threading.Thread):
    '''Background poller: periodically checks the server for updates on
    behalf of a single Walker and resyncs when required.'''

    def __init__(self, wlk):
        threading.Thread.__init__(self)
        self.wlk = wlk  # the Walker this thread services

    def run(self):
        # BUG FIX: the original body referenced the *global* name `wlk`
        # (the last walker created at module level), so with several
        # subscriptions every Timer polled the same walker. Use self.wlk.
        wlk = self.wlk
        while True:
            wlk.lock.acquire()
            try:
                wlk.CheckForUpdates()
                if wlk.resync_required:
                    wlk.Resync()
            finally:
                # BUG FIX: release in finally so an exception from
                # CheckForUpdates/Resync cannot leave the lock held.
                wlk.lock.release()
            time.sleep(wlk.refresh)


class Walker():
    '''Main client class: keeps one local directory tree in sync with a
    server-side tag through the HTTP API (TagInfo / ListFiles /
    ListSubscriptions / Get / Put / Delete endpoints).'''

    def __init__(self, directory, url, tag, user, password, refresh):
        self.directory = os.path.abspath(directory)  # local sync root
        self.url = url            # server base URL
        self.tag = tag            # subscription tag being synced
        self.user = user          # HTTP basic-auth user
        self.password = password  # HTTP basic-auth password
        self.refresh = refresh    # poll interval in seconds
        # BUG FIX: these used to be *class* attributes, so the mutable
        # dicts and the lock were shared between every Walker instance.
        self.localfiles = {}      # abs path -> local mtime
        self.serverfiles = {}     # abs path -> server metadata dict
        self.update = 0           # last tag timestamp seen on the server
        self.readonly = False     # subscription mode (set by GetMode)
        self.resync_required = True  # force a full resync on first poll
        self.serverfiles_insync = False
        self.lock = threading.Lock()  # guards all mutable state above

    def _AuthHeader(self):
        '''Build the HTTP basic-auth header value (this snippet was
        duplicated in every request-building method).'''
        creds = base64.encodestring('%s:%s' % (self.user, self.password))
        return "Basic %s" % creds.replace('\n', '')

    def WalkDir(self):
        ''' Get list of local files with timestamps'''
        result = {}
        try:
            for curdir, subdirs, files in os.walk(self.directory):
                for curfile in files:
                    fullpath = os.path.abspath(os.path.join(curdir, curfile))
                    result[fullpath] = os.stat(fullpath).st_mtime
        except OSError:
            # BUG FIX: the original logged the bare name `directory`,
            # which is undefined here and raised NameError in the handler.
            logger.critical('Error reading dir: ' + self.directory)
            sys.exit(1)
        self.localfiles = result
        return result

    def CheckForUpdates(self):
        '''Check tag timestamp for updates; on change refresh the mode,
        the server file list, and resync.'''
        logger.debug("Trying to update tag %s" % self.tag)
        try:
            data = urllib.urlencode({"tag": self.tag})
            request = urllib2.Request(self.url + '/TagInfo?' + data)
            request.add_header("Authorization", self._AuthHeader())
            reply = urllib2.urlopen(request)
            result = json.loads(reply.read())
            logger.debug("Remote ts: %s, local ts: %s" %
                         (result["update"], self.update))
            if result["update"] != self.update:
                self.GetMode()
                self.ReadServerFiles()
                self.Resync()
                self.update = result["update"]
        except Exception:
            logger.error('Error interacting with server: ' + self.url)

    def ReadServerFiles(self):
        ''' Get list of remote files with all metadata '''
        logger.debug("Reading server files for  tag %s" % self.tag)
        self.serverfiles_insync = False
        data = urllib.urlencode({"tag": self.tag})
        request = urllib2.Request(self.url + '/ListFiles?' + data)
        request.add_header("Authorization", self._AuthHeader())
        reply = urllib2.urlopen(request)
        result = json.loads(reply.read())

        # Server paths are tag-relative; normalize to absolute paths.
        normresult = {}
        for key in result.keys():
            normresult[self.directory + '/' + key] = result[key]

        self.serverfiles = normresult
        self.serverfiles_insync = True
        return normresult

    def GetMode(self):
        ''' Get subscription mode '''
        logger.debug("Getting suscription mode for tag %s" % self.tag)
        try:
            request = urllib2.Request(self.url + '/ListSubscriptions')
            request.add_header("Authorization", self._AuthHeader())
            reply = urllib2.urlopen(request)
            subscriptions = json.loads(reply.read())

            if self.tag in subscriptions:
                self.readonly = subscriptions[self.tag]['readonly']
                logger.info("Setting readonly mode for tag %s to %r" %
                            (self.tag, self.readonly))
            else:
                logger.error("Error: not subscribed to tag %s" % self.tag)
        except Exception:
            # BUG FIX: the original did `print traceback.print_exception()`
            # -- `traceback` is never imported and the call lacks its
            # required arguments; logger.exception records the same info.
            logger.exception('Error getting subcsriptions from server: ' +
                             self.url)

    def Rmfile(self, localfile):
        '''Delete the server-side copy of localfile and mark it removed
        in the cached server metadata. Returns True on success.'''
        logger.info("Removing remote %s" % localfile)
        result = False
        try:
            d = {"file": localfile[len(self.directory) + 1:],
                 "tag": self.tag,
                 "mtime": time.time()}
            request = urllib2.Request(self.url + "/Delete")
            request.add_header("Authorization", self._AuthHeader())
            output = urllib2.urlopen(request, urllib.urlencode(d))
            ret = json.loads(output.read())
            if ret == []:  # an empty list is the server's success reply
                result = True
                if localfile in self.serverfiles:
                    self.serverfiles[localfile]['removed'] = True
        except Exception:
            logger.error('Error removing remote file!')
        return result

    def Putfile(self, localfile):
        '''Upload localfile (base64-encoded) and update the cached
        server metadata. Returns True on success.'''
        logger.info("Putting %s" % localfile)
        result = False
        try:
            # BUG FIX: open in binary mode -- the raw bytes are
            # base64-encoded, and text mode would mangle them on
            # platforms with newline translation.
            with open(localfile, 'rb') as f:
                mtime = os.stat(localfile).st_mtime
                content = f.read()
            d = {"file": localfile[len(self.directory) + 1:],
                 "tag": self.tag,
                 "mtime": mtime,
                 "content": base64.b64encode(content)}
            request = urllib2.Request(self.url + "/Put")
            request.add_header("Authorization", self._AuthHeader())
            output = urllib2.urlopen(request, urllib.urlencode(d))
            ret = json.loads(output.read())
            if ret == []:
                result = True
                self.serverfiles[localfile] = {'client': self.user,
                                               'mtime': mtime,
                                               'size': len(content),
                                               'removed': False}
        except Exception:
            logger.error('Error uploading file!')
        return result

    def Getfile(self, remotefile):
        '''Download remotefile from the server, or delete it locally if
        the server marked it removed. Returns True on success.'''
        logger.info("Getting %s" % remotefile)
        result = False
        try:
            d = {"file": remotefile[len(self.directory) + 1:],
                 "tag": self.tag}
            request = urllib2.Request(self.url + "/Get?" +
                                      urllib.urlencode(d))
            request.add_header("Authorization", self._AuthHeader())
            output = urllib2.urlopen(request)
            ret = json.loads(output.read())
            if ret["removed"] == True:
                logger.info("Removing " + remotefile)
                if os.path.isfile(remotefile):
                    os.unlink(remotefile)
            else:
                logger.info("Updating " + remotefile)
                data = base64.b64decode(ret["content"])
                destdir = os.path.dirname(remotefile)
                if not os.path.isdir(destdir):
                    os.makedirs(destdir)
                if (os.path.isfile(remotefile) and
                    int(os.stat(remotefile).st_mtime) == int(ret["mtime"])
                    and os.stat(remotefile).st_size == ret["size"]):
                    logger.debug("Skipping update:same file")
                else:
                    # BUG FIX: context manager closes the handle even if
                    # write() raises.
                    with open(remotefile, 'w') as f:
                        f.write(data)
                    os.utime(remotefile, (time.time(), ret["mtime"]))

            result = True
            self.localfiles[remotefile] = ret["mtime"]
        except Exception:
            logger.error("Error downloading file!")
        return result

    def Resync(self):
        ''' Make full resync '''
        logger.info('Resyncing for tag ' + self.tag)
        logger.debug("Local: " + str(self.localfiles))
        logger.debug("Remote: " + str(self.serverfiles))
        if not self.serverfiles_insync:
            logger.debug("Serverfiles are not in sync, refusing to sync")
            return
        self.resync_required = False
        # Phase 1: upload files that are newer (or exist only) locally.
        for lfile in self.localfiles.keys():
            if (lfile not in self.serverfiles or
                int(self.localfiles[lfile]) >
                    int(self.serverfiles[lfile]["mtime"])):
                if not self.readonly:
                    logger.info('Uploading ' + lfile)
                    if self.Putfile(lfile) == False:
                        self.resync_required = True
                else:
                    logger.debug('Skipping upload due to readonly flag' +
                                 lfile)
        # Phase 2: download files that are newer (or exist only) remotely.
        for sfile in self.serverfiles.keys():
            if (sfile not in self.localfiles or
                int(self.serverfiles[sfile]["mtime"]) >
                    int(self.localfiles[sfile])):
                if (os.path.isfile(sfile) or
                        self.serverfiles[sfile]["removed"] == False):
                    logger.info('Downloading ' + sfile)
                    if self.Getfile(sfile) == False:
                        self.resync_required = True
            elif (self.readonly == True and
                  sfile in self.localfiles and
                  (int(self.serverfiles[sfile]["mtime"])
                   != int(self.localfiles[sfile])
                   or self.serverfiles[sfile]["size"]
                   != os.stat(sfile).st_size)):
                # Readonly subscriptions: the server copy always wins.
                logger.info('Downloading due to readonly flag' + sfile)
                if self.Getfile(sfile) == False:
                    self.resync_required = True

# --- module-level setup: logging, config, watches, poller threads ---
logger = logging.getLogger('unchaos-client')
hdlr = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)

walkers = []

# Build one Walker per 'subscription' entry in config.json.
try:
    # BUG FIX: the file handle was never closed; use a context manager.
    with open('config.json', 'r') as conffile:
        config = json.loads(conffile.read())

    for key, val in config.items():
        if key == 'subscription':
            wlk = Walker(val['path'], val['url'], val['tag'],
                         val['user'], val['password'], val['refresh'])
            walkers.append(wlk)
except Exception:
    logger.critical('Error parsing configfile!')
    sys.exit(1)

# Prime the local file caches before watching for events.
try:
    for wlk in walkers:
        wlk.WalkDir()
except Exception:
    logger.critical('Error walking dir!')
    sys.exit(1)

wm = pyinotify.WatchManager()
mask = pyinotify.IN_DELETE | pyinotify.IN_CLOSE_WRITE
handler = EventHandler()
notifier = pyinotify.Notifier(wm, handler)

# Initial sync, inotify watches, and one daemon poller per walker.
try:
    for wlk in walkers:
        wlk.CheckForUpdates()
        wm.add_watch(wlk.directory, mask, rec=True)
        # NOTE(review): rec=True should already cover subdirectories;
        # the explicit per-subdir watches below are kept from the
        # original for safety -- confirm against pyinotify docs.
        for curdir, subdirs, files in os.walk(wlk.directory):
            for sub in subdirs:
                wm.add_watch(os.path.join(curdir, sub), mask, rec=True)
        timer = Timer(wlk)
        timer.setDaemon(True)
        timer.start()

except Exception:
    logger.critical('Error while resyncing!')
    sys.exit(1)

# Blocks forever dispatching inotify events to EventHandler.
notifier.loop()
