import urllib2
import os
import json
import hashlib
import thread
import time
import urllib
import errno
from time import sleep
import threading
import logging
import re
from ConfigParser import ConfigParser
import sys
import shutil
from urllib2 import URLError

from tools import getlogger
from tools import DIRS, HOMEDIR, BASEURL, BASEDIR
from tools import CALLBACKS, ERR_CALLBACKS

# Module-level logger for this sync module.
log = getlogger(__name__)

# Normalise BASEDIR so it always carries a trailing slash.
BASEDIR = BASEDIR if str(BASEDIR).endswith('/') else BASEDIR + '/'
# Remote endpoint that serves the file manifest and the file contents.
SERVICE_ADDRESS = os.path.join(BASEURL, 'sync')
# Chunk size (bytes) used when streaming downloads.
BLOCKSIZE = 8192
# Filenames matching any of these regexes are excluded from syncing.
FILTER_FILENAME_REG = ['.*?\.pyc$', '^golia$']


def main(remote_path=None, local_path=None):
    """Run one sync pass: diff local files against the server, download changes.

    remote_path and local_path must be supplied together or both omitted.
    Returns the list of downloaded file paths.

    Raises:
        ValueError: if only one of remote_path/local_path is given.
        Exception: if the server reported differences but nothing was synced.
    """
    log.info('sync module start')
    # Explicit check replaces the original `assert`, which disappears
    # under `python -O` and would let a half-specified call through.
    if (remote_path is None) != (local_path is None):
        raise ValueError(
            'remote_path and local_path must be given together or not at all')
    localDirDict = get_local_dir(remote_path, local_path)
    local_json = read_local_dir(localDirDict, FILTER_FILENAME_REG, local_path)
    servi_json = read_remote(SERVICE_ADDRESS, FILTER_FILENAME_REG, remote_path)
    target = minus_json(servi_json, local_json)
    result = compare_and_download(target, local_path)
    if len(target) > 0 and len(result) == 0:
        # Fixed typo in the original message ("bu no sync").
        raise Exception('should sync, but no sync')
    return result


def get_local_dir(remote_path, local_path=None):
    """Resolve which local directories to sync.

    With no remote_path, the project-wide DIRS mapping is returned.
    Otherwise a single-entry mapping pointing at local_path is built,
    anchored under BASEDIR unless local_path is already absolute.
    """
    if remote_path is None:
        return DIRS
    if is_absolute(local_path):
        target = local_path
    else:
        target = os.path.join(BASEDIR, local_path)
    return {'remote_path': target}


def is_absolute(local_path):
    """Return True when local_path is a non-None path beginning with '/'."""
    if local_path is None:
        return False
    return local_path.startswith('/')


def read_local_dir(dirs, ignored_reg=FILTER_FILENAME_REG, local_path=None):
    """Walk every directory in *dirs* and fingerprint each non-ignored file.

    Returns a dict mapping each file's path (relative to HOMEDIR) to
    {'md5': <hex digest>, 'mtime': <str(st_mtime)>}.

    dirs: mapping whose values are directory paths to walk.
    ignored_reg: regex strings; files whose basename matches any are skipped.
    local_path: unused here, kept for interface compatibility with callers.
    """
    result = {}
    base = HOMEDIR if HOMEDIR.endswith('/') else HOMEDIR + '/'
    # Compile the ignore patterns once instead of once per file per pattern.
    patterns = [re.compile(r) for r in ignored_reg]
    abs_base = os.path.abspath(base)
    for dirpath in dirs.values():
        for d, subd, flist in os.walk(dirpath):
            for f in flist:
                if any(p.match(str(f)) for p in patterns):
                    continue
                fullpath = os.path.abspath(os.path.join(d, f))
                # Key files by their path relative to HOMEDIR.
                filename = fullpath.replace(abs_base, '')
                if filename.startswith('/'):
                    filename = filename[1:]
                result[filename] = {
                    'md5': md5(fullpath),
                    'mtime': str(os.stat(fullpath).st_mtime),
                }
    return result


def read_remote(remote_addr, ignored_reg=FILTER_FILENAME_REG, target=None):
    """Fetch the remote file manifest (JSON) and filter out ignored entries.

    remote_addr: URL returning a JSON object keyed by file path.
    ignored_reg: regex strings; entries whose basename matches any are dropped.
    target: optional comma-separated list of keys; when given, only those
        keys (if present) are returned.
    """
    result = {}
    jsonstr = urllib2.urlopen(remote_addr).read()
    jsonObj = json.loads(jsonstr)

    # Compile the ignore patterns once instead of once per manifest entry.
    patterns = [re.compile(r) for r in ignored_reg]
    # Iterate over a snapshot of the keys: we delete from jsonObj while
    # looping (the original relied on Py2's keys() returning a list).
    for k in list(jsonObj.keys()):
        filename = k.split('/')[-1]
        if any(p.match(str(filename)) for p in patterns):
            log.info('syncconfig del jsonObj[%s]' % k)
            del jsonObj[k]

    if target is not None:
        for key in target.split(','):
            if key in jsonObj:
                result[key] = jsonObj[key]
        return result
    return jsonObj


def compare(a, b, compare_key='md5'):
    """Return True when a and b have identical key sets and equal
    compare_key values for every key.

    a, b: dicts mapping names to info dicts containing compare_key.
    """
    ak = set(a.keys())
    bk = set(b.keys())
    if ak != bk:
        return False
    # Compare per key. The original built two value lists by iterating two
    # *different* set objects; set iteration order is not guaranteed to
    # agree between them, so equal dicts could spuriously compare unequal.
    return all(a[k][compare_key] == b[k][compare_key] for k in ak)


def minus_json(a, b, compare_key='md5', compare_func=lambda x, y: x == y):
    """Return the entries of *a* that are missing from *b*, or present in
    both but with differing compare_key values (per compare_func)."""
    diff = {}
    for key, entry in a.items():
        if key not in b:
            diff[key] = entry
            continue
        other = b.get(key, None)
        mine = entry.get(compare_key)
        theirs = other.get(compare_key)
        # Deliberately `== False` (not `not ...`) so non-bool return values
        # from compare_func behave exactly as in the original predicate.
        if compare_func(mine, theirs) == False and mine is not None:
            diff[key] = entry
    return diff


def compare_and_download(jsonObj, local_path=None):
    """Download every file described in jsonObj; return the destination paths.

    Destinations land under HOMEDIR (preserving the manifest path), or
    directly inside local_path (basename only) when local_path is absolute.
    Raises Exception as soon as any single download fails its md5 check.
    """
    downloaded = []
    for filename, info in jsonObj.items():
        if is_absolute(local_path):
            dest = os.path.join(local_path, filename.split('/')[-1])
        else:
            dest = os.path.join(HOMEDIR, filename)
        ok = dowload(os.path.join(SERVICE_ADDRESS, filename), dest,
                     info['md5'])
        if not ok:
            raise Exception("download error")
        downloaded.append(dest)
    return downloaded


def clean_old_backup(tempfiles, max_hold=3):
    """Delete the oldest backup files, keeping at most *max_hold* newest.

    tempfiles: list of file paths; mutated in place by the sort.
    max_hold: number of most-recently-modified files to keep.

    Fixes two defects in the original: max_hold was ignored (the threshold
    and slice were hard-coded to 3), and the slice `tempfiles[-3:]` removed
    the NEWEST files instead of the oldest ones.
    """
    import stat
    if len(tempfiles) > max_hold:
        # Oldest first by modification time.
        tempfiles.sort(key=lambda x: os.stat(x)[stat.ST_MTIME])
        # Remove everything except the max_hold newest files.
        for f in tempfiles[:-max_hold]:
            os.remove(f)


def dowload(remotepath, locapath, servimd5, tempfile=True):
    """Stream remotepath to locapath (suffixed '.tmp' when tempfile is True)
    and verify the result against servimd5.

    Returns True on success, False when the downloaded md5 does not match.
    Parent directories of the destination are created as needed.
    """
    if tempfile:
        locapath += '.tmp'
    response = urllib2.urlopen(remotepath)
    headers = response.info()
    remaining = int(headers.get("Content-Length", -1))
    log.info('downloading %s to %s ,size %d', remotepath, locapath, remaining)
    destdir = os.path.dirname(locapath)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    with open(locapath, 'wb') as out:
        chunk = response.read(BLOCKSIZE)
        while chunk:
            out.write(chunk)
            remaining -= len(chunk)
            chunk = response.read(BLOCKSIZE)
        out.flush()
    log.info('downloaded %s', locapath)
    if md5(locapath) != servimd5:
        log.error('md5 not match')
        return False
    return True


def md5(filepath, blocksize=8192):
    """Return the hex md5 digest of the file at filepath.

    blocksize: bytes read per iteration (new, backward-compatible parameter).

    The original read the entire file into memory with a single f.read();
    streaming in chunks keeps memory bounded for large files while
    producing the identical digest.
    """
    digest = hashlib.md5()
    with open(filepath, 'rb') as f:
        chunk = f.read(blocksize)
        while chunk:
            digest.update(chunk)
            chunk = f.read(blocksize)
    return digest.hexdigest()


def chmod(filepath):
    """Set file permissions: rwxr--r-- for files living under a /sh/ or
    /bin/ directory (scripts/binaries), rw-r--r-- for everything else.

    Non-files are silently ignored.
    """
    if not os.path.isfile(filepath):
        return
    # 0oNNN literals replace the Python-2-only 0NNN form (same values;
    # accepted by Python >= 2.6 and required by Python 3).
    if filepath.find('/sh/') > 0 or filepath.find('/bin/') > 0:
        os.chmod(filepath, 0o744)
    else:
        os.chmod(filepath, 0o644)


def clean_temp_files(e=None, j=None):
    """Remove every '*.tmp' file under HOMEDIR.

    e and j are accepted (and ignored) so the function can be registered
    as an error callback that receives (exception, job) arguments.
    """
    # os.walk replaces the Python-2-only os.path.walk (removed in Python 3).
    # dirpath is already rooted at HOMEDIR, so no extra HOMEDIR join is
    # needed -- the original joined HOMEDIR in front of the walked dirname,
    # which double-prefixed the path whenever HOMEDIR was relative.
    for dirpath, _dirnames, filenames in os.walk(HOMEDIR):
        for name in filenames:
            if name.endswith('.tmp'):
                os.remove(os.path.join(dirpath, name))


def rename_files(filelists, job):
    """Promote each downloaded '<path>.tmp' file into place, backing up any
    existing version with a timestamp suffix.

    filelists: list of final destination paths whose '.tmp' siblings exist.
    job: unused here; present for the callback-registry signature.

    Returns True when at least one temp file was applied; returns None when
    filelists is empty or an error occurred (matching the original's
    implicit return). Fixes from the original: `except Exception, ex` was
    Python-2-only syntax; `return True` sat inside the loop so only the
    FIRST pending file was ever applied; and clean_temp_files() ran inside
    the loop, deleting the remaining pending .tmp files before their turn.
    """
    if not filelists:
        return
    try:
        log.info('rename files %s', filelists)
        applied = False
        for locapath in filelists:
            tempath = locapath + '.tmp'
            log.info('rename: %s to %s', tempath, locapath)
            if not os.path.isfile(tempath):
                continue
            if os.path.isfile(locapath):
                # NOTE(review): this pattern is built from the full path but
                # matched against bare basenames from os.listdir, so it can
                # never match and backup rotation is effectively disabled.
                # Kept as-is pending confirmation of the intended behavior.
                p = re.compile(locapath + r'(\d+)' + '.*')
                idx = locapath.rfind('/')
                tempfiles = [f for f in os.listdir(locapath[:idx])
                             if p.match(f) is not None]
                clean_old_backup(tempfiles)
                # Keep a timestamped copy of the version being replaced.
                shutil.copy(locapath, locapath + time.strftime(
                    "%Y%m%d%H%M%S", time.localtime()))
            shutil.copy(tempath, locapath)
            chmod(locapath)
            applied = True
        if applied:
            # Wipe leftover temp files once, after every file is in place.
            clean_temp_files()
            return True
    except Exception as ex:
        log.error('rename file error %s' % ex)


# Register hooks in the project-wide callback registries: on error,
# leftover *.tmp files are wiped; on success, downloaded temp files are
# promoted into their final locations.
ERR_CALLBACKS['clear_tmp_files'] = clean_temp_files
CALLBACKS['rename_files'] = rename_files

if __name__ == '__main__':
    # CLI usage: python <script> [remote_path local_path]
    remote_path = None
    local_path = None
    # Fixed off-by-one: the original tested len(sys.argv) >= 2 but then
    # read sys.argv[2], raising IndexError when exactly one argument was
    # supplied. Both arguments are required together (main enforces this).
    if len(sys.argv) >= 3:
        remote_path = sys.argv[1]
        local_path = sys.argv[2]
    main(remote_path, local_path)
