# coding: utf-8
#--------------------#
# Coder  : Dzlua
# Email  : 505544956@qq.com
# module : scheduler
# file   : run.py
# Time   : 2017/09/29
#--------------------#
import signal, threadpool, time, config, os, socket
from apscheduler.schedulers.blocking import BlockingScheduler
from database import DataBase

import base

#--------------------#
sched = BlockingScheduler()
# Non-daemonic so the process stays alive until the scheduler is shut down.
sched.daemonic = False
# default worker pool for pack jobs; size comes from config
tdpool = threadpool.ThreadPool(config.TP_THREAD_NUM)

#--------------------#
# tool functions
#----------#
# fun
def tool_reduce(lst):
    """Return a copy of lst with duplicates removed, keeping first-seen order.

    Bug fix: the original built the de-duplicated list with reduce() but
    discarded the result and returned the input list unchanged.
    """
    deduped = []
    for item in lst:
        if item not in deduped:
            deduped.append(item)
    return deduped
#----------#
# make dirs
def tool_mkdirs(path):
    """Create path (and any missing parents), ignoring OS-level failures.

    Only OSError (e.g. EEXIST when the directory already exists) is
    swallowed; the original bare except also hid programming errors.
    """
    try:
        os.makedirs(path)
    except OSError:
        pass
#----------#
# delete file or dirs
def tool_deldirfile(path):
    """Delete a file or directory tree with `rm -rf`.

    Bug fix: the path is now quoted (matching tool_cp_lib) so paths that
    contain spaces are removed as a single argument instead of several.
    NOTE(review): still shell-based; a path containing '"' would break the
    quoting — callers pass locally-built paths, but worth confirming.
    """
    try:
        cmd = 'rm -rf "%s"' % path
        os.system(cmd)
    except OSError:
        pass
#----------#
def tool_cp_lib(lib_dir, new_lib_dir):
    """Copy lib_dir into new_lib_dir with `cp -rf`.

    Returns 0 on success, 1 on failure. Bug fix: the original ignored the
    exit status of cp and reported success even when the copy failed.
    """
    cmd = 'cp -rf "%s" "%s"' % (lib_dir, new_lib_dir)
    try:
        ret = os.system(cmd)
    except OSError:
        return 1
    return 0 if ret == 0 else 1
#----------#
# find dir
def tool_get_colemake_dir(root_dir):
    """Walk root_dir top-down and return the first directory that does NOT
    contain exactly one entry (i.e. descend past single-entry wrappers
    created by unpacking); falls back to root_dir itself."""
    for current, subdirs, filenames in os.walk(root_dir):
        entry_count = len(subdirs) + len(filenames)
        if entry_count != 1:
            return current
    return root_dir
#----------#
# unpack zip package
def tool_unpack_zip(file, to_path):
    """Unpack a zip archive (cp936 filename encoding) into to_path.

    Returns the `unzip` exit status; non-zero means failure.
    Bug fix: `ret` is pre-initialised so the function no longer raises
    NameError at the return statement when os.system() itself throws.
    """
    print('[tdpool] tool_unpack_zip: unpacking...')
    #
    cmd = "unzip -O cp936 -q '%s' -d '%s'" % (file, to_path)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] tool_unpack_zip: cmd: %s' % cmd)
    #
    ret = 1  # assume failure until os.system reports otherwise
    try:
        ret = os.system(cmd)
        print('[tdpool] tool_unpack_zip: unpack done.')
    except OSError:
        print('[tdpool] tool_unpack_zip: Error. cmd: %s' % cmd)
    #
    return ret
def tool_trans_to_text(path):
    """Convert known document types under path to text/csv with soffice.

    .doc/.docx/.html/.xls/.xlsx are converted in one step; .pdf is first
    exported to .html and the html is then converted to text. Source files
    are deleted after each conversion attempt.

    Fixes: `ht is 1` / `ht is 2` identity comparisons on ints replaced
    with ==; the two-step branch no longer crashes (exts[0] + None) when
    the first conversion fails.
    """
    # one-step conversions: extension -> soffice --convert-to type string
    _exts = {
        '.doc': 'txt:Text (encoded):UTF8',
        '.docx': 'txt:Text (encoded):UTF8',
        '.html': 'txt:Text (encoded):UTF8',
        '.xls': 'csv:Text - txt - csv (StarCalc):44,34,0',
        '.xlsx': 'csv:Text - txt - csv (StarCalc):44,34,0',
    }
    # two-step conversions (go through an intermediate export first)
    _exts_ex = {
        '.pdf': 'html:impress_html_Export',
    }
    def hit(ext):
        # 1 = one-step, 2 = two-step, None = not convertible
        if ext in _exts:
            return 1
        if ext in _exts_ex:
            return 2
        return None
    #
    def getext(tp):
        # '.txt' from 'txt:Text (encoded):UTF8'
        return '.' + tp[:tp.find(':')]
    #
    def gettp(ext):
        if ext in _exts:
            return _exts[ext]
        if ext in _exts_ex:
            return _exts_ex[ext]
        return None
    #
    def trans(file, exts):
        # run soffice; return the produced extension, or None on failure
        path = os.path.dirname(file)
        tp = gettp(exts[1])
        if tp is None:
            return None
        cmd = 'soffice --headless --convert-to' \
            ' "%s" --outdir "%s" "%s"' % (tp, path, file)
        ret = os.system(cmd)
        if ret != 0:
            return None
        return getext(tp)
    #
    files = base.dir_files(path)
    for f in files:
        exts = base.file_ext(f)
        ht = hit(exts[1])
        if not ht:
            continue
        elif ht == 1:
            ff = base.path_join(path, f)
            trans(ff, exts)
            tool_deldirfile(ff)
        elif ht == 2:
            ff = base.path_join(path, f)
            ext = trans(ff, exts)
            tool_deldirfile(ff)
            if ext is None:
                # first-stage conversion failed: nothing to post-process
                continue
            ff = base.path_join(path, exts[0] + ext)
            trans(ff, [exts[0], ext])
            tool_deldirfile(ff)
#--------------------#

#--------------------#
# for requests
#----------#
def req_prots(cmd):
    """Send cmd to the prots service and return the raw reply.

    Returns up to 64 KiB of response data, or None on any socket failure.
    Fixes: the socket is now closed on every path (the original leaked it
    on error) and only socket errors are caught instead of everything.
    """
    sk = None
    try:
        sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sk.connect((config.PROTS_HOST, config.PROTS_PORT))
        sk.sendall(cmd)
        res = sk.recv(65536)
        return res
    except socket.error:
        print('[tdpool] req_prots : Error socket. %s' % cmd)
        return None
    finally:
        if sk is not None:
            sk.close()
#--------------------#

#--------------------#
# prots
#----------#
def prots_open_lib(file):
    """Ask the prots service to open the given lib file.

    Returns 0 on success, 1 on failure. Bug fix: a None reply from
    req_prots (socket error) no longer raises AttributeError on .find().
    """
    #> openlib\r\n/disk2/coledata/it/newlib.src
    cmd = 'openlib\r\n%s' % file
    print('cmd', cmd)
    # a reply containing '1:' means success
    ret = req_prots(cmd)
    print(ret)

    if ret is None or ret.find('1:') == -1:
        return 1

    return 0
#----------#
def prots_close_lib(libname):
    """Ask the prots service to close libname; always reports success (0)."""
    #> dellib\r\nlibname
    cmd = 'closelib\r\n%s' % libname
    print('cmd', cmd)
    # service replies 1:ok / 0:no such lib; the reply is only logged here
    reply = req_prots(cmd)
    print(reply)
    return 0
#--------------------#

#--------------------#
#----------#
# cole make posts
def colemake_posts(id, lib_dir):
    """Run the colemake binary to pack blog posts for top-level category `id`.

    Returns the os.system exit status (0 = success). Bug fix: `ret` is
    pre-initialised so an os.system exception no longer causes NameError
    at the return statement.
    """
    #> colemake mem blog fenlei_id(top-level category id) path(lib output dir)
    cmd = "%s mem blog %s '%s'" \
        % (config.CM_BIN, id, lib_dir)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] colemake_posts: cmd: %s' % cmd)
    #
    ret = 1
    try:
        ret = os.system(cmd)
    except OSError:
        print('[tdpool] colemake_posts: Error. cmd: %s' % cmd)
    #
    return ret
#----------#
# cole make pkgs
def colemake_pkgs(lib_name, src_dir, lib_dir):
    """Run the colemake binary to pack src_dir into lib lib_name under lib_dir.

    Returns the os.system exit status (0 = success). Bug fix: `ret` is
    pre-initialised so an os.system exception no longer causes NameError
    at the return statement.
    """
    #> colemake all mem sourcedir libname libdir
    cmd = "%s all mem '%s' %s '%s'" \
        % (config.CM_BIN, src_dir, lib_name, lib_dir)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] colemake_pkgs: cmd: %s' % cmd)
    #
    ret = 1
    try:
        ret = os.system(cmd)
    except OSError:
        print('[tdpool] colemake_pkgs: Error. cmd: %s' % cmd)
    #
    return ret
#----------#
def colemake_itbase(lib_dir):
    """Run the colemake binary to rebuild the itbase lib into lib_dir.

    Returns the os.system exit status (0 = success). Bug fix: `ret` is
    pre-initialised so an os.system exception no longer causes NameError
    at the return statement.
    """
    #> colemake mem mysql itbase (/path)
    cmd = "%s mem mysql itbase '%s'" % (config.CM_BIN, lib_dir)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] colemake_itbase: cmd: %s' % cmd)
    #
    ret = 1
    try:
        ret = os.system(cmd)
    except OSError:
        print('[tdpool] colemake_itbase: Error. cmd: %s' % cmd)
    #
    return ret
#--------------------#

#--------------------#
#----------#
# pool
def pool_submit_job(fun, *args, **kwargs):
    """Queue fun(*args, **kwargs) for execution on the module thread pool."""
    def _runner(_params):
        return fun(*args, **kwargs)
    for request in threadpool.makeRequests(_runner, ['']):
        tdpool.putRequest(request)
    if config.SCHED_SHOW_INFO:
        print('[scheduler] pool_submit_job: %s' % args[0])
    tdpool.poll()
#----------#
# pool stop
def pool_stop():
    """Dismiss every pool worker and join any that were dismissed."""
    worker_count = len(tdpool.workers)
    tdpool.dismissWorkers(worker_count, False)
    if tdpool.dismissedWorkers:
        tdpool.joinAllDismissedWorkers()
#--------------------#

#--------------------#
#----------#
# database tool
def db_get_db():
    """Build a DataBase connection object from the values in config."""
    return DataBase(
        config.DB_HOST,
        config.DB_USER,
        config.DB_PASSWORD,
        config.DB_DATABASE,
        config.DB_SHOW_INFO,
    )
#----------#
# get posts
def db_get_posts(db):
    """Return IDs of published posts whose pack_state marks them for packing."""
    sql = "SELECT ID FROM " + config.DB_POSTS + \
          " WHERE post_status='publish' and pack_state='%s';"
    return db.execute(sql, (config.DBPS_FIND,))
#----------#
# get pkgs
def db_get_pkgs(db):
    """Return (file_id, lib_name, file_name, user_id) rows for packages
    whose pack_state marks them for packing."""
    sql = "SELECT file_id,lib_name,file_name,user_id FROM " + config.DB_PKGS + \
          " WHERE pack_state='%s';"
    return db.execute(sql, (config.DBPS_FIND,))
#----------#
# get taxonomy: posts id
def db_get_posts_taxonomy(db, id):
    # Return the first term_taxonomy_id attached to post `id` whose taxonomy
    # entry has parent == 0 (i.e. a top-level category), or None when the
    # post has no top-level term.
    sql = "SELECT term_taxonomy_id FROM " + config.DB_TERM_RELATIONSHIPS + \
          " WHERE object_id='%s';"
    for res in db.execute(sql, (id,) ):
        # look up the parent of each related taxonomy entry
        sql = "SELECT parent FROM " + config.DB_TERM_TAXONOMY + \
          " WHERE term_taxonomy_id='%s';"
        for r in db.execute(sql, (res[0],) ):
            # parent 0 -> top-level term: this is the one we want
            if int(r[0]) == 0:
                return res[0]
        #
    #
    return None
#----------#
# get terms by parent
def db_get_terms(db, parent):
    """Return term_ids whose taxonomy parent equals `parent`
    (parent 0 selects the top-level terms)."""
    sql = "SELECT term_id FROM " + config.DB_TERM_TAXONOMY + \
        " WHERE parent='%s';"
    result = db.execute(sql, (parent,))
    return result
#----------#
# get user name by id
def db_get_user_name(db, id):
    """Return user `id`'s nicename as a UTF-8 byte string, or None if absent."""
    sql = "SELECT user_nicename FROM " + config.DB_USERS + \
          " WHERE ID='%s';"
    rows = db.execute(sql, (id,))
    for row in rows:
        return row[0].encode('utf8')
    return None
#----------#
# get post type
def db_get_posts_type_name(db, id):
    """Return the term name for term_id `id` as a UTF-8 byte string, or None."""
    sql = "SELECT name FROM " + config.DB_TERMS + \
          " WHERE term_id='%s';"
    rows = db.execute(sql, (id,))
    for row in rows:
        return row[0].encode('utf8')
    return None
#----------#
# change posts pack_state
def db_posts_set_state(db, id, state):
    """Update the pack_state column of post `id` to `state`."""
    sql = "UPDATE " + config.DB_POSTS + \
          " SET pack_state='%s' WHERE ID='%s'"
    db.execute(sql, (state, id))
#----------#
def db_pkgs_set_state(db, id, state):
    """Update the pack_state column of package row `id` to `state`."""
    sql = "UPDATE " + config.DB_PKGS + \
          " SET pack_state='%s' WHERE file_id='%s'"
    db.execute(sql, (state, id))
#----------#
def db_pkgs_haslib_by_name(db, lib_name):
    """Return True if any package row has this lib_name, else False.

    Fix: lib_name is passed as an execute() parameter like every other
    query in this module, instead of being %-interpolated straight into
    the SQL string (injection-prone and inconsistent with siblings).
    """
    sql = "SELECT file_id FROM " + config.DB_PKGS + \
          " WHERE lib_name='%s';"
    for res in db.execute(sql, (lib_name,)):
        return True
    return False
#----------#
def db_pkgs_haslib_by_zip(db, file_name):
    """Return True if any package row references zip file_name, else False.

    Fixes: parameterised query (consistent with the rest of this module)
    and returns False instead of None on the no-match path, matching
    db_pkgs_haslib_by_name (callers only test truthiness, so compatible).
    """
    sql = "SELECT lib_name FROM " + config.DB_PKGS + \
          " WHERE file_name='%s';"
    for res in db.execute(sql, (file_name,)):
        return True
    return False
#--------------------#

#--------------------#
#----------#
# on shutdown
def on_shutdown():
    # Stop the scheduler without waiting for running jobs, then dismiss and
    # join all thread-pool workers so the process can exit cleanly.
    sched.shutdown(False)
    print('[scheduler] pool stopping...')
    pool_stop()
    print('[scheduler] pool stopped.')
#----------#
# check posts need pack?
def check_posts():
    """Collect every top-level term that has a name.

    Returns a list of {'id': term_id, 'type': term name} dicts, one per
    category whose posts should be packed.
    """
    results = []
    with db_get_db() as db:
        for row in db_get_terms(db, 0):
            name = db_get_posts_type_name(db, row[0])
            if name:
                results.append({'id': row[0], 'type': name})
    return results
#----------#
# check pkgs need pack?
def check_pkgs():
    """Collect the packages waiting to be packed.

    Returns a list of dicts with keys 'id', 'lib_name', 'file_name',
    'user_id' (names encoded to UTF-8 byte strings).
    """
    pending = []
    with db_get_db() as db:
        for row in db_get_pkgs(db):
            pending.append({
                'id': row[0],
                'lib_name': row[1].encode('utf8'),
                'file_name': row[2].encode('utf8'),
                'user_id': row[3],
            })
    return pending
#----------#
def set_prots(args, lib_name, lib_dir, new_dir):
    """Swap a freshly-built lib into service.

    Closes the running lib, replaces new_dir/lib_name with the build from
    lib_dir, then re-opens it. Returns 0 on success or one of the
    config.DBPS_FAILED_* codes on error.

    Idiom fix: the optional args['opentype'] lookup uses dict.get instead
    of a bare try/except that could hide unrelated errors.
    """
    print('[tdpool] set_prots : lib:%s. id:%s.' % (lib_name, args['id']))
    # close lib
    ret = prots_close_lib(lib_name)
    if ret != 0:
        print('[tdpool] set_prots : Error close lib %s. id:%s.' % (lib_name, args['id']))
        return config.DBPS_FAILED_CLOSE_LIB

    new_lib_dir = new_dir + '/' + lib_name

    # delete the old deployment, then recreate the target directory
    tool_deldirfile(new_lib_dir)
    tool_mkdirs(new_lib_dir)

    # copy the new build into place
    ret = tool_cp_lib(lib_dir, new_dir)
    if ret != 0:
        print('[tdpool] set_prots : Error copy lib %s. id:%s.' % (lib_name, args['id']))
        return config.DBPS_FAILED_COPY_LIB

    # remove the build directory now that it has been copied
    tool_deldirfile(lib_dir)

    # open lib; jobs may override the lib file extension via args['opentype']
    open_type = args.get('opentype', '.src')
    file = new_lib_dir + '/' + lib_name + open_type
    ret = prots_open_lib(file)
    if ret != 0:
        print('[tdpool] set_prots : Error open lib %s. id:%s.' % (lib_name, args['id']))
        return config.DBPS_FAILED_OPEN_LIB

    return 0
#--------------------#

#--------------------#
# jobs
#----------#
# tasks
def job_test(name, sleep_time, a, b, c='c'):
    """Demo pool job: log its arguments, sleep sleep_time seconds, log again.

    Always returns 'job_test'. Fixes: Python-2 print statements converted
    to print() calls (consistent with the rest of this file and Python-3
    compatible); the local `id` renamed so it no longer shadows the builtin.
    """
    job_id = int(time.time() % 1000)
    print('[tdpool] job_test %s: %s, %s, %s ,id:%s' % (name, a, b, c, job_id))
    time.sleep(sleep_time)
    print('[tdpool] job_test %s: done, %s' % (name, job_id))
    return 'job_test'
#----------#
def job_posts(args):
    """Pool job: build the posts lib for one top-level category and deploy it.

    args: {'id': term_id, 'type': category name}.
    Fixes: `raise 'Error'` replaced with a real exception (raising a string
    is illegal and only "worked" because the bare except caught the
    resulting TypeError); the handler is narrowed to Exception.
    """
    print('[tdpool] job_posts : begin. %s' % args)
    with db_get_db() as db:
        lib_name = args['type'] + '博文库'

        try:
            # todo: per-type lib path?
            lib_dir = config.CM_POSTS_LIB_PATH
            #
            ret = colemake_posts(args['id'], lib_dir)
            if ret != 0:
                raise RuntimeError('colemake_posts failed')

            ret = set_prots(args, lib_name, lib_dir + '/' + lib_name, config.CM_USING_POSTS_PATH)
            if ret != 0:
                print('[tdpool] job_posts : Error set_prots. id:%s.' % args['id'])
                return

            print('[tdpool] job_posts : Done. %s' % args)
        except Exception:
            print('[tdpool] job_posts : Error. id:%s.' % args['id'])
    print('[tdpool] job_posts : done. id:%s.' % args['id'])
#----------#
def job_pkgs(args):
    """Pool job: unpack one uploaded zip, build its lib and deploy it.

    args: {'id': file_id, 'lib_name': ..., 'file_name': ..., 'user_id': ...}.
    pack_state is updated in the DB at every stage so the web side can show
    progress and failures. Fixes: `raise 'Error'` replaced with a real
    exception (raising a string is illegal); the handler is narrowed from
    a bare except to Exception.
    """
    print('[tdpool] job_pkgs : begin. %s' % args)
    with db_get_db() as db:
        nicename = db_get_user_name(db, args['user_id']) or 'NoName'
        lib_name = nicename + '_' + args['lib_name']
        file_name = config.CM_PKGS_PKGS_PATH + args['file_name']
        unpack_path = config.CM_PKGS_UNPACK_PATH + lib_name
        lib_dir = config.CM_PKGS_LIB_PATH

        # delete old dirs
        tool_deldirfile(unpack_path)
        # make new dirs
        tool_mkdirs(unpack_path)

        #----------#
        try:
            # pack_state to 1 for start
            db_pkgs_set_state(db, args['id'], config.DBPS_START)

            # unpack
            ret = tool_unpack_zip(file_name, unpack_path)
            if ret != 0:
                # pack_state to 3 for error
                db_pkgs_set_state(db, args['id'], config.DBPS_FAILED_UNPACK)
                print('[tdpool] job_pkgs : Error unpack. id:%s.' % args['id'])
                # delete old dirs
                tool_deldirfile(unpack_path)
                return

            # convert office documents to plain text before packing
            tool_trans_to_text(unpack_path)

            # make cole
            ret = colemake_pkgs(lib_name,
                tool_get_colemake_dir(unpack_path),
                lib_dir )
            if ret != 0:
                raise RuntimeError('colemake_pkgs failed')

            ret = set_prots(args, lib_name, lib_dir + '/' + lib_name, config.CM_USING_PKGS_PATH)
            if ret != 0:
                db_pkgs_set_state(db, args['id'], ret)
                print('[tdpool] job_pkgs : Error set_prots. id:%s.' % args['id'])
                # delete old dirs
                tool_deldirfile(unpack_path)
                return

            # pack_state to 2 for success
            db_pkgs_set_state(db, args['id'], config.DBPS_OK)

            print('[tdpool] job_pkgs : Done. %s' % args)
        except Exception:
            # pack_state to 3 for error
            db_pkgs_set_state(db, args['id'], config.DBPS_FAILED)
            print('[tdpool] job_pkgs : Error. id:%s.' % args['id'])
        #----------#

        # delete old dirs
        tool_deldirfile(unpack_path)
    print('[tdpool] job_pkgs : done. id:%s' % args['id'])
#----------#
def job_itbase():
    """Rebuild the itbase lib via colemake and deploy it with set_prots."""
    #> cp (/path+itbase) /disk2/coledata/config/itlib
    #> set_prots
    print('[tdpool] job_itbase : begin')

    lib_name = 'itbase'
    lib_dir = config.CM_ITBASE_LIB_PATH

    if colemake_itbase(lib_dir) != 0:
        print('[tdpool] job_itbase : Error. colemake')
        return

    # itbase opens with a .rec file rather than the default .src
    args = {
        'id': 0,
        'opentype': '.rec',
    }
    if set_prots(args, lib_name, lib_dir + '/' + lib_name, config.CM_USING_ITBASE_PATH) != 0:
        print('[tdpool] job_itbase : Error. ')
        return

    print('[tdpool] job_itbase : done. ')
#----------#
def job_unuse_pkgs():
    """Garbage-collect deployed libs and uploaded zips with no DB row.

    Bug fix: deployed directory names are '<nicename>_<lib_name>' and the
    lib_name part may itself contain underscores, so we split on the FIRST
    '_' only; the original d.split('_') raised ValueError for such names.
    NOTE(review): a nicename containing '_' would still split wrongly —
    confirm nicenames cannot contain underscores.
    """
    print('[tdpool] job_unuse_pkgs : begin')
    with db_get_db() as db:
        dirs = base.listdir(config.CM_USING_PKGS_PATH)
        for d in dirs:
            u, lib_name = d.split('_', 1)
            if db_pkgs_haslib_by_name(db, lib_name):
                continue
            # close lib
            prots_close_lib(lib_name)
            # delete lib
            print('[tdpool] job_unuse_pkgs : delete lib: %s' % (lib_name) )
            fd = base.path_join(config.CM_USING_PKGS_PATH, d)
            tool_deldirfile(fd)
        # for zips
        dirs = base.listdir(config.CM_PKGS_PKGS_PATH)
        for d in dirs:
            if db_pkgs_haslib_by_zip(db, d):
                continue
            # delete lib
            print('[tdpool] job_unuse_pkgs : delete zip: %s' % (d) )
            fd = base.path_join(config.CM_PKGS_PKGS_PATH, d)
            tool_deldirfile(fd)
    print('[tdpool] job_unuse_pkgs : done. ')
#--------------------#

#--------------------#
#----------#
# main job posts, it will run every 1 day.
def job_main_posts():
    """Daily scheduler entry: queue a pack job for every top-level category.

    Fix: Python-2 print statements converted to print() calls, consistent
    with the rest of this file and Python-3 compatible.
    """
    print('[scheduler] check_posts: ...')
    # check posts
    datas = check_posts()
    # pack posts
    for data in datas:
        pool_submit_job(job_posts, data)
    print('[scheduler] check_posts: done.')
#----------#
# main job pkgs, it will run every 5 minutes.
def job_main_pkgs():
    """5-minute scheduler entry: queue a pack job for every pending package.

    Fix: Python-2 print statements converted to print() calls, consistent
    with the rest of this file and Python-3 compatible.
    """
    print('[scheduler] check_pkgs: ...')
    # check pkgs
    datas = check_pkgs()
    # pack pkgs
    for data in datas:
        pool_submit_job(job_pkgs, data)
    print('[scheduler] check_pkgs: done')
#----------#
def job_main_itbase():
    """Scheduler entry: rebuild the itbase lib (runs inline, not pooled).

    Fix: Python-2 print statements converted to print() calls, consistent
    with the rest of this file and Python-3 compatible.
    """
    print('[scheduler] check_itbase: ...')
    #pool_submit_job(job_itbase)
    job_itbase()
    print('[scheduler] check_itbase: done')
#----------#
def job_del_unuse_pkgs():
    """Scheduler entry: purge unused package libs/zips (runs inline).

    Fix: Python-2 print statements converted to print() calls, consistent
    with the rest of this file and Python-3 compatible.
    """
    print('[scheduler] unuse_pkgs: ...')
    #pool_submit_job(job_unuse_pkgs)
    job_unuse_pkgs()
    print('[scheduler] unuse_pkgs: done')
#--------------------#

#--------------------#
# signals
#----------#
def signal_handler(signum, frame):
    # SIGINT handler: stop the scheduler and drain the worker pool so the
    # process exits cleanly on Ctrl+C.
    print('[scheduler] Stopping...')
    on_shutdown()
    print('[scheduler] Stopped.')
#----------#

#--------------------#
# Install the Ctrl+C handler so workers are drained before exit.
signal.signal(signal.SIGINT, signal_handler)
# posts job (schedule kwargs come from config, e.g. daily)
sched.add_job(job_main_posts, **config.SCHED_POSTS_TIME)
# pkgs job (e.g. every 5 minutes)
sched.add_job(job_main_pkgs, **config.SCHED_PKGS_TIME)
# itbase rebuild job
sched.add_job(job_main_itbase, **config.SCHED_ITBASE_TIME)
# unused-package cleanup job
sched.add_job(job_del_unuse_pkgs, **config.SCHED_UNUSE_PKGS_TIME)
# start; BlockingScheduler.start() blocks this thread until shutdown
print('[scheduler] Running... (Press Ctrl+C to stop.)')
sched.start()
#--------------------#
