# coding: utf-8
#--------------------#
# Coder  : Dzlua
# Email  : 505544956@qq.com
# module : scheduler
# file   : run.py
# Time   : 2017/09/29
#--------------------#
import os
import shutil
import signal
import time

import threadpool
from apscheduler.schedulers.blocking import BlockingScheduler

import config
from database import DataBase

#--------------------#
# module-wide scheduler; sched.start() at the bottom of the file blocks the
# main thread until shutdown
sched = BlockingScheduler()
sched.daemonic = False
# default
# shared worker pool for pack jobs, sized by config.TP_THREAD_NUM
tdpool = threadpool.ThreadPool(config.TP_THREAD_NUM)

#--------------------#
# tool functions
#----------#
# fun
def tool_reduce(lst):
    """Return a new list with duplicates removed, preserving first-seen order.

    Bug fix: the original built the deduplicated list with reduce() but
    discarded the result and returned the input list unchanged.
    """
    deduped = []
    for item in lst:
        if item not in deduped:
            deduped.append(item)
    return deduped
#----------#
# make dirs
def tool_mkdirs(path):
    """Create *path* (and any missing parents); ignore failures.

    Catches OSError specifically (already-exists, permission denied) instead
    of a bare except that would also swallow KeyboardInterrupt/SystemExit.
    """
    try:
        os.makedirs(path)
    except OSError:
        pass
#----------#
# delete file or dirs
def tool_deldirfile(path):
    """Best-effort removal of a file or directory tree at *path*.

    Replaces the shell call `rm -rf %s`: the unquoted interpolation broke on
    paths containing spaces and let shell metacharacters in *path* execute
    (command injection).  Deleting via the standard library avoids the shell
    entirely; errors are still swallowed to keep best-effort semantics.
    """
    try:
        if os.path.isdir(path) and not os.path.islink(path):
            shutil.rmtree(path)
        elif os.path.lexists(path):
            os.remove(path)
    except OSError:
        pass
#----------#
# find dir
def tool_get_colemake_dir(root_dir):
    """Walk down from *root_dir*, skipping directories that hold exactly one
    entry, and return the first directory containing anything else; falls
    back to *root_dir* when the walk yields no such directory."""
    for current, subdirs, filenames in os.walk(root_dir):
        entry_count = len(subdirs) + len(filenames)
        if entry_count == 1:
            continue
        return current
    return root_dir
#----------#
# unpack zip package
def tool_unpack_zip(file, to_path):
    """Unpack zip archive *file* into directory *to_path* via `unzip -q`.

    Returns the `unzip` exit status (0 on success).  Bug fix: `ret` was
    assigned only inside the try block, so an exception from os.system made
    the final `return ret` raise NameError; it now defaults to a non-zero
    failure code.
    """
    print('[tdpool] tool_unpack_zip: unpacking...')
    #
    cmd = "unzip -q '%s' -d '%s'" % (file, to_path)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] tool_unpack_zip: cmd: %s' % cmd)
    #
    ret = -1  # failure unless os.system reports otherwise
    try:
        ret = os.system(cmd)
        print('[tdpool] tool_unpack_zip: unpack done.')
    except OSError:
        print('[tdpool] tool_unpack_zip: Error. cmd: %s' % cmd)
    #
    return ret
#----------#
# cole make posts
def colemake_posts(id, lib_dir):
    """Run the colemake binary to build blog posts.

    id      -- top-level category (fenlei) id passed to colemake
    lib_dir -- directory where the generated lib is stored
    Returns the command's exit status, or -1 if it could not be run.
    """
    #> colemake mem blog fenlei_id(top-level category) path(lib storage dir)
    cmd = "%s mem blog %s '%s'" \
        % (config.CM_BIN, id, lib_dir)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] colemake_posts: cmd: %s' % cmd)
    #
    ret = -1  # bug fix: `ret` was unbound here if os.system raised
    try:
        ret = os.system(cmd)
    except OSError:
        print('[tdpool] colemake_posts: Error. cmd: %s' % cmd)
    #
    return ret
#----------#
# cole make pkgs
def colemake_pkgs(lib_name, src_dir, lib_dir):
    """Run the colemake binary to build a package library.

    lib_name -- name of the library to produce
    src_dir  -- unpacked source directory
    lib_dir  -- directory where the generated lib is stored
    Returns the command's exit status, or -1 if it could not be run.
    """
    #> colemake all mem sourcedir libname libdir
    cmd = "%s all mem '%s' %s '%s'" \
        % (config.CM_BIN, src_dir, lib_name, lib_dir)
    if config.SCHED_SHOW_INFO:
        print('[tdpool] colemake_pkgs: cmd: %s' % cmd)
    #
    ret = -1  # bug fix: `ret` was unbound here if os.system raised
    try:
        ret = os.system(cmd)
    except OSError:
        print('[tdpool] colemake_pkgs: Error. cmd: %s' % cmd)
    #
    return ret
#----------#
# pool
def pool_submit_job(fun, *args, **kwargs):
    """Submit fun(*args, **kwargs) to the module thread pool.

    threadpool's worker callback receives a single "params" argument, which
    is ignored; the real call arguments are captured in the closure.
    """
    def _fun(params):
        return fun(*args, **kwargs)
    reqs = threadpool.makeRequests(_fun, [''])
    # plain loop: a list comprehension used only for side effects is wasteful
    for req in reqs:
        tdpool.putRequest(req)
    if config.SCHED_SHOW_INFO:
        # bug fix: guard args[0] so a call with no positional args
        # cannot raise IndexError while merely logging
        print('[scheduler] pool_submit_job: %s' % (args[0] if args else fun))
    tdpool.poll()
#----------#
# pool stop
def pool_stop():
    # Dismiss every worker; the second argument (False) presumably means
    # "do not join inside dismissWorkers" -- TODO confirm against the
    # threadpool package's API.
    tdpool.dismissWorkers(len(tdpool.workers), False)
    # Then wait for the dismissed worker threads to actually terminate.
    if tdpool.dismissedWorkers:
        tdpool.joinAllDismissedWorkers()
#----------#
# database tool
def db_get_db():
    # Build a DataBase handle from the config constants; callers use the
    # returned object as a context manager (with db_get_db() as db: ...).
    return DataBase(config.DB_HOST, config.DB_USER,
                config.DB_PASSWORD, config.DB_DATABASE,
                config.DB_SHOW_INFO )
#----------#
# get posts
def db_get_posts(db):
    """Return rows of post IDs that are published and whose pack_state
    equals config.DBPS_FIND (i.e. still waiting to be packed)."""
    table = config.DB_POSTS
    sql = ("SELECT ID FROM " + table +
           " WHERE post_status='publish' and pack_state='%s';")
    return db.execute(sql, (config.DBPS_FIND,))
#----------#
# get pkgs
def db_get_pkgs(db):
    """Return (file_id, lib_name, file_name) rows whose pack_state equals
    config.DBPS_FIND (i.e. packages still waiting to be packed)."""
    table = config.DB_PKGS
    sql = ("SELECT file_id,lib_name,file_name FROM " + table +
           " WHERE pack_state='%s';")
    return db.execute(sql, (config.DBPS_FIND,))
#----------#
# get taxonomy: posts id
def db_get_posts_taxonomy(db, id):
    """Return the first term_taxonomy_id attached to post *id* whose
    taxonomy row has parent == 0 (a top-level category), or None if the
    post has no top-level taxonomy."""
    sql = "SELECT term_taxonomy_id FROM " + config.DB_TERM_RELATIONSHIPS + \
          " WHERE object_id='%s';"
    for res in db.execute(sql, (id,) ):
        # look up the parent of each taxonomy attached to the post
        # (note: `sql` is deliberately rebound for the inner query)
        sql = "SELECT parent FROM " + config.DB_TERM_TAXONOMY + \
          " WHERE term_taxonomy_id='%s';"
        for r in db.execute(sql, (res[0],) ):
            # parent 0 means top-level: this is the category we want
            if int(r[0]) == 0:
                return res[0]
        #
    #
    return None
#----------#
# change posts pack_state
def db_posts_set_state(db, id, state):
    """Set the pack_state column of post *id* to *state*."""
    sql = ("UPDATE " + config.DB_POSTS +
           " SET pack_state='%s' WHERE ID='%s'")
    db.execute(sql, (state, id))
#----------#
def db_pkgs_set_state(db, id, state):
    """Set the pack_state column of package *id* (file_id) to *state*."""
    sql = ("UPDATE " + config.DB_PKGS +
           " SET pack_state='%s' WHERE file_id='%s'")
    db.execute(sql, (state, id))
#----------#
# on shutdown
def on_shutdown():
    # Stop the scheduler without waiting for running jobs (wait=False),
    # then dismiss and join the worker thread pool.
    sched.shutdown(False)
    print('[scheduler] pool stopping...')
    pool_stop()
    print('[scheduler] pool stopped.')
#----------#
# check posts need pack?
def check_posts():
    """Collect published posts still waiting to be packed.

    Returns a list of {'id': post_id, 'taxonomy': top_level_taxonomy_id}
    dicts; posts with no top-level taxonomy are skipped."""
    pending = []
    with db_get_db() as db:
        for row in db_get_posts(db):
            taxonomy = db_get_posts_taxonomy(db, row[0])
            if taxonomy:
                pending.append({'id': row[0], 'taxonomy': taxonomy})
    return pending
#----------#
# check pkgs need pack?
def check_pkgs():
    """Collect package rows still waiting to be packed.

    Returns a list of {'id', 'lib_name', 'file_name'} dicts."""
    with db_get_db() as db:
        return [
            {'id': row[0], 'lib_name': row[1], 'file_name': row[2]}
            for row in db_get_pkgs(db)
        ]
#----------#

#--------------------#
# jobs
#----------#
# tasks
def job_test(name, sleep_time, a, b, c='c'):
    """Demo pool job: log its arguments, sleep, and return a marker string.

    Fixes: print-statement syntax made the file inconsistent (the rest of
    the module uses print() calls, which emit identical output here) and
    blocked Python 3; local `id` shadowed the builtin.
    """
    job_id = int(time.time() % 1000)
    print('[tdpool] job_test %s: %s, %s, %s ,id:%s' % (name, a, b, c, job_id))
    time.sleep(sleep_time)
    print('[tdpool] job_test %s: done, %s' % (name, job_id))
    return 'job_test'
#----------#
def job_posts(args):
    """Pool job: pack one post via colemake.

    args -- dict with 'id' (post ID) and 'taxonomy' (top-level category id).
    pack_state transitions: DBPS_START on entry, then DBPS_OK on success or
    DBPS_FAILED on any error.
    """
    print('[tdpool] job_posts : %s' % args)
    with db_get_db() as db:
        try:
            # pack_state to 1 for start
            db_posts_set_state(db, args['id'], config.DBPS_START)

            # todo: lib_dir is currently fixed by config
            lib_dir = config.CM_POSTS_LIB_PATH
            #
            ret = colemake_posts(args['taxonomy'], lib_dir)
            if ret != 0:
                # bug fix: `raise 'Error'` is a string exception, invalid
                # since Python 2.6 -- raise a real exception instead
                raise RuntimeError('colemake_posts failed: %s' % ret)

            # pack_state to 2 for success
            db_posts_set_state(db, args['id'], config.DBPS_OK)
            print('[tdpool] job_posts : Done. %s' % args)
        except Exception:
            # pack_state to 3 for error; narrowed from a bare except so
            # KeyboardInterrupt/SystemExit still propagate
            db_posts_set_state(db, args['id'], config.DBPS_FAILED)
            print('[tdpool] job_posts : Error. id:%s.' % args['id'])
#----------#
def job_pkgs(args):
    """Pool job: unpack one uploaded zip package and build it via colemake.

    args -- dict with 'id' (file_id), 'lib_name', and 'file_name'.
    pack_state transitions: DBPS_START on entry, then DBPS_OK on success,
    DBPS_FAILED_UNPACK if unzip fails, or DBPS_FAILED on any other error.
    The temporary unpack directory is removed again afterwards.
    """
    print('[tdpool] job_pkgs : %s' % args)

    file_name = config.CM_PKGS_PKGS_PATH + args['file_name']
    unpack_path = config.CM_PKGS_UNPACK_PATH + args['lib_name']
    lib_dir = config.CM_PKGS_LIB_PATH

    # delete old dirs
    tool_deldirfile(unpack_path)
    # make new dirs
    tool_mkdirs(unpack_path)

    with db_get_db() as db:
        try:
            # pack_state to 1 for start
            db_pkgs_set_state(db, args['id'], config.DBPS_START)

            # unpack
            ret = tool_unpack_zip(file_name, unpack_path)
            if ret != 0:
                # pack_state to 3 for error
                db_pkgs_set_state(db, args['id'], config.DBPS_FAILED_UNPACK)
                print('[tdpool] job_pkgs : Error unpack. id:%s.' % args['id'])
                return

            # make cole
            ret = colemake_pkgs(args['lib_name'],
                tool_get_colemake_dir(unpack_path),
                lib_dir )
            if ret != 0:
                # bug fix: `raise 'Error'` is a string exception, invalid
                # since Python 2.6 -- raise a real exception instead
                raise RuntimeError('colemake_pkgs failed: %s' % ret)

            # pack_state to 2 for success
            db_pkgs_set_state(db, args['id'], config.DBPS_OK)
            print('[tdpool] job_pkgs : Done. %s' % args)
        except Exception:
            # pack_state to 3 for error; narrowed from a bare except so
            # KeyboardInterrupt/SystemExit still propagate
            db_pkgs_set_state(db, args['id'], config.DBPS_FAILED)
            print('[tdpool] job_pkgs : Error. id:%s.' % args['id'])
    #----------#
    # delete old dirs
    tool_deldirfile(unpack_path)
#----------#

#--------------------#
#----------#
# main job posts, it will run every 1 day.
def job_main_posts():
    """Scheduled entry (daily): queue a pool job for every post that still
    needs packing.

    Fix: converted Python-2-only print statements to print() calls -- the
    output is identical and the style matches the rest of the module.
    """
    print('[scheduler] check_posts: ...')
    # check posts
    datas = check_posts()
    # pack posts
    for data in datas:
        pool_submit_job(job_posts, data)
    print('[scheduler] check_posts: done.')
#----------#
# main job pkgs, it will run every 5 minutes.
def job_main_pkgs():
    """Scheduled entry (every 5 minutes): queue a pool job for every package
    that still needs packing.

    Fix: converted Python-2-only print statements to print() calls -- the
    output is identical and the style matches the rest of the module.
    """
    print('[scheduler] check_pkgs: ...')
    # check pkgs
    datas = check_pkgs()
    # pack pkgs
    for data in datas:
        pool_submit_job(job_pkgs, data)
    print('[scheduler] check_pkgs: done')
#----------#

#--------------------#
# signals
#----------#
def signal_handler(signum, frame):
    # SIGINT (Ctrl+C) handler: shut down the scheduler and the thread pool
    # cleanly before the process exits.
    print('[scheduler] Stopping...')
    on_shutdown()
    print('[scheduler] Stopped.')
#----------#

#--------------------#
# handle signal
# handle signal: install the Ctrl+C (SIGINT) handler for a clean shutdown
signal.signal(signal.SIGINT, signal_handler)
# add jobs posts (scheduling kwargs -- trigger/interval -- come from config)
sched.add_job(job_main_posts, **config.SCHED_POSTS_TIME)
# add jobs pkgs
sched.add_job(job_main_pkgs, **config.SCHED_PKG_TIME)
# start -- BlockingScheduler.start() blocks the main thread until shutdown
print('[scheduler] Running... (Press Ctrl+C to stop.)')
sched.start()
#--------------------#
