#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# File: mfinger.py
# Date: 2008-10-28
# Author: tbao@tudou.com
#

"""
获得该台机器上的存储的 DNA 的列表 的 DB
执行指纹匹配
并将匹配到的结果post到中央服务器上
"""

import os
import re
import time
import sys
import types
import socket
import traceback
import commands
import urlparse
import thread
import threading
import copy
import mylog
import subwork
import netutil
import setting

# All sockets opened by this process time out after 60 seconds.
socket.setdefaulttimeout(60)

# --- shared state between the worker threads ---
# G_JOB_LIST:    jobs fetched from the central server, e.g. [[job_id, key_url], ...]
# G_RESULT_LIST: match results produced by exe_mrserver(): [[job_id, db_name, msg, step], ...]
# G_POST_LIST:   results queued for posting back: [[job_id, db_name, msg, step, post_times], ...]
G_JOB_LIST   =[]
G_RESULT_LIST=[]
G_POST_LIST  =[]
# one lock per shared list
G_LOCK_JOB   =thread.allocate_lock()
G_LOCK_RESULT=thread.allocate_lock()
G_LOCK_POST  =thread.allocate_lock()
# paths (filled in by reload_config_ini() from the `setting` module)
DNA_STORAGE_ROOT=None
DNA_DB_ABS_DIR  =None
SAFE_FILE       =None
# wget command template: quiet, 60s network timeout, 3 retries
WGET_CMD='wget -q -T 60 -t 3  %(_src)s  -O %(_dst)s'
# Timeouts / limits (filled in by reload_config_ini())
WAIT_FROM_SERVER= None
WGET_TIMEOUT    = None
MATCH_TIMEOUT   = None
MAX_POST_TIMES  = None
MAX_JOBS_LIST   = None
# Central server connection info
SHOST=None
PORT=None
md5key=None
# local hostname, see init_hostname()
HOSTNAME=None
# mrserver binary & its data files (filled in by reload_config_ini())
getMrserver=None
getBoosttxt=None
getEmparams=None
getFivess=None
# URL templates on the central server (hostname / job_id substituted in)
url_reporterror="/y/finger/reporterror/%s/%s/"
url_getjob     ="/y/finger/getjob/%s/"
url_postfinish ="/y/finger/postfinish/%s/%s/"
url_postchunk  ="/y/finger/postchunk/%s/%s/"
# 1-minute loadavg above which this host is considered too busy to work
MIN_LOADING=30
# regex matching the mrserver process name in `ps -ef` output
e_mrserver=r'mrserver2\.2\.3\.exe'


#
def init_hostname():
    """
    Initialise the module-level HOSTNAME: first via socket.gethostname(),
    falling back to the external `hostname` command when that comes back
    empty.

    Returns True when the lookup completed (even if HOSTNAME stayed
    empty, matching the original contract), False on any exception.
    """
    global HOSTNAME
    try:
        HOSTNAME = socket.gethostname()
        if not HOSTNAME:
            # fall back to the shell `hostname` command
            rr_code, rr_txt = commands.getstatusoutput('hostname')
            if rr_code == 0:  # was `is 0`: never compare ints by identity
                HOSTNAME = rr_txt
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit escape
        return False
    return True

def check_loadavg():
    """
    Compare this machine's 1-minute load average against MIN_LOADING.

    Returns True when the load is acceptable, False when the machine is
    too busy (caller should sleep and retry) or the load cannot be read.
    """
    try:
        # only the 1-minute average matters here
        load1, _load5, _load15 = os.getloadavg()
    except OSError:
        # narrowed from a bare `except:`; os.getloadavg raises OSError on failure
        return False
    if load1 >= MIN_LOADING:
        # too busy: caller should sleep until the load drops
        return False
    return True

def pack_check_loadavg(_job_id=None):
    """
    Block until check_loadavg() reports the machine is idle enough.

    Polls every 10 seconds; while waiting it reports progress for the
    given _job_id (if any) and kills stray mrserver processes, then
    returns True once the load drops below MIN_LOADING.
    """
    while True:
        if not check_loadavg():
            msg = "check_loadavg() return False, msg=%s this server is busy now, loadavg > %s"%(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), MIN_LOADING)
            print msg
            if _job_id:
                # tell the log / central side this job is stalled on load
                report_process(-100, _job_id, msg)
            # make sure hight load is not me !
            pack_isAlive_mrserver()
            time.sleep(10)
            continue
        else:
            break
    #
    # return
    return True

def reload_config_ini():
    """
    Re-read all runtime configuration from the `setting` module into the
    module-level globals, refresh HOSTNAME, and dump the effective
    configuration to stdout.  Always returns True.
    """
    global DNA_STORAGE_ROOT
    global DNA_DB_ABS_DIR
    global SAFE_FILE
    global WAIT_FROM_SERVER
    # BUG FIX: WGET_TIMEOUT and MATCH_TIMEOUT were assigned below without
    # being declared global, so the assignments only created locals and the
    # module-level values silently stayed None.
    global WGET_TIMEOUT
    global MATCH_TIMEOUT
    global MAX_POST_TIMES
    global MAX_JOBS_LIST
    global MIN_LOADING
    global getMrserver
    global getBoosttxt
    global getEmparams
    global getFivess
    global PORT
    global SHOST
    global md5key
    global url_reporterror
    global url_getjob
    global url_postfinish
    global url_postchunk
    # refresh HOSTNAME first
    init_hostname()
    # URL templates
    url_reporterror = setting.url_reporterror
    url_getjob      = setting.url_getjob
    url_postfinish  = setting.url_postfinish
    url_postchunk   = setting.url_postchunk
    # paths
    DNA_STORAGE_ROOT = setting.DNA_STORAGE_ROOT
    DNA_DB_ABS_DIR   = setting.DNA_DB_ABS_DIR
    SAFE_FILE        = setting.SAFE_FILE
    # timeouts / limits
    WGET_TIMEOUT     = setting.WGET_TIMEOUT
    MATCH_TIMEOUT    = setting.MATCH_TIMEOUT
    MAX_POST_TIMES   = setting.MAX_POST_TIMES
    MIN_LOADING      = setting.MIN_LOADING
    MAX_JOBS_LIST    = setting.MAX_JOBS_LIST
    WAIT_FROM_SERVER = setting.WAIT_FROM_SERVER
    # central server info
    PORT             = setting.PORT
    SHOST            = setting.SHOST
    md5key           = setting.md5key
    # mrserver binary and its data files
    getMrserver      = setting.getMrserver
    getBoosttxt      = setting.getBoosttxt
    getEmparams      = setting.getEmparams
    # NOTE(review): getFivess is sourced from setting.getParameter -- confirm intended
    getFivess        = setting.getParameter
    # dump the effective configuration
    print("======================================")
    print("DNA_STORAGE_ROOT= %s" % DNA_STORAGE_ROOT)
    print("DNA_DB_ABS_DIR= %s" % DNA_DB_ABS_DIR)
    print("MIN_LOADING= %s" % MIN_LOADING)
    print("MAX_POST_TIMES= %s" % MAX_POST_TIMES)
    print("WAIT_FROM_SERVER= %s" % WAIT_FROM_SERVER)
    print("WGET_TIMEOUT= %s" % WGET_TIMEOUT)
    print("MATCH_TIMEOUT= %s" % MATCH_TIMEOUT)
    print("getMrserver= %s" % getMrserver)
    print("getBoosttxt= %s" % getBoosttxt)
    print("getEmparams= %s" % getEmparams)
    print("getFivess= %s" % getFivess)
    print("HOSTNAME= %s" % HOSTNAME)
    print("PORT= %s" % PORT)
    print("SHOST= %s" % SHOST)
    print("md5key= %s" % md5key)
    print("url_reporterror= %s" % url_reporterror)
    print("url_getjob= %s" % url_getjob)
    # BUG FIX: this line previously printed url_postfinish labelled as
    # "url_postchunk", and the real url_postchunk line was missing its '='
    print("url_postfinish= %s" % url_postfinish)
    print("url_postchunk= %s" % url_postchunk)
    print("======================================")
    return True


def get_db_list(dna_dir=None):
    """
    Collect the DNA database pairs stored under *dna_dir* (defaults to
    the configured DNA_DB_ABS_DIR, preserving the original call style).

    A pair is a `<stem>.fdb` file together with its matching `<stem>.kdb`
    file; pairs are returned sorted by their integer stem, newest
    (largest) first.

    Returns (True, [[x.fdb, x.kdb], ...]) on success,
            (False, error_message) otherwise.
    """
    if dna_dir is None:
        dna_dir = DNA_DB_ABS_DIR
    if not dna_dir:
        msg = "dna_dir is None."
        return False, msg
    if not os.path.exists(dna_dir):
        msg = "%s is not exists !" % dna_dir
        return False, msg
    if not os.path.isdir(dna_dir):
        msg = "%s is not a dir!" % dna_dir
        return False, msg
    # Scan the directory once (the original listed it twice) and pair the
    # .fdb/.kdb files by basename stem, instead of matching a regex built
    # from the absolute path, which broke when the path contained dots.
    fdb_by_stem = {}
    kdb_by_stem = {}
    for name in os.listdir(dna_dir):
        stem, _sep, ext = name.rpartition('.')
        if ext == 'fdb':
            fdb_by_stem[stem] = os.path.join(dna_dir, name)
        elif ext == 'kdb':
            kdb_by_stem[stem] = os.path.join(dna_dir, name)
    new_db = [[fdb_by_stem[s], kdb_by_stem[s]] for s in fdb_by_stem if s in kdb_by_stem]
    if new_db:
        # newest DB (largest numeric stem) first
        new_db = sorted(new_db, key=lambda d: int((os.path.basename(d[0])).split('.')[0]), reverse=True)
        print("new_db= %s" % len(new_db))
        return True, new_db
    return False, 'can not get right DNA db list.'

#
def kill9999(_pid):
    """
    Try hard to `kill -9` the given pid, retrying up to 10 times.

    Returns True once the process is confirmed gone (kill reports
    "No such process"), False when it could not be confirmed dead after
    all retries or _pid is falsy.
    """
    if not _pid:
        print("_pid is not usable!")
        return False
    for attempt in range(10):
        print("trying to kill process %s #%d times" % (_pid, attempt))
        cmd = 'kill -9 %s' % _pid
        try:
            out_code, out_text = commands.getstatusoutput(cmd)
        except Exception:  # narrowed from a bare except:
            print("commands.getstatusoutput(%s) Exception!" % cmd)
            time.sleep(1)
            continue
        print('%s out_code=%s, out_text=%s' % (_pid, out_code, out_text))
        if out_code == 0:
            # kill accepted the signal; process may still be dying
            print("this may killed pid, but i can not sure!")
        elif out_code == 256 and out_text.find("No such process") >= 0:
            # BUG FIX: was `> 0`, which missed a match at index 0
            print("kill -9 %s OK." % _pid)
            return True
        time.sleep(1)
    print("kill -9 %s Fail." % _pid)
    return False

#
def isAlive_mrserver():
    """
    Ensure no mrserver process survives on this machine: scan `ps -ef`
    for mrserver2.2.3.exe and kill -9 every instance found.

    Returns True when no mrserver process is left alive, False when the
    process list could not be read or some instance survived.
    """
    cmd = 'ps -ef'
    try:
        out_code, req = commands.getstatusoutput(cmd)
        print(out_code)
    except Exception:  # narrowed from a bare except:
        print("commands.getstatusoutput('ps -ef') Error")
        return False
    if out_code != 0:  # was `is 0`: never compare ints by identity
        return False
    lines = req.split('%s' % (os.linesep))
    # column 1 of `ps -ef` output is the pid
    live_mr = [(e.split())[1] for e in lines if re.search('%s' % e_mrserver, e)]
    print("len(living-mrserver2.2.3.exe)= %s" % live_mr)
    if not live_mr:
        return True
    # kill every zombie instance; count how many we failed to confirm dead
    m_len = len(live_mr)
    for pid in live_mr:
        if kill9999(pid):
            m_len -= 1
    return m_len == 0

#
def pack_isAlive_mrserver(_job_id=None):
    """
    Block until isAlive_mrserver() confirms that no stray mrserver
    process remains, polling every 10 seconds.  While waiting, report
    progress for the given _job_id (if any).  Always returns True.
    """
    clean = isAlive_mrserver()
    while not clean:
        msg = "isAlive_mrserver() return False, msg= there are some living mrserver process is running! "
        if _job_id:
            report_process(-100, _job_id, msg)
        time.sleep(10)
        clean = isAlive_mrserver()
    return True

#
def check_make_sure_one():
    """
    Verify that at most one mrserver2.2.3.exe is running.  When two or
    more are found, kill -9 all of them.

    Returns True when at most one instance is running (or every extra
    instance was confirmed killed), False when `ps -ef` failed or some
    instance survived.
    """
    cmd = 'ps -ef'
    try:
        out_code, req = commands.getstatusoutput(cmd)
        print("out_code= %s" % out_code)
    except Exception:  # narrowed from a bare except:
        print("commands.getstatusoutput('ps -ef') Error")
        return False
    if out_code != 0:  # was `is 0`: never compare ints by identity
        return False
    lines = req.split('%s' % (os.linesep))
    # column 1 of `ps -ef` output is the pid
    live_mr = [(e.split())[1] for e in lines if re.search('%s' % e_mrserver, e)]
    m_len = len(live_mr)
    if m_len < 2:
        # zero or one instance is the expected state
        return True
    print("len(living-mrserver2.2.3.exe)= %s" % live_mr)
    for pid in live_mr:
        if kill9999(pid):
            m_len -= 1
    return m_len == 0

#
def make_sure_one():
    """
    Loop until check_make_sure_one() succeeds (at most one mrserver
    process running), logging and retrying every 10 seconds.
    Always returns True.
    """
    ok = check_make_sure_one()
    while not ok:
        mylog.GenLog("ERROR", "check_make_sure_one() return False, msg= there are two mrserver2.2.3.exe are running!")
        time.sleep(10)
        ok = check_make_sure_one()
    return True



#
def clear_dirs(_dir=None, deep=False):
    """
    Delete every file directly inside *_dir*.

    When *deep* is True, additionally remove the (now empty) directory
    and climb upwards, removing each empty parent until a non-empty one
    is met.

    Returns (True, message) on success, (False, message) on failure.
    """
    if not _dir:
        msg = "_dir is unavailable !"
        return False, msg
    if not os.path.exists(_dir):
        msg = "%s is not exists !" % _dir
        return False, msg
    if not os.path.isdir(_dir):
        msg = "%s is not really dir !" % _dir
        return False, msg
    for n in os.listdir(_dir):
        tmp_abs_path = os.path.join(_dir, n)
        try:
            os.remove(tmp_abs_path)
        except OSError:
            # narrowed from a bare except:; os.remove raises OSError
            # (including when the entry is a subdirectory)
            msg = "os.remove(%s) is Exception !" % tmp_abs_path
            return False, msg
    if not deep:
        return True, "clear OK!"
    # Deep clean: os.removedirs removes _dir and then each empty ancestor,
    # stopping (with OSError) at the first non-empty one -- exactly the
    # climb the original implemented with fragile regex path surgery.
    try:
        os.removedirs(_dir)
    except OSError:
        pass
    return True, "clear OK !"

#
def pack_rename(src=None, dst=None):
    """
    Safely rename *src* to *dst*: validates both arguments, that src
    exists and that dst does not, then performs os.rename.

    Returns (True, message) on success, (False, message) on failure.
    """
    if not src:
        msg = 'src is unavailable !'
        return False, msg
    if not dst:
        msg = 'dst is unavailable !'
        return False, msg
    if not os.path.exists(src):
        msg = 'src=%s is not exists !' % src
        return False, msg
    if os.path.exists(dst):
        msg = 'dst=%s have exists !' % dst
        return False, msg
    try:
        os.rename(src, dst)
    except OSError:  # narrowed from a bare except:; os.rename raises OSError
        msg = "os.rename(%s, %s) Exception !" % (src, dst)
        return False, msg
    return True, "os.rename(src, dst) OK !"

#
def doCmd(cmd, timeout=None):
    """
    Run *cmd* through the project's `subwork` wrapper, optionally with a
    timeout (seconds), and collect its output.

    Returns (exit_code, stdout, stderr); exit_code is -1 when the
    subwork run failed, in which case a marker line is prepended to
    stderr.

    NOTE(review): if subwork.subwork() itself raises, `sw` is unbound
    and the except branch raises NameError -- confirm subwork's
    constructor cannot fail.
    """
    out_code, o ,e = 0, '', ''
    print "Start cmd ..."
    try:
        if not timeout:
            sw=subwork.subwork()
        elif timeout:
            sw=subwork.subwork(timeout)
        out_code = sw.run(subwork.split_cmd(cmd))
        o, e= sw.get_out()
    except:
        # collect whatever output exists and mark the failure with -1
        o, e= sw.get_out()
        out_code, e = -1, ['Error in subwork process...\n'] + e
        print out_code, o, e
    print "Done cmd ..."
    #
    # return
    return out_code, o, e


#
def _purge_dna_storage_root():
    """
    Best-effort `rm -fr` of the DNA_STORAGE_ROOT scratch tree (only when
    it exists and is non-empty).

    Returns (True, '') when nothing needed doing or the purge command was
    issued, (False, message) when os.system raised.
    """
    if os.path.exists(DNA_STORAGE_ROOT) and os.listdir(DNA_STORAGE_ROOT):
        cmd = "rm -fr %s" % DNA_STORAGE_ROOT
        try:
            print("os.system(%s)" % cmd)
            os.system(cmd)
        except Exception as e:
            msg = "os.system(%s) Exception, %s" % (cmd, e)
            mylog.GenLog("ERROR", msg)
            return False, msg
    return True, ''

def wget_key_from_server(job_id=None, _url=None):
    """
    Download the key file for *job_id* from *_url* into this host's DNA
    storage tree and create the accompanying key-list text file.

    The job id is zero-padded to 9 digits and split into a 3-level
    directory path, e.g. 1005 -> <DNA_STORAGE_ROOT>/000/001/005/.

    [OUT]
    (True, {'_key_dir': '/.../000/001/005/',
            '_key_txt': '/.../000/001/005/1005.flv.key.txt'})
    (False, error_message)
    """
    if not job_id:
        msg = "job_id is not unavailable !"
        return False, msg
    try:
        MID_STR = '%09d' % (int(job_id))
        if len(MID_STR) != 9:  # was `is not 9`: never compare ints by identity
            msg = "len(%s) is not 9 !" % MID_STR
            return False, msg
    except (TypeError, ValueError):  # narrowed from a bare except:
        msg = "int(%s) Error!" % job_id
        return False, msg
    # start from a clean scratch tree (the purge was duplicated inline twice
    # in the original; it now lives in _purge_dna_storage_root)
    purge_code, purge_msg = _purge_dna_storage_root()
    if not purge_code:
        return False, purge_msg
    sample_dir = os.path.join(DNA_STORAGE_ROOT, MID_STR[0:3], MID_STR[3:6], MID_STR[6:])
    if not os.path.exists(sample_dir):
        try:
            os.makedirs(sample_dir)
        except Exception as e:
            msg = "os.makedirs(%s, mode=0777) Exception, %s" % (sample_dir, e)
            mylog.GenLog("ERROR", msg)
            return False, msg
    else:
        # directory left over from a previous run: empty it
        clear_dirs(sample_dir, False)
    # download to a .tmp name first, then rename into place, so a partial
    # download can never be mistaken for a complete key file
    basename = os.path.basename((urlparse.urlparse(_url))[2])
    dst = os.path.join(sample_dir, basename)
    tmp_dst = os.path.join(sample_dir, basename + '.tmp')
    M_WGET_CMD = WGET_CMD % {'_src': _url, '_dst': tmp_dst}
    r_code, r_txt, r_err = doCmd(M_WGET_CMD, WGET_TIMEOUT)
    if r_code != 0:  # was `is not 0`
        msg = "doCmd(%s, %s) Error! msg=%s" % (M_WGET_CMD, WGET_TIMEOUT, r_err)
        purge_code, purge_msg = _purge_dna_storage_root()
        if not purge_code:
            return False, purge_msg
        return False, msg
    rename_code, rename_txt = pack_rename(tmp_dst, dst)
    if not rename_code:
        msg = "pack_rename(%s, %s) Error! msg=%s" % (tmp_dst, dst, rename_txt)
        return False, msg
    # write the key-list file ("<basename>.txt") naming the downloaded key;
    # the handle is now closed even if write() raises
    m_key_list = os.path.join(sample_dir, basename + '.txt')
    m_list_fd = open(m_key_list, 'a+')
    try:
        m_list_fd.write('%s' % basename + os.linesep)
    finally:
        m_list_fd.close()
    m_key_dir = os.path.dirname(dst)
    if not re.search(r'\/$', m_key_dir):
        m_key_dir = m_key_dir + '/'
    key_info = {'_key_dir': m_key_dir, '_key_txt': m_key_list}
    return True, key_info

#
def patch_url2hostabspath(_old_job=None):
    """
    Resolve a job description (id + key URL) into local absolute paths by
    downloading the key file from the central server.

    [IN]
    _old_job = {'_job_id': 1005, '_key_url': 'http://.../test_key.txt'}
    [OUT]
    (True, {'_job_id': 1005,
            '_key_dir': '/home/tommy/tmp/000/001/005/',
            '_key_txt': '/home/tommy/tmp/000/001/005/1005.txt'})
    (False, error_message)
    """
    if not (_old_job and isinstance(_old_job, dict)):
        msg = "%s is unavailable." % _old_job
        print(msg)
        return False, msg
    # dict.has_key() is deprecated (and gone in Python 3); use `in`
    if '_job_id' in _old_job:
        job_id = _old_job['_job_id']
    else:
        msg = "_old_job=%s is unavailable !" % _old_job
        return False, msg
    if '_key_url' in _old_job:
        m_key_url = _old_job['_key_url']
    else:
        msg = "_old_job=%s is unavailable !" % _old_job
        return False, msg
    wget_code, key_list = wget_key_from_server(job_id=job_id, _url=m_key_url)
    if not wget_code:
        msg = "wget_key_from_server(%s, %s) Error! msg=%s" % (job_id, m_key_url, key_list)
        return False, msg
    _new_job = {
        '_job_id': job_id,
        '_key_dir': key_list['_key_dir'],
        '_key_txt': key_list['_key_txt'],
    }
    return True, _new_job


#
def collect_result(_file_path):
    """
    Read the whole result file at *_file_path* and delete it afterwards
    (deletion is best-effort: a failed delete is only printed).

    Returns (True, contents) on success -- contents may be '' --
            (False, error_message) when the path is missing or not a file.
    """
    if not os.path.exists(_file_path):
        msg = "%s is not exists!" % _file_path
        return False, msg
    if not os.path.isfile(_file_path):
        msg = "%s is not a really file!" % _file_path
        return False, msg
    # `with` guarantees the handle is closed even if read() raises
    with open(_file_path, 'r') as fd:
        req = fd.read()
    try:
        os.remove(_file_path)
    except OSError:  # narrowed from a bare except:; deletion is best-effort
        print("os.remove(%s) Error." % _file_path)
    # read() already returns '' at EOF, so the original's
    # `if req: ... else: return True, ''` branch was redundant
    return True, req

#
def save_result2glist(m_job_id=None, m_basename=None, step=1, coll_txt=None):
    """
    Append one result record to the shared G_RESULT_LIST under its lock.

    Record layout: [job_id, db_basename, result_text, step] where step 1
    means an intermediate chunk and step 0 marks the job finished.

    Returns True on success, False on invalid arguments or lock trouble.
    """
    global G_RESULT_LIST
    if not m_job_id:
        print("m_job_id is unavailable!")
        return False
    if not m_basename:
        print("m_basename is unavailable!")
        return False
    # coll_txt may legitimately be empty (no matches), so it is not validated
    try:
        # BUG FIX: acquire() was inside the try/finally, so a failed
        # acquire triggered release() on a lock that was never held
        G_LOCK_RESULT.acquire()
        try:
            G_RESULT_LIST.append([m_job_id, m_basename, coll_txt, step])
        finally:
            G_LOCK_RESULT.release()
    except Exception:
        return False
    return True
        
#
def exe_mrserver(_new_job=None, _DNA_DB_LIST=None):
    """
    Run the mrserver matcher for one job against every DNA database pair.

    [IN]
    _new_job     = {'_job_id': 1005,
                    '_key_dir': '/home/tommy/tmp/000/001/005/',
                    '_key_txt': '/home/tommy/tmp/000/001/005/1005.txt'}
    _DNA_DB_LIST = [[1.fdb, 1.kdb], [2.fdb, 2.kdb], ...]

    Each run's result file is collected into G_RESULT_LIST as a chunk
    (step=1); a final empty record (step=0) marks the job finished.

    Returns (True, job_id) on success, (False, job_id) when the job's
    scratch directory could not be cleaned afterwards.
    """
    CMD_MRSERVER = "%(_mrserver)s  %(_boosttxt)s  %(_fdb)s  %(_kdb)s  %(_emparams)s  %(_fivess)s  %(_result)s  %(_status)s   %(_key_dir)s  %(_key_txt)s"
    req = {
        '_mrserver': getMrserver,
        '_boosttxt': getBoosttxt,
        '_fdb': '',
        '_kdb': '',
        '_emparams': getEmparams,
        '_fivess': getFivess,
        '_key_dir': _new_job['_key_dir'],
        '_key_txt': _new_job['_key_txt'],
        '_result': os.path.join(_new_job['_key_dir'], 'tmp_result.txt'),
        '_status': os.path.join(_new_job['_key_dir'], 'tmp_status.txt'),
    }
    m_job_id = _new_job['_job_id']
    # BUG FIX: pre-initialise so the finish record below cannot raise a
    # NameError when _DNA_DB_LIST is empty or every run failed
    m_basename = ''
    coll_txt = ''
    for fdb_kdb in _DNA_DB_LIST:
        pprint_time("start exe_mrserver()")
        # blocks until no stray mrserver process remains
        print("===================================")
        print("     pack_isAlive_mrserver()       ")
        print(pack_isAlive_mrserver())
        print("===================================")
        req['_fdb'] = fdb_kdb[0]
        req['_kdb'] = fdb_kdb[1]
        m_basename = (os.path.basename(fdb_kdb[0])).split('.')[0]
        cmd_mr = CMD_MRSERVER % req
        cmd_code, cmd_o, cmd_e = doCmd(cmd_mr, MATCH_TIMEOUT)
        print("doCmd(%s, %s)! msg=%s %s" % (cmd_mr, MATCH_TIMEOUT, cmd_o, cmd_e))
        if cmd_code == -1:
            msg = "doCmd(%s, %s) return False! msg=%s" % (cmd_mr, MATCH_TIMEOUT, cmd_e)
            print("********************************************")
            print(msg)
            mylog.GenLog("ERROR", msg)
            print("********************************************")
            # clean up any zombie mrserver before the next DB (blocking)
            pack_isAlive_mrserver()
            continue
        # collect and queue this DB's result chunk
        coll_code, coll_txt = collect_result(req['_result'])
        if not save_result2glist(m_job_id, m_basename, 1, coll_txt):
            mylog.GenLog("ERROR", "save_result2pickle(%s, %s, 0, %s) return False." % (m_job_id, m_basename, coll_txt))
    # report that this job is done (step=0 marks "finish")
    if not save_result2glist(m_job_id, m_basename, 0, ''):
        mylog.GenLog("ERROR", "save_result2pickle(%s, %s, 1, %s) return False." % (m_job_id, m_basename, coll_txt))
    # remove the job's scratch directory tree
    clear_code, clear_txt = clear_dirs(_new_job['_key_dir'], True)
    if not clear_code:
        msg = "clear_code(%s, %s) return False! msg=%s" % (_new_job['_key_dir'], "True", clear_txt)
        mylog.GenLog("ERROR", msg)
        return False, m_job_id
    return True, m_job_id

#
def pprint_time(msg=None):
    """Print *msg* prefixed with the current local timestamp, padded by blank lines."""
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    print("")
    print('[%s] %s' % (stamp, msg))
    print("")


#
def dodo(_job):
    """
    Process one job end to end: wait until the machine is safe (no stray
    mrserver process, low load), download the job's key file, run
    mrserver against every local DNA DB, and let exe_mrserver queue the
    results.

    [IN]
    _job = {'_job_id': 1005, '_key_url': 'http://10.5.10.180/test_key.txt'}
    [OUT]
    return False on any failure (also reported via report_process /
    report_error), True when the whole pipeline completed.
    """
    #
    pprint_time("================start dodo()===============")
    m_job_id=_job['_job_id']
    # CHECK living mrserver !
    # This functions will be block!
    pack_isAlive_mrserver(m_job_id)
    #
    # check loadavg? is Block!
    pack_check_loadavg(m_job_id)
    # Prepare [IN] Values for mrserver to do !
    patch_code, patch_txt=patch_url2hostabspath(_job)
    print 'patch_url2hostabspath=', patch_txt
    if not patch_code:
        msg = "patch_url2hostabspath(%s) Error! msg=%s"%(_job, patch_txt)
        if not report_process(-100, m_job_id,  msg):
            mylog.GenLog("ERROR", "%s %s"%(m_job_id, msg))
        return False
    m_new_job=patch_txt
    # Exe mrserver !
    # get this server's ALL DNA DB list
    db_code, db_txt=get_db_list()
    print 'get_db_list=', db_txt
    if not db_code:
        msg = "get_db_list() return False! msg=%s"%db_txt
        # NOTE(review): this branch calls report_error() while the other
        # failure branches call report_process() -- confirm which is intended
        if not report_error(-100, m_job_id, msg):
            mylog.GenLog("ERROR", "%s %s"%(m_job_id, msg))
        return False
    m_db_list=db_txt
    exe_code, exe_txt=exe_mrserver(m_new_job, m_db_list)
    print 'exe_txt=', exe_txt
    if not exe_code:
        msg = "exe_mrserver(%s, %s) return False! msg=%s"%(m_new_job, m_db_list, exe_txt)
        if not report_process(-100, m_job_id, exe_txt):
            mylog.GenLog("ERROR", "report_process(-100, %s, %s) return False."%(m_job_id, exe_txt))
        return False
    pprint_time("===============end dodo()===============")
    return True


#
def report_error(code=-100, m_job_id=None, msg=None):
    """
    Tell the central server that job *m_job_id* failed, via the
    reporterror URL.  Returns True on success, False when m_job_id is
    missing or the report call failed.  (`code` and `msg` are accepted
    for interface compatibility but not transmitted.)
    """
    if not m_job_id:
        print("m_job_id is unavailable!")
        return False
    server = "%s:%s" % (SHOST, PORT)
    url = url_reporterror % (HOSTNAME, m_job_id)
    print("======================== error =========================")
    print("server= %s" % server)
    print("url= %s" % url)
    if netutil.client_get_report(server, url, md5key):
        return True
    print("client_get(%s, %s, %s) return False." % (server, url, md5key))
    return False


#
def get_one_job():
    """
    Pop the oldest job off the shared G_JOB_LIST under its lock.

    G_JOB_LIST entries look like [job_id, key_url].
    Returns the entry, or False when the list is empty or popping failed.
    """
    global G_JOB_LIST
    if len(G_JOB_LIST) == 0:
        print("len(G_JOB_LIST) == 0, means no job in")
        return False
    # BUG FIX: `one` was referenced after the try/except even when pop()
    # raised (except: pass), producing a NameError; pre-initialise it
    one = None
    try:
        G_LOCK_JOB.acquire()
        try:
            one = G_JOB_LIST.pop(0)
        finally:
            G_LOCK_JOB.release()
    except Exception:
        pass
    if one:
        return one
    return False

################################################
#
# '2': all_dodo !
#
################################################
def all_dodo():
    """
    Worker loop (thread '2'): forever re-read the configuration, pull one
    job from G_JOB_LIST and run it through dodo(); sleeps 5 seconds when
    there is no job.  The loop never exits.
    """
    while True:
        # reload 
        # re-import the helper modules and re-read settings so that
        # config changes are picked up without restarting the daemon
        reload(mylog)
        reload(netutil)
        reload(subwork)
        reload_config_ini()
        #
        one_job=get_one_job()
        print 'one_job=', one_job
        if one_job:
            # one_job = [job_id, key_url]
            m_one_job={}
            m_one_job['_job_id']=one_job[0]
            m_one_job['_key_url']=one_job[1]
            print 'm_one_job=', m_one_job
            if not dodo(m_one_job):
                print "This time we Exception! dodo()"
                pprint_time("This time we Exception! dodo()")
        else:
            print "This time we can not get any jobs!"
            pprint_time("We can not get any jobs! please sleep(10), and then try again!")
            time.sleep(5)
    #
    # return (unreachable: the loop above never breaks)
    return True

################################################
#
# report_process : report error!
#
################################################
def report_process(code=-100, _job_id=None, msg=None):
    """
    Record a job progress/error report locally: dump it to stdout and
    write it to the ERROR log.  Always returns True.
    """
    print("")
    print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
    print("code= %s" % (code,))
    print("job_id= %s" % (_job_id,))
    print("msg= %s" % (msg,))
    combined = "%s %s %s" % (code, _job_id, msg)
    mylog.GenLog("ERROR", "%s" % combined)
    return True


################################################
#
# '1': get job from CentServer!
#
################################################
def getJobFromServer():
    """
    Fetcher loop (thread '1'): forever pull jobs from the central server
    into the shared G_JOB_LIST, throttled so no more than MAX_JOBS_LIST
    jobs are pending at once.  The loop never exits.
    """
    global G_JOB_LIST
    #
    while True:
        time.sleep(10)
        if len(G_JOB_LIST) >= MAX_JOBS_LIST: # more than MAX_JOBS_LIST, we sleep for wait!
            print "len(%s) >= %s, so we sleep for a moments."%(G_JOB_LIST, MAX_JOBS_LIST)
            time.sleep(WAIT_FROM_SERVER)
            continue
        #
        job_code, job_txt=actual_get_job_from_server()
        if job_code == 0: # Yes, get job
            # job_txt = [1005, 'http://10.5.10.180/1005.flv.key']
            try:
                G_LOCK_JOB.acquire()
                G_JOB_LIST.append(job_txt)
            finally:
                G_LOCK_JOB.release()
        elif job_code == 1: # means, no job in CentServer
            time.sleep(WAIT_FROM_SERVER)
            pass
        elif job_code == 2: # means, www Error
            time.sleep(WAIT_FROM_SERVER)
            pass


#
def actual_get_job_from_server():
    """
    Ask the central server for one job.

    Returns a (status, payload) pair:
      (0, [job_id, key_url]) -- got a job (both elements are strings)
      (1, "no job!")         -- the server had nothing for us
      (2, "job-info is error!") -- malformed response (also logged)
    """
    server = "%s:%s" % (SHOST, PORT)
    url = url_getjob % (HOSTNAME)
    print("server= %s" % server)
    print("url= %s" % url)
    req = netutil.client_get(server, url, md5key)
    print("req= %s" % (req,))
    if not req:
        return 1, "no job!"
    try:
        # expected payload: '1005,http://10.5.10.180/1005.flv.key'
        tmp = req[0].split(',')
        # BUG FIX: tmp[1] was read outside the try, so a payload without
        # a comma raised an unhandled IndexError instead of returning 2
        job_id, key_url = tmp[0], tmp[1]
    except Exception as e:
        msg = "netutil.client_get(%s, %s, %s) return error, %s" % (server, url, md5key, e)
        mylog.GenLog("ERROR", msg)
        return 2, "job-info is error!"
    return 0, [job_id, key_url]



################################################
#
#   '3': carry G_RESULT_LIST to G_POST_LIST
#
################################################
def carryGresult2Gpost():
    """
    Carrier loop (thread '3'): forever move finished records from
    G_RESULT_LIST into G_POST_LIST, appending a post-attempt counter of 0
    to each record.  The loop never exits.

    G_RESULT_LIST= [[1005, 1225942516, msg, chunk/finish], ...]
    G_POST_LIST  = [[1005, 1225942516, msg, chunk/finish, post_times], ...]
    """
    global G_RESULT_LIST
    global G_POST_LIST
    #
    while True: # loop forever!
        time.sleep(5)
        print 
        print "----- carryGresult2Gpost() -----"
        print 
        if len(G_RESULT_LIST) == 0: # no result to carry
            print "len(G_RESULT_LIST) == 0, means no result to carry!"
            time.sleep(5)
            continue
        else: # OK, carry result from G_RESULT_LIST to G_POST_LIST
            tmp_list=[]
            try:
                try:
                    G_LOCK_RESULT.acquire()
                    tmp_list=copy.deepcopy(G_RESULT_LIST)
                    del G_RESULT_LIST[:] # new G_RESULT_LIST, must be LOCK, to avoid losing results
                finally:
                    G_LOCK_RESULT.release()
            except:
                pass
            try:
                try:
                    G_LOCK_POST.acquire()
                    for e in tmp_list:
                        # e = [1005, 1225942500, msg. chunk/finish]
                        e.append(0) # add times count !
                        # new e = [1005, 1225942500, msg, chunk/finish, post_times]
                        G_POST_LIST.append(e) # add G_RESULT_LIST's first to G_POST_LIST's end !
                    del tmp_list[:]
                finally:
                    G_LOCK_POST.release()
            except:
                pass
    #
    # return (unreachable: the loop above never breaks)
    return


################################################
#
# '4': post G_POST_LIST's results
#
################################################
def postG_POST_LIST():
    """
    Poster loop (thread '4'): forever post the head record of G_POST_LIST
    to the central server via actual_post().  A record that actual_post()
    accepts (posted, invalid, or over the retry limit) is removed; a
    failed record has its attempt counter bumped and is re-queued at the
    tail.  The loop never exits.

    G_POST_LIST  = [[1005, 1225942516, msg, chunk/finish, post_times], ...]
    """
    #
    global G_POST_LIST
    #
    while True: # loop forever!
        time.sleep(5)
        print 
        print "----- postG_POST_LIST() -----"
        print 
        if len(G_POST_LIST) == 0: # no result to report
            print "len(G_POST_LIST) == 0, means no result to report! "
            time.sleep(1)
            continue
        else:
            e=G_POST_LIST[0] # first go
            #
            if actual_post(e): # means post OK!
                try:
                    try:
                        # pop(0) G_POST_LIST !
                        G_LOCK_POST.acquire()
                        # e = [1005, 1225942500, msg, chunk/finish, post_times]
                        G_POST_LIST.pop(0)
                    finally:
                        G_LOCK_POST.release()
                except:
                    pass
            else: # means post Fail !
                try:
                    try:
                        #
                        G_LOCK_POST.acquire()
                        tmp_e = G_POST_LIST.pop(0)
                        # e = [1005, 1225942500, msg, chunk/finish, post_times]
                        # add post times
                        tmp_e[4] += 1
                        G_POST_LIST.append(tmp_e) # add to the post list's end
                    finally:
                        G_LOCK_POST.release()
                except:
                    pass
    #
    # return (unreachable: the loop above never breaks)
    return

#
#
def actual_post(_m_list=None):
    """
    _m_list=[1005, 1225942516, msg, chunk/finish, post_times]
    """
    #
    if not _m_list:
        print "_m_list is unavailable!"
        return True
    #
    if not isinstance(_m_list, list):
        print "_m_list is unavailable!"
        return True
    #
    if _m_list[4] >= MAX_POST_TIMES:
        print "%s is post more than %s times, so we give up!"%(_m_list, _m_list[3])
        return True
    # post !
    post_req={}
    post_req['_job_id'] = _m_list[0]
    post_req['_db_time']= _m_list[1]
    post_req['_msg']    = _m_list[2]
    post_req['_isChunk']= _m_list[3]
    print "post_req=", post_req
    #mylog.GenLog("ERROR", "%s"%post_req)
    server="%s:%s"%(SHOST, PORT)
    if post_req['_isChunk'] == 0: # last, means finish !
        #url   ="/y/finger/postfinish/%s/%s/"%(HOSTNAME, post_req['_job_id'])
        url   =url_postfinish%(HOSTNAME, post_req['_job_id'])
        print "======================== finish ========================="
        print "server=", server
        print "url=", url
        if not netutil.client_get_report(server, url, md5key):
            print "client_get(%s, %s, %s) return False."%(server, url, md5key)
    else:
        #url   ="/y/finger/postchunk/%s/%s/"%(HOSTNAME, post_req['_job_id'])
        url   =url_postchunk%(HOSTNAME, post_req['_job_id'])
        print "server=", server
        print "url=", url
        if not netutil.client_post(server, url, post_req['_msg'], md5key):
            print "client_post(%s, %s, %s, %s) return False."%(server, url, post_req['_msg'], md5key)
    return True


################################################
#
# '5': monitor ALL !
#
################################################
def monitor_all():
    """
    monitor all in this programms!
    """
    while True:
        time.sleep(10)
        print 
        print "----- monitor_all() -----"
        print 
        # loadavg
        if pack_check_loadavg():
            print "OK, loadavg is OK!"
        #
        if make_sure_one():
            print "OK, this time is one mrserver is running!"
        #
        if check_memory_swap():
            print "OK, Swap < 200MB!"
        # memory used info!
        #

#
def check_memory_swap():
    """
    free -m ====> get Swap used lower swap
    make sure Swap < 200MB
    """
    cmd="free -m"
    try:
        swap_code, swap_txt=commands.getstatusoutput(cmd)
        if swap_code == 0: # ok
            tmp=swap_txt.split("%s"%(os.linesep))
            for i in tmp:
                if re.search(r'^Swap:', i): # find Swap info line
                    swap=i.split()[2]
                    print "swap=", swap
                    if int(swap) >= 100:
                        msg="ooo, swap >= 200MB"
                        pack_isAlive_mrserver()
                        mylog.GenLog("ERROR", msg)
                    else:
                        return True
    except Exception, e:
        msg="check_memory_swap(), %s"%e
        mylog.GenLog("ERROR", msg)
        return True
    #
    # return
    return True


################################################
#
# '6': clear logs !
#
################################################
def clear_logs():
    """
    clear logs ,which mtime > 7 days,
    """
    timeout=7*24*60*60 # 7 days
    while True:
        #
        time.sleep(timeout)
        #
        try:
            currpath=os.path.normpath(os.path.join(os.getcwd(),os.path.dirname(__file__)))
            logs_dir = os.path.join(currpath, 'logs')
            if os.path.exists(logs_dir) and os.path.isdir(logs_dir):
                for i in os.listdir(logs_dir):
                    #
                    currtime=int(time.time())
                    currlogs=os.path.join(logs_dir, i)
                    if currtime - os.path.getmtime(currlogs) >= timeout:
                        # delete
                        print "delete file %s"%currlogs
                        os.remove(currlogs)
        except Exception, e:
            msg="clear_logs() Exception, %s"%e
            mylog.GenLog("ERROR", msg)
        #
    #
    # return
    return True


#
# name -> Thread object for every worker started via mythread();
# all_main() polls these to detect dead threads.
G_threads={}
#
# thread name -> entry function (intended for restarting a dead worker
# thread by name)
func_dict={\
        'getJobFromServer':getJobFromServer,\
        'all_dodo':all_dodo,\
        'carryGresult2Gpost':carryGresult2Gpost,\
        'postG_POST_LIST':postG_POST_LIST,\
        'monitor_all':monitor_all,\
        'clear_logs':clear_logs,\
        }


#
class mythread:
    """Start *func* in a named daemon thread and register it in G_threads."""

    def __init__(self, threadname=None, func=None):
        global G_threads
        # daemon so a hung worker never blocks process exit
        worker = threading.Thread(target=func)
        worker.setName(threadname)
        worker.setDaemon(1)
        worker.start()
        self.tmp_thread = worker
        G_threads[threadname] = worker

#
#
def all_main():
    """
    """
    # reload 
    reload(mylog)
    reload(subwork)
    reload_config_ini()
    #
    # '1'
    #thread.start_new_thread(getJobFromServer, ())
    mythread("getJobFromServer", getJobFromServer)
    # '2'
    #thread.start_new_thread(all_dodo, ())
    mythread("all_dodo", all_dodo)
    # '3'
    #thread.start_new_thread(carryGresult2Gpost, ())
    mythread("carryGresult2Gpost", carryGresult2Gpost)
    # '4'
    #thread.start_new_thread(postG_POST_LIST, ())
    mythread("postG_POST_LIST", postG_POST_LIST)
    # '5'
    #thread.start_new_thread(monitor_all, ())
    mythread("monitor_all", monitor_all)
    # '6'
    #thread.start_new_thread(clear_logs, ())
    mythread("clear_logs", clear_logs)
    #
    # loop!
    while True:
        time.sleep(20)
        keys=G_threads.keys()
        for k in keys:
            print "%s isAlive= %s"%(G_threads[k].getName(), G_threads[k].isAlive())
            if not G_threads[k].isAlive():
                die_thread_name=G_threads[k].getName()
                msg="thread %s die!"%die_thread_name
                mylog.GenLog("ERROR", msg)
                #
                sys.exit(0)
                #
                if func_dict.has_key(die_thread_name):
                    mythread("%s"%die_thread_name, func_dict[die_thread_name])
        print "-----------------------------"
        print "G_JOB_LIST=", G_JOB_LIST
        print "G_RESULT_LIST=", G_RESULT_LIST
        print "G_POST_LIST=", G_POST_LIST
        print "-----------------------------"
        if os.path.exists(SAFE_FILE):
            sys.exit(0)
    #
    return

#
##################################################
if __name__=='__main__':
    #
    #unittest.main()
    #
    # script entry point: start all workers and supervise them forever
    all_main()
    #

