#!/usr/bin/env python
#-*-encoding:utf-8-*-
'''
Created on 2014年12月12日

@author: chenyongbing
'''
import sys,os,datetime,commands,time,re
current_dir = os.path.dirname(__file__)

import logging,os
import logging.handlers
class AutoRun():
    """Driver for the daily batch pipeline.

    Stages raw-data files (merge / copy / untar) under a working directory,
    then launches the shell entry points of the zm-mapreduce and zm-hadoop
    jobs for each requested day, including the derived week/month/quarter
    runs.  Any failing shell command terminates the whole process.
    """

    def __init__(self):
        self.init_logger()

    def init_logger(self):
        """Attach a rotating file handler (tst.log, 10 MB x 5 backups) to the
        'tst' logger and enable DEBUG level."""
        LOG_FILE = 'tst.log'

        handler = logging.handlers.RotatingFileHandler(LOG_FILE, maxBytes=1024 * 1024 * 10, backupCount=5)
        fmt = '%(asctime)s - %(filename)s:%(lineno)s - %(name)s - %(message)s'

        formatter = logging.Formatter(fmt)
        handler.setFormatter(formatter)

        self.logger = logging.getLogger('tst')
        # BUGFIX: only add the handler once.  getLogger('tst') returns a
        # shared singleton, so re-instantiating AutoRun would otherwise
        # attach a second handler and duplicate every log line.
        if not self.logger.handlers:
            self.logger.addHandler(handler)
        self.logger.setLevel(logging.DEBUG)

    def get_slist_between_st_et(self, st, et):
        u'''Return every date from st through et (inclusive) as a list of
        'YYYY-MM-DD' strings.  The loop condition relies on ISO date strings
        comparing in chronological order.'''
        dlist = []
        count = 0
        # Hoisted out of the loop: the start date never changes.
        start = datetime.date(int(st[:4]), int(st[5:7]), int(st[8:]))
        nday = (start + datetime.timedelta(-1)).strftime('%Y-%m-%d')
        while nday < et:
            nday = (start + datetime.timedelta(count)).strftime('%Y-%m-%d')
            count += 1
            dlist.append(nday)
        return dlist

    def run_commands(self, cmd=''):
        """Run a shell command; on non-zero exit status log its output and
        terminate the process.  Unexpected errors while running the command
        are logged and also terminate the process."""
        try:
            status, output = commands.getstatusoutput(cmd)
        except Exception:
            self.logger.error('check out error.')
            sys.exit()
        # BUGFIX: the original wrapped this check (including sys.exit()) in a
        # bare except, so the SystemExit raised for a failing command was
        # swallowed and mislogged as 'check out error.' before exiting.
        if status != 0:
            self.logger.error(output)
            sys.exit()

    def auto_hadoop(self, day='', softPath='/data/soft', timType='all'):
        u'''Launch the hadoop jobs for one day.

        timType: one of all, day, week, month, quarter.  The derived runs are
        keyed off tday (the day AFTER `day`): month when tday is the 1st,
        week when tday is a Monday, quarter when tday is Jan/Apr/Jul/Oct 1st.'''
        self.logger.info('start run hadoop %s' % day)
        tday_date = datetime.date(int(day[:4]), int(day[5:7]), int(day[8:10])) + datetime.timedelta(1)
        tday = tday_date.strftime('%Y-%m-%d')

        if timType in ['all', 'day']:
            self.logger.info('Start Run Day:%s' % day)
            self.run_commands('sh %s/zm-hadoop/bin/start.sh --day %s' % (softPath, day))
        if re.search('-01$', tday) and timType in ['all', 'month']:
            # run month
            self.logger.info('Start Run Month:%s' % tday)
            self.run_commands('sh %s/zm-hadoop/bin/start.sh --month %s' % (softPath, tday))

        if tday_date.weekday() == 0 and timType in ['all', 'week']:
            # run week
            self.logger.info('Start Run Week:%s' % tday)
            self.run_commands('sh %s/zm-hadoop/bin/start.sh --week %s' % (softPath, tday))

        if (re.search('-01-01', tday) or re.search('-04-01', tday) or re.search('-07-01', tday) or re.search('-10-01', tday)) and timType in ['all', 'quarter']:
            # run quarter
            # BUGFIX: log tday (the date actually passed to the job), not
            # day, matching the month/week branches above.
            self.logger.info('Start Run Quarter:%s' % tday)
            self.run_commands('sh %s/zm-hadoop/bin/start.sh --quarter %s' % (softPath, tday))
        self.logger.info('End run hadoop %s' % day)

    def auto_mapreduce(self, rawfile='', day='', softPath='/data/soft'):
        """Launch the mapreduce job for one day.

        rawfile is unused here (its existence check was disabled upstream)
        but kept for interface compatibility."""
        self.logger.info("start run mapreduce day:%s  rawdata" % day)
        self.run_commands('sh %s/zm-mapreduce/bin/start.sh %s' % (softPath, day))
        self.logger.info("end run mapreduce day:%s  rawdata" % day)

    def auto_mapreduce_old(self, rawfile='', day='', softPath='/data/soft'):
        """Legacy variant of auto_mapreduce: also runs start_rawdata.sh
        before start.sh.  rawfile is unused (check disabled upstream) but
        kept for interface compatibility."""
        self.logger.info("start run mapreduce day:%s  rawdata" % day)
        self.run_commands('sh %s/zm-mapreduce/bin/start_rawdata.sh %s' % (softPath, day))
        self.logger.info("end run mapreduce day:%s  rawdata" % day)
        self.run_commands('sh %s/zm-mapreduce/bin/start.sh %s' % (softPath, day))
        self.logger.info("end run mapreduce day:%s " % day)

    def autoRun(self, days, backPath='/mnt/rawdata/', rawPath='/data/rawdata', rawTarPath=None, sourcePath='/usr/java/data/source', softPath='/data/soft', runType='mapreduce', timeType='all'):
        """Process each day in `days`: stage its raw data under rawPath, run
        the mapreduce and/or hadoop jobs, then remove the staged copy.

        runType: 'mapreduce', 'hadoop' or 'all'.
        rawTarPath / sourcePath are accepted for interface compatibility but
        unused here."""
        for day in days:
            try:
                os.popen('mkdir -p %s/%s' % (rawPath, day))
            except Exception:
                # best-effort: the directory may already exist
                pass
            self.logger.info('start run day %s' % day)
            if runType == 'mapreduce' or runType == 'all':
                rawfile = rawPath + '/rawdata.%s.txt' % day
                commands.getstatusoutput('rm -rf %s' % rawfile)
                backfile = backPath + '/rawdata.%s.txt' % day
                # Retry the merge until the backup file (plain or .tar.gz)
                # appears; run_commands exits the process on failure.
                while 1:
                    if os.path.exists(backfile) or os.path.exists("%s.tar.gz" % backfile):
                        self.logger.info('backfile already exists.')
                        break
                    self.logger.info('start merger rawdata')
                    self.run_commands('python /usr/java/shell/merger_rawdata_from_remote.py --startTime %s --endTime %s --rawPath %s' % (day, day, backPath))

                # Stage the raw data into rawPath/<day>: untar the .tar.gz if
                # present, otherwise copy the plain file; retry until the
                # command succeeds.
                while 1:
                    if os.path.exists(rawPath + '/' + day) and len(os.listdir(rawPath + '/' + day)) > 0:
                        self.logger.info('rawfile already exists.')
                        break

                    if os.path.exists("%s.tar.gz" % backfile):
                        cmd = 'tar -xzvf %s -C %s' % ("%s.tar.gz" % backfile, rawPath + '/' + day + '/')
                    elif os.path.exists(backfile):
                        cmd = 'cp %s %s' % (backfile, rawPath + '/' + day + '/')
                    else:
                        self.logger.error('rawfile not exists.')
                        sys.exit()

                    self.logger.debug(cmd)
                    out = commands.getstatusoutput(cmd)
                    # BUGFIX: was a bare py2 `print out`; route the command
                    # result through the logger like everything else.
                    self.logger.debug(str(out))
                    if int(out[0]) == 0:
                        break
                self.auto_mapreduce(rawfile=rawfile, day=day, softPath=softPath)
            if runType == 'hadoop' or runType == 'all':
                self.auto_hadoop(day=day, softPath=softPath, timType=timeType)
            commands.getstatusoutput('rm -rf %s/%s' % (rawPath, day))
#autoRun(days,'/Udisk/rawdata','/usr/java/data/source',runType='hadoop')
if __name__ == '__main__':
    import argparse, re, datetime
    # CLI: run the pipeline for a date range; both endpoints default to
    # yesterday when omitted.
    parser = argparse.ArgumentParser(description='args')
    parser.add_argument('--startTime', metavar=u"", default=None)
    parser.add_argument('--endTime', metavar=u"requests", default=None)
    parser.add_argument('--runType', metavar=u"run type:all,mapreduce,hadoop", default='all')
    parser.add_argument('--timeType', metavar=u"all,day,week,month,quarter", default='all')
    args = parser.parse_args()

    runType = args.runType
    timeType = args.timeType
    # IDIOM/DRY fix: compare against None with `is`, and compute yesterday
    # only once instead of twice.
    yesterday = (datetime.date.today() - datetime.timedelta(1)).strftime('%Y-%m-%d')
    st = args.startTime if args.startTime is not None else yesterday
    et = args.endTime if args.endTime is not None else yesterday

    myAutoRun = AutoRun()
    days = myAutoRun.get_slist_between_st_et(st, et)

    # Deployment-specific paths for this host.
    rawTarPath = None
    rawPath = '/usr/java/data/rawdata/'
    softPath = '/usr/java/'
    sourcePath = '/usr/java/data/source'
    backPath = '/mnt/rawdata/'

    myAutoRun.autoRun(days, backPath=backPath, rawPath=rawPath, rawTarPath=rawTarPath, sourcePath=sourcePath, softPath=softPath, runType=runType, timeType=timeType)
