#encoding=utf-8
import os
import sys
import datetime
import time

from multiprocessing import Pool

'''
使用多进程执行hdfs命令进行文件的复制
组装多个命令
分成可配置的几块
交给进程执行
进程负责记录执行的结果
执行命令
python hadoop_cp_shell.py &2>> hadoop_cp_shell_out.log
因为执行的命令有些错误的输出是在屏幕上的，所以需要对错误进行重定向
'''
# hadoop 源目录
source_path = '/user/hive/warehouse/blackpanther.db/dw_man_warning_rt/startdate_day={0}/startdate_min={1}/'
# hadoop 目标目录
target_path = '/user/hive/warehouse/blackpanther.db/tbl_man_warning_rt/startdate_day={0}/'
# 每个进程处理的命令的数量和进程的启动数量
start_pool = 8

def handle_start_end_time(date_str=None):
    '''
    Yield every minute of one day as 'YYYYMMDDHHMM' strings
    ('...0000' through '...2359', 1440 values in total).
    :param date_str: the day in 'YYYYMMDD' form; defaults to the first
        command-line argument (sys.argv[1]).
    :return: generator of minute-resolution timestamp strings
    '''
    if date_str is None:
        # BUG FIX: the original read sys.argv[0], which is the script
        # path, not the date argument -> strptime always failed.
        date_str = str(sys.argv[1])
    current = datetime.datetime.strptime(date_str, '%Y%m%d')
    # Last minute of the same day: +1 day then -1 minute -> 23:59.
    end_time = current + datetime.timedelta(days=1, minutes=-1)
    yield current.strftime('%Y%m%d%H%M')
    while True:
        current = current + datetime.timedelta(minutes=1)
        if current.day != end_time.day:
            # Stepped into the next day -> the whole day was emitted.
            break
        yield current.strftime('%Y%m%d%H%M')



def init_hadoop_command():
    '''
    Build one "hadoop fs -cp -f" shell command per minute partition
    of the run day.
    @:return hadoop command list
    '''
    # minute is 'YYYYMMDDHHMM'; minute[:-4] is the 'YYYYMMDD' day part
    # used for the day-level partition directories.
    return [
        'hadoop fs -cp -f {0} {1}'.format(
            source_path.format(minute[:-4], minute),
            target_path.format(minute[:-4]),
        )
        for minute in handle_start_end_time()
    ]
    
def execute_hadoop_command(hadoop_command_list):
    '''
    Execute each hadoop cp command in turn and log the command, its
    captured stdout and the elapsed time.
    :param hadoop_command_list: list of hadoop cp shell command strings
    :return: None
    '''
    for command in hadoop_command_list:
        start_time = time.time()
        # BUG FIX: `result` was referenced below while the os.popen call
        # that produces it was commented out, raising NameError at runtime.
        result = os.popen(command).read()
        end_time = time.time()
        # Parenthesized single-argument print behaves identically under
        # Python 2 (statement) and Python 3 (function).
        print('command={0},result={1},use_time={2}\n'.format(
            command, result, end_time - start_time))
        # Throttle between copies so the namenode is not hammered.
        time.sleep(1)

def execute_pool(run_date=None):
    '''
    Fan the hadoop cp commands out over a pool of worker processes.
    @:param run_date optional run day (e.g. 20180606); accepted so the
        per-day loop in __main__ no longer raises TypeError.
        NOTE(review): the day is currently still read from sys.argv[1]
        inside handle_start_end_time() — confirm the wiring before
        relying on this parameter to select the day.
    :return: None
    '''
    commands = init_hadoop_command()
    pool = Pool(start_pool)
    # Split the command list into chunks of `start_pool` commands; each
    # chunk is handled sequentially by one worker task.
    chunks = [commands[i:i + start_pool]
              for i in range(0, len(commands), start_pool)]
    for chunk in chunks:
        pool.apply_async(execute_hadoop_command, (chunk,))
    pool.close()
    pool.join()


if __name__ == "__main__":
    # BUG FIX: the original `for` line was missing its trailing colon,
    # which made the whole module a SyntaxError.
    # range end is exclusive: this covers days 20180606 .. 20180630.
    for day in range(20180606, 20180631):
        execute_pool(day)